commit (stringlengths 40-40) | old_file (stringlengths 4-118) | new_file (stringlengths 4-118) | old_contents (stringlengths 10-2.94k) | new_contents (stringlengths 21-3.18k) | subject (stringlengths 16-444) | message (stringlengths 17-2.63k) | lang (stringclasses, 1 value) | license (stringclasses, 13 values) | repos (stringlengths 5-43k) | ndiff (stringlengths 52-3.32k) | instruction (stringlengths 16-444) | content (stringlengths 133-4.32k) | fuzzy_diff (stringlengths 16-3.18k)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3b92d215a42a8c4047d2be57b7679b87a6bfb737 | cnxmathml2svg.py | cnxmathml2svg.py | from pyramid.config import Configurator
__all__ = ('main',)
def main(global_config, **settings):
"""Application factory"""
config = Configurator(settings=settings)
return config.make_wsgi_app()
| from pyramid.config import Configurator
from pyramid.response import Response
__all__ = ('main',)
def convert(request):
"""Convert the POST'd MathML to SVG"""
return Response()
def main(global_config, **settings):
"""Application factory"""
config = Configurator(settings=settings)
config.add_route('convert', '/', request_method='POST')
config.add_view(convert, route_name='convert')
return config.make_wsgi_app()
| Add the conversion view to the app. | Add the conversion view to the app.
| Python | agpl-3.0 | Connexions/cnx-mathml2svg,pumazi/cnx-mathml2svg | from pyramid.config import Configurator
+ from pyramid.response import Response
+
+ __all__ = ('main',)
- __all__ = ('main',)
+ def convert(request):
+ """Convert the POST'd MathML to SVG"""
+ return Response()
def main(global_config, **settings):
"""Application factory"""
config = Configurator(settings=settings)
+ config.add_route('convert', '/', request_method='POST')
+ config.add_view(convert, route_name='convert')
return config.make_wsgi_app()
| Add the conversion view to the app. | ## Code Before:
from pyramid.config import Configurator
__all__ = ('main',)
def main(global_config, **settings):
"""Application factory"""
config = Configurator(settings=settings)
return config.make_wsgi_app()
## Instruction:
Add the conversion view to the app.
## Code After:
from pyramid.config import Configurator
from pyramid.response import Response
__all__ = ('main',)
def convert(request):
"""Convert the POST'd MathML to SVG"""
return Response()
def main(global_config, **settings):
"""Application factory"""
config = Configurator(settings=settings)
config.add_route('convert', '/', request_method='POST')
config.add_view(convert, route_name='convert')
return config.make_wsgi_app()
| // ... existing code ...
from pyramid.config import Configurator
from pyramid.response import Response
__all__ = ('main',)
// ... modified code ...
def convert(request):
"""Convert the POST'd MathML to SVG"""
return Response()
...
config = Configurator(settings=settings)
config.add_route('convert', '/', request_method='POST')
config.add_view(convert, route_name='convert')
return config.make_wsgi_app()
// ... rest of the code ... |
5631276591cf2c4e3c83920da32857e47286d9c9 | wanikani/django.py | wanikani/django.py | from __future__ import absolute_import
import os
import logging
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.wanikani')
with open(CONFIG_PATH) as fp:
API_KEY = fp.read()
logger = logging.getLogger(__name__)
class WaniKaniView(View):
def get(self, request, *args, **kwargs):
client = WaniKani(API_KEY)
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//My calendar product//mxm.dk//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
|
from __future__ import absolute_import
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
class WaniKaniView(View):
def get(self, request, **kwargs):
client = WaniKani(kwargs['api_key'])
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
| Switch to getting the API key from the URL instead of a config file. | Switch to getting the API key from the URL instead of a config file.
Allows other people to get their anki calendar if they want. | Python | mit | kfdm/wanikani,kfdm/wanikani | +
from __future__ import absolute_import
-
- import os
- import logging
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
- CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.wanikani')
-
- with open(CONFIG_PATH) as fp:
- API_KEY = fp.read()
-
- logger = logging.getLogger(__name__)
-
class WaniKaniView(View):
- def get(self, request, *args, **kwargs):
+ def get(self, request, **kwargs):
- client = WaniKani(API_KEY)
+ client = WaniKani(kwargs['api_key'])
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
- cal.add('prodid', '-//My calendar product//mxm.dk//')
+ cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
| Switch to getting the API key from the URL instead of a config file. | ## Code Before:
from __future__ import absolute_import
import os
import logging
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.wanikani')
with open(CONFIG_PATH) as fp:
API_KEY = fp.read()
logger = logging.getLogger(__name__)
class WaniKaniView(View):
def get(self, request, *args, **kwargs):
client = WaniKani(API_KEY)
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//My calendar product//mxm.dk//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
## Instruction:
Switch to getting the API key from the URL instead of a config file.
## Code After:
from __future__ import absolute_import
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
class WaniKaniView(View):
def get(self, request, **kwargs):
client = WaniKani(kwargs['api_key'])
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
| # ... existing code ...
from __future__ import absolute_import
# ... modified code ...
...
class WaniKaniView(View):
def get(self, request, **kwargs):
client = WaniKani(kwargs['api_key'])
...
cal = Calendar()
cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
# ... rest of the code ... |
4bcf35efcfc751a1c337fdcf50d23d9d06549717 | demo/apps/catalogue/models.py | demo/apps/catalogue/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
class Category(Page):
"""
user oscars category as a wagtail Page.
this works becuase they both use treebeard
"""
name = models.CharField(
verbose_name=_('name'),
max_length=255,
help_text=_("Category name")
)
from oscar.apps.catalogue.models import * # noqa
| from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(
verbose_name=_('name'),
max_length=255,
help_text=_("Category name")
)
from oscar.apps.catalogue.models import * # noqa
| Fix typo in doc string | Fix typo in doc string
| Python | mit | pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo | from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
class Category(Page):
"""
- user oscars category as a wagtail Page.
+ The Oscars Category as a Wagtail Page
- this works becuase they both use treebeard
+ This works because they both use Treebeard
"""
name = models.CharField(
verbose_name=_('name'),
max_length=255,
help_text=_("Category name")
)
from oscar.apps.catalogue.models import * # noqa
| Fix typo in doc string | ## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
class Category(Page):
"""
user oscars category as a wagtail Page.
this works becuase they both use treebeard
"""
name = models.CharField(
verbose_name=_('name'),
max_length=255,
help_text=_("Category name")
)
from oscar.apps.catalogue.models import * # noqa
## Instruction:
Fix typo in doc string
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(
verbose_name=_('name'),
max_length=255,
help_text=_("Category name")
)
from oscar.apps.catalogue.models import * # noqa
| # ... existing code ...
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
# ... rest of the code ... |
2d74b55a0c110a836190af819b55673bce2300a0 | gaphor/ui/macosshim.py | gaphor/ui/macosshim.py | try:
import gi
gi.require_version("GtkosxApplication", "1.0")
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
| try:
import gi
from gi.repository import Gtk
if Gtk.get_major_version() == 3:
gi.require_version("GtkosxApplication", "1.0")
else:
raise ValueError()
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
| Fix macos shim for gtk 4 | Fix macos shim for gtk 4
| Python | lgpl-2.1 | amolenaar/gaphor,amolenaar/gaphor | try:
import gi
+ from gi.repository import Gtk
+ if Gtk.get_major_version() == 3:
- gi.require_version("GtkosxApplication", "1.0")
+ gi.require_version("GtkosxApplication", "1.0")
+ else:
+ raise ValueError()
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
| Fix macos shim for gtk 4 | ## Code Before:
try:
import gi
gi.require_version("GtkosxApplication", "1.0")
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
## Instruction:
Fix macos shim for gtk 4
## Code After:
try:
import gi
from gi.repository import Gtk
if Gtk.get_major_version() == 3:
gi.require_version("GtkosxApplication", "1.0")
else:
raise ValueError()
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
| # ... existing code ...
import gi
from gi.repository import Gtk
if Gtk.get_major_version() == 3:
gi.require_version("GtkosxApplication", "1.0")
else:
raise ValueError()
except ValueError:
# ... rest of the code ... |
c2df896183f80fe3ca0eab259874bc4385d399e9 | tests/test_parallel.py | tests/test_parallel.py | from __future__ import with_statement
from datetime import datetime
import copy
import getpass
import sys
import paramiko
from nose.tools import with_setup
from fudge import (Fake, clear_calls, clear_expectations, patch_object, verify,
with_patched_object, patched_context, with_fakes)
from fabric.context_managers import settings, hide, show
from fabric.network import (HostConnectionCache, join_host_strings, normalize,
denormalize)
from fabric.io import output_loop
import fabric.network # So I can call patch_object correctly. Sigh.
from fabric.state import env, output, _get_system_username
from fabric.operations import run, sudo
from fabric.decorators import parallel
from utils import *
from server import (server, PORT, RESPONSES, PASSWORDS, CLIENT_PRIVKEY, USER,
CLIENT_PRIVKEY_PASSPHRASE)
class TestParallel(FabricTest):
@server()
@parallel
def test_parallel(self):
"""
Want to do a simple call and respond
"""
env.pool_size = 10
cmd = "ls /simple"
with hide('everything'):
eq_(run(cmd), RESPONSES[cmd])
| from __future__ import with_statement
from fabric.api import run, parallel, env, hide
from utils import FabricTest, eq_
from server import server, RESPONSES
class TestParallel(FabricTest):
@server()
@parallel
def test_parallel(self):
"""
Want to do a simple call and respond
"""
env.pool_size = 10
cmd = "ls /simple"
with hide('everything'):
eq_(run(cmd), RESPONSES[cmd])
| Clean up detrius in parallel test file | Clean up detrius in parallel test file
| Python | bsd-2-clause | bitprophet/fabric,MjAbuz/fabric,likesxuqiang/fabric,sdelements/fabric,opavader/fabric,TarasRudnyk/fabric,tekapo/fabric,haridsv/fabric,SamuelMarks/fabric,bspink/fabric,tolbkni/fabric,rane-hs/fabric-py3,mathiasertl/fabric,askulkarni2/fabric,fernandezcuesta/fabric,elijah513/fabric,xLegoz/fabric,raimon49/fabric,amaniak/fabric,pgroudas/fabric,hrubi/fabric,rodrigc/fabric,cgvarela/fabric,cmattoon/fabric,ploxiln/fabric,itoed/fabric,kxxoling/fabric,jaraco/fabric,bitmonk/fabric,felix-d/fabric,rbramwell/fabric,qinrong/fabric,StackStorm/fabric,pashinin/fabric,kmonsoor/fabric,akaariai/fabric,getsentry/fabric | from __future__ import with_statement
+ from fabric.api import run, parallel, env, hide
- from datetime import datetime
- import copy
- import getpass
- import sys
+ from utils import FabricTest, eq_
+ from server import server, RESPONSES
- import paramiko
- from nose.tools import with_setup
- from fudge import (Fake, clear_calls, clear_expectations, patch_object, verify,
- with_patched_object, patched_context, with_fakes)
- from fabric.context_managers import settings, hide, show
- from fabric.network import (HostConnectionCache, join_host_strings, normalize,
- denormalize)
- from fabric.io import output_loop
- import fabric.network # So I can call patch_object correctly. Sigh.
- from fabric.state import env, output, _get_system_username
- from fabric.operations import run, sudo
- from fabric.decorators import parallel
-
- from utils import *
- from server import (server, PORT, RESPONSES, PASSWORDS, CLIENT_PRIVKEY, USER,
- CLIENT_PRIVKEY_PASSPHRASE)
class TestParallel(FabricTest):
@server()
@parallel
def test_parallel(self):
"""
Want to do a simple call and respond
"""
env.pool_size = 10
cmd = "ls /simple"
with hide('everything'):
eq_(run(cmd), RESPONSES[cmd])
| Clean up detrius in parallel test file | ## Code Before:
from __future__ import with_statement
from datetime import datetime
import copy
import getpass
import sys
import paramiko
from nose.tools import with_setup
from fudge import (Fake, clear_calls, clear_expectations, patch_object, verify,
with_patched_object, patched_context, with_fakes)
from fabric.context_managers import settings, hide, show
from fabric.network import (HostConnectionCache, join_host_strings, normalize,
denormalize)
from fabric.io import output_loop
import fabric.network # So I can call patch_object correctly. Sigh.
from fabric.state import env, output, _get_system_username
from fabric.operations import run, sudo
from fabric.decorators import parallel
from utils import *
from server import (server, PORT, RESPONSES, PASSWORDS, CLIENT_PRIVKEY, USER,
CLIENT_PRIVKEY_PASSPHRASE)
class TestParallel(FabricTest):
@server()
@parallel
def test_parallel(self):
"""
Want to do a simple call and respond
"""
env.pool_size = 10
cmd = "ls /simple"
with hide('everything'):
eq_(run(cmd), RESPONSES[cmd])
## Instruction:
Clean up detrius in parallel test file
## Code After:
from __future__ import with_statement
from fabric.api import run, parallel, env, hide
from utils import FabricTest, eq_
from server import server, RESPONSES
class TestParallel(FabricTest):
@server()
@parallel
def test_parallel(self):
"""
Want to do a simple call and respond
"""
env.pool_size = 10
cmd = "ls /simple"
with hide('everything'):
eq_(run(cmd), RESPONSES[cmd])
| # ... existing code ...
from fabric.api import run, parallel, env, hide
from utils import FabricTest, eq_
from server import server, RESPONSES
# ... rest of the code ... |
94e70a0958f0db737ca82c5ea09528bf4e5e4fef | voteswap/wsgi.py | voteswap/wsgi.py |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "voteswap.settings")
application = get_wsgi_application()
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "voteswap.settings")
application = get_wsgi_application()
try:
from google.appengine.ext import vendor
vendor.add('lib')
except ImportError:
pass
| Add vendor dir to path | Add vendor dir to path
| Python | mit | sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "voteswap.settings")
application = get_wsgi_application()
+ try:
+ from google.appengine.ext import vendor
+ vendor.add('lib')
+ except ImportError:
+ pass
+ | Add vendor dir to path | ## Code Before:
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "voteswap.settings")
application = get_wsgi_application()
## Instruction:
Add vendor dir to path
## Code After:
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "voteswap.settings")
application = get_wsgi_application()
try:
from google.appengine.ext import vendor
vendor.add('lib')
except ImportError:
pass
| // ... existing code ...
application = get_wsgi_application()
try:
from google.appengine.ext import vendor
vendor.add('lib')
except ImportError:
pass
// ... rest of the code ... |
ad0859f2e7b6f659fe964f786277ea2ad3fdf787 | src/listener.py | src/listener.py | import logging
import socket
import threading
from connection import Connection
import shared
class Listener(threading.Thread):
def __init__(self, host, port, family=socket.AF_INET):
super().__init__(name='Listener')
self.host = host
self.port = port
self.family = family
self.s = socket.socket(self.family, socket.SOCK_STREAM)
self.s.bind((self.host, self.port))
def run(self):
self.s.listen(1)
self.s.settimeout(1)
while True:
try:
conn, addr = self.s.accept()
logging.info('Incoming connection from: {}:{}'.format(addr[0], addr[1]))
with shared.connections_lock:
c = Connection(addr[0], addr[1], conn)
c.start()
shared.connections.add(c)
except socket.timeout:
pass
| import logging
import socket
import threading
from connection import Connection
import shared
class Listener(threading.Thread):
def __init__(self, host, port, family=socket.AF_INET):
super().__init__(name='Listener')
self.host = host
self.port = port
self.family = family
self.s = socket.socket(self.family, socket.SOCK_STREAM)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind((self.host, self.port))
def run(self):
self.s.listen(1)
self.s.settimeout(1)
while True:
try:
conn, addr = self.s.accept()
logging.info('Incoming connection from: {}:{}'.format(addr[0], addr[1]))
with shared.connections_lock:
c = Connection(addr[0], addr[1], conn)
c.start()
shared.connections.add(c)
except socket.timeout:
pass
| Add SO_REUSEADDR to socket options | Add SO_REUSEADDR to socket options
| Python | mit | TheKysek/MiNode,TheKysek/MiNode | import logging
import socket
import threading
from connection import Connection
import shared
class Listener(threading.Thread):
def __init__(self, host, port, family=socket.AF_INET):
super().__init__(name='Listener')
self.host = host
self.port = port
self.family = family
self.s = socket.socket(self.family, socket.SOCK_STREAM)
+ self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind((self.host, self.port))
def run(self):
self.s.listen(1)
self.s.settimeout(1)
while True:
try:
conn, addr = self.s.accept()
logging.info('Incoming connection from: {}:{}'.format(addr[0], addr[1]))
with shared.connections_lock:
c = Connection(addr[0], addr[1], conn)
c.start()
shared.connections.add(c)
except socket.timeout:
pass
| Add SO_REUSEADDR to socket options | ## Code Before:
import logging
import socket
import threading
from connection import Connection
import shared
class Listener(threading.Thread):
def __init__(self, host, port, family=socket.AF_INET):
super().__init__(name='Listener')
self.host = host
self.port = port
self.family = family
self.s = socket.socket(self.family, socket.SOCK_STREAM)
self.s.bind((self.host, self.port))
def run(self):
self.s.listen(1)
self.s.settimeout(1)
while True:
try:
conn, addr = self.s.accept()
logging.info('Incoming connection from: {}:{}'.format(addr[0], addr[1]))
with shared.connections_lock:
c = Connection(addr[0], addr[1], conn)
c.start()
shared.connections.add(c)
except socket.timeout:
pass
## Instruction:
Add SO_REUSEADDR to socket options
## Code After:
import logging
import socket
import threading
from connection import Connection
import shared
class Listener(threading.Thread):
def __init__(self, host, port, family=socket.AF_INET):
super().__init__(name='Listener')
self.host = host
self.port = port
self.family = family
self.s = socket.socket(self.family, socket.SOCK_STREAM)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind((self.host, self.port))
def run(self):
self.s.listen(1)
self.s.settimeout(1)
while True:
try:
conn, addr = self.s.accept()
logging.info('Incoming connection from: {}:{}'.format(addr[0], addr[1]))
with shared.connections_lock:
c = Connection(addr[0], addr[1], conn)
c.start()
shared.connections.add(c)
except socket.timeout:
pass
| # ... existing code ...
self.s = socket.socket(self.family, socket.SOCK_STREAM)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind((self.host, self.port))
# ... rest of the code ... |
758553edd8da7adbfeb2d291c83442dce77c748c | spotify/__init__.py | spotify/__init__.py | from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def _add_enum(obj, prefix):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
_add_enum(Error, 'SP_ERROR_')
| from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
| Use a class decorator to add enum values to classes | Use a class decorator to add enum values to classes
| Python | apache-2.0 | jodal/pyspotify,kotamat/pyspotify,felix1m/pyspotify,jodal/pyspotify,kotamat/pyspotify,jodal/pyspotify,mopidy/pyspotify,felix1m/pyspotify,kotamat/pyspotify,mopidy/pyspotify,felix1m/pyspotify | from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
- def _add_enum(obj, prefix):
+ def enum(prefix):
+ def wrapper(obj):
- for attr in dir(lib):
+ for attr in dir(lib):
- if attr.startswith(prefix):
+ if attr.startswith(prefix):
- setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
+ setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
+ return obj
+ return wrapper
+ @enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
- _add_enum(Error, 'SP_ERROR_')
- | Use a class decorator to add enum values to classes | ## Code Before:
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def _add_enum(obj, prefix):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
_add_enum(Error, 'SP_ERROR_')
## Instruction:
Use a class decorator to add enum values to classes
## Code After:
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
| # ... existing code ...
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
# ... modified code ...
@enum('SP_ERROR_')
class Error(Exception):
...
super(Error, self).__init__(message)
# ... rest of the code ... |
e01d45e3ee39023814bca75b1344477e42865b0b | ds_max_priority_queue.py | ds_max_priority_queue.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class MaxPriorityQueue(object):
"""Max Priority Queue."""
def __init__(self):
pass
def main():
pass
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def parent(i):
return i // 2
def left(i):
return 2 * i
def right(i):
return 2 * i + 1
class MaxPriorityQueue(object):
"""Max Priority Queue."""
def __init__(self):
pass
def main():
pass
if __name__ == '__main__':
main()
| Add parent(), left() & right() | Add parent(), left() & right()
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+
+
+ def parent(i):
+ return i // 2
+
+ def left(i):
+ return 2 * i
+
+ def right(i):
+ return 2 * i + 1
class MaxPriorityQueue(object):
"""Max Priority Queue."""
def __init__(self):
pass
def main():
pass
if __name__ == '__main__':
main()
| Add parent(), left() & right() | ## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class MaxPriorityQueue(object):
"""Max Priority Queue."""
def __init__(self):
pass
def main():
pass
if __name__ == '__main__':
main()
## Instruction:
Add parent(), left() & right()
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def parent(i):
return i // 2
def left(i):
return 2 * i
def right(i):
return 2 * i + 1
class MaxPriorityQueue(object):
"""Max Priority Queue."""
def __init__(self):
pass
def main():
pass
if __name__ == '__main__':
main()
| # ... existing code ...
from __future__ import print_function
def parent(i):
return i // 2
def left(i):
return 2 * i
def right(i):
return 2 * i + 1
# ... rest of the code ... |
f64447ca0e1442552b4a854fec5a8f847d2165cd | numpy/_array_api/_types.py | numpy/_array_api/_types.py |
__all__ = ['Literal', 'Optional', 'Tuple', 'Union', 'array', 'device',
'dtype', 'SupportsDLPack', 'SupportsBufferProtocol', 'PyCapsule']
from typing import Literal, Optional, Tuple, Union, TypeVar
import numpy as np
array = np.ndarray
device = TypeVar('device')
dtype = Literal[np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16,
np.uint32, np.uint64, np.float32, np.float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
|
__all__ = ['Literal', 'Optional', 'Tuple', 'Union', 'array', 'device',
'dtype', 'SupportsDLPack', 'SupportsBufferProtocol', 'PyCapsule']
from typing import Literal, Optional, Tuple, Union, TypeVar
from . import (ndarray, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
array = ndarray
device = TypeVar('device')
dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
| Use the array API types for the array API type annotations | Use the array API types for the array API type annotations
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy |
__all__ = ['Literal', 'Optional', 'Tuple', 'Union', 'array', 'device',
'dtype', 'SupportsDLPack', 'SupportsBufferProtocol', 'PyCapsule']
from typing import Literal, Optional, Tuple, Union, TypeVar
- import numpy as np
+ from . import (ndarray, int8, int16, int32, int64, uint8, uint16, uint32,
+ uint64, float32, float64)
- array = np.ndarray
+ array = ndarray
device = TypeVar('device')
- dtype = Literal[np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16,
+ dtype = Literal[int8, int16, int32, int64, uint8, uint16,
- np.uint32, np.uint64, np.float32, np.float64]
+ uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
| Use the array API types for the array API type annotations | ## Code Before:
__all__ = ['Literal', 'Optional', 'Tuple', 'Union', 'array', 'device',
'dtype', 'SupportsDLPack', 'SupportsBufferProtocol', 'PyCapsule']
from typing import Literal, Optional, Tuple, Union, TypeVar
import numpy as np
array = np.ndarray
device = TypeVar('device')
dtype = Literal[np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16,
np.uint32, np.uint64, np.float32, np.float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
## Instruction:
Use the array API types for the array API type annotations
## Code After:
__all__ = ['Literal', 'Optional', 'Tuple', 'Union', 'array', 'device',
'dtype', 'SupportsDLPack', 'SupportsBufferProtocol', 'PyCapsule']
from typing import Literal, Optional, Tuple, Union, TypeVar
from . import (ndarray, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
array = ndarray
device = TypeVar('device')
dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
| ...
from . import (ndarray, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
array = ndarray
device = TypeVar('device')
dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
... |
d28e884d832b63bef1434476a378de9b7e333264 | samples/WavGenerator.py | samples/WavGenerator.py |
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
|
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
| Add a main function with command line arguments | Add a main function with command line arguments
Now able to generate wave files from command line | Python | mit | parrisha/raspi-visualizer |
import wave
+ import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
-
+
- filename = 'Sample_'
+ filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
- filename += str(test_freq) + 'Hz@' + str(test_amp)
+ filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
+
+ if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
+ parser.add_argument('--freqs', nargs='+', type=int)
+ parser.add_argument('--amps', nargs='+', type=int)
+ args = parser.parse_args()
+
+ generate_sample_file(args.freqs, args.amps)
+
| Add a main function with command line arguments | ## Code Before:
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
## Instruction:
Add a main function with command line arguments
## Code After:
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
| // ... existing code ...
import wave
import argparse
import numpy as np
// ... modified code ...
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
...
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
...
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
// ... rest of the code ... |
450a1f64a21afce008392e321fff2d268bb9fc41 | setup.py | setup.py | from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
ALGPATH = "clusterpy/core/toolboxes/cluster/componentsAlg/"
ALGPKG = "clusterpy.core.toolboxes.cluster.componentsAlg."
CLUSPATH = "clusterpy/core/toolboxes/cluster/"
CLUSPKG = "clusterpy.core.toolboxes.cluster."
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
ext_modules = [Extension(CLUSPKG+"arisel", [CLUSPATH+"arisel.pyx"],
extra_link_args=['-fopenmp'],
extra_compile_args=['-fopenmp']
),
Extension(ALGPKG+"distanceFunctions", [ALGPATH+"distanceFunctions.pyx"]),
Extension(ALGPKG+"dist2Regions", [ALGPATH+"dist2Regions.pyx"]),
Extension(ALGPKG+"selectionTypeFunctions", [ALGPATH+"selectionTypeFunctions.pyx"]),
Extension(ALGPKG+"init", [ALGPATH+"init.pyx"]),
Extension(ALGPKG+"objFunctions", [ALGPATH+"objFunctions.pyx"])
],
cmdclass = {'build_ext': build_ext}
)
| from distutils.core import setup
from distutils.extension import Extension
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
)
| Remove cython Extension builder and build_ext from Setup | Remove cython Extension builder and build_ext from Setup
| Python | bsd-3-clause | clusterpy/clusterpy,clusterpy/clusterpy | from distutils.core import setup
from distutils.extension import Extension
- from Cython.Distutils import build_ext
-
- import numpy
-
- ALGPATH = "clusterpy/core/toolboxes/cluster/componentsAlg/"
- ALGPKG = "clusterpy.core.toolboxes.cluster.componentsAlg."
-
- CLUSPATH = "clusterpy/core/toolboxes/cluster/"
- CLUSPKG = "clusterpy.core.toolboxes.cluster."
-
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
- ext_modules = [Extension(CLUSPKG+"arisel", [CLUSPATH+"arisel.pyx"],
- extra_link_args=['-fopenmp'],
- extra_compile_args=['-fopenmp']
- ),
- Extension(ALGPKG+"distanceFunctions", [ALGPATH+"distanceFunctions.pyx"]),
- Extension(ALGPKG+"dist2Regions", [ALGPATH+"dist2Regions.pyx"]),
- Extension(ALGPKG+"selectionTypeFunctions", [ALGPATH+"selectionTypeFunctions.pyx"]),
- Extension(ALGPKG+"init", [ALGPATH+"init.pyx"]),
- Extension(ALGPKG+"objFunctions", [ALGPATH+"objFunctions.pyx"])
- ],
- cmdclass = {'build_ext': build_ext}
-
-
)
| Remove cython Extension builder and build_ext from Setup | ## Code Before:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
ALGPATH = "clusterpy/core/toolboxes/cluster/componentsAlg/"
ALGPKG = "clusterpy.core.toolboxes.cluster.componentsAlg."
CLUSPATH = "clusterpy/core/toolboxes/cluster/"
CLUSPKG = "clusterpy.core.toolboxes.cluster."
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
ext_modules = [Extension(CLUSPKG+"arisel", [CLUSPATH+"arisel.pyx"],
extra_link_args=['-fopenmp'],
extra_compile_args=['-fopenmp']
),
Extension(ALGPKG+"distanceFunctions", [ALGPATH+"distanceFunctions.pyx"]),
Extension(ALGPKG+"dist2Regions", [ALGPATH+"dist2Regions.pyx"]),
Extension(ALGPKG+"selectionTypeFunctions", [ALGPATH+"selectionTypeFunctions.pyx"]),
Extension(ALGPKG+"init", [ALGPATH+"init.pyx"]),
Extension(ALGPKG+"objFunctions", [ALGPATH+"objFunctions.pyx"])
],
cmdclass = {'build_ext': build_ext}
)
## Instruction:
Remove cython Extension builder and build_ext from Setup
## Code After:
from distutils.core import setup
from distutils.extension import Extension
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
)
| ...
from distutils.extension import Extension
...
'clusterpy.core.toolboxes.cluster.componentsAlg'],
)
... |
12c2c7f20e46dce54990d5cf4c0e51ab02d549c4 | adder/__init__.py | adder/__init__.py | """adder is an amazing module which adds things"""
def add(first, second):
"""The power of add is that it adds its arguments"""
return first + second
| """A mighty module to add things to each other"""
def add(first, second):
"""The power of add is that it adds its arguments"""
return first + second
| Make the docstring match the github description | Make the docstring match the github description | Python | mit | jamesmcdonald/adder | - """adder is an amazing module which adds things"""
+ """A mighty module to add things to each other"""
def add(first, second):
"""The power of add is that it adds its arguments"""
return first + second
| Make the docstring match the github description | ## Code Before:
"""adder is an amazing module which adds things"""
def add(first, second):
"""The power of add is that it adds its arguments"""
return first + second
## Instruction:
Make the docstring match the github description
## Code After:
"""A mighty module to add things to each other"""
def add(first, second):
"""The power of add is that it adds its arguments"""
return first + second
| ...
"""A mighty module to add things to each other"""
... |
99c00b309e89ceb32528c217e308b91f94a56e2b | cogs/command_log.py | cogs/command_log.py | import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
def setup(liara):
liara.add_cog(CommandLog())
| import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
args = 'with arguments {} '.format(kwargs) if kwargs else ''
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
.format(ctx, args))
def setup(liara):
liara.add_cog(CommandLog())
| Make the command log more detailed | Make the command log more detailed
| Python | mit | Thessia/Liara | import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
+ kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
+ args = 'with arguments {} '.format(kwargs) if kwargs else ''
- self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
+ self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
+ .format(ctx, args))
def setup(liara):
liara.add_cog(CommandLog())
| Make the command log more detailed | ## Code Before:
import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
def setup(liara):
liara.add_cog(CommandLog())
## Instruction:
Make the command log more detailed
## Code After:
import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
args = 'with arguments {} '.format(kwargs) if kwargs else ''
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
.format(ctx, args))
def setup(liara):
liara.add_cog(CommandLog())
| ...
async def on_command(self, ctx):
kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
args = 'with arguments {} '.format(kwargs) if kwargs else ''
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
.format(ctx, args))
... |
efe30ee01d3b1eb46cd7d986beba09ec47a51e14 | app/api/cruds/weekday_crud.py | app/api/cruds/weekday_crud.py | from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return Weekday(weekday=weekday)
except ValidationError as e:
return Weekday(weekday=None, errors=get_errors(e))
| from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
| Fix errors on create weekday | Fix errors on create weekday
| Python | mit | teamtaverna/core | from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
- return Weekday(weekday=weekday)
+ return CreateWeekday(weekday=weekday)
except ValidationError as e:
- return Weekday(weekday=None, errors=get_errors(e))
+ return CreateWeekday(weekday=None, errors=get_errors(e))
| Fix errors on create weekday | ## Code Before:
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return Weekday(weekday=weekday)
except ValidationError as e:
return Weekday(weekday=None, errors=get_errors(e))
## Instruction:
Fix errors on create weekday
## Code After:
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
| # ... existing code ...
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
# ... rest of the code ... |
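The fix above turns on a detail of graphene's relay mutations: `mutate_and_get_payload` must return an instance of the mutation class itself (here `CreateWeekday`), whose fields form the payload, not the Django model. A minimal sketch of the distinction with plain stand-in classes, no graphene required:

```python
class Weekday(object):
    """Stand-in for the Django model; it accepts only its own fields."""
    def __init__(self, name=''):
        self.name = name

class CreateWeekday(object):
    """Stand-in for the mutation payload, which carries weekday and errors."""
    def __init__(self, weekday=None, errors=None):
        self.weekday = weekday
        self.errors = errors or []

def mutate(name):
    try:
        if not name:
            raise ValueError('name is required')
        # Returning Weekday(weekday=..., errors=...) here, as the buggy
        # version did, would raise TypeError: the model has no such kwargs.
        return CreateWeekday(weekday=Weekday(name=name))
    except ValueError as e:
        return CreateWeekday(errors=[str(e)])

assert mutate('Monday').weekday.name == 'Monday'
assert mutate('').errors == ['name is required']
```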
c54a1286200ce62ef5eddef436428c2244e94798 | totemlogs/elasticsearch.py | totemlogs/elasticsearch.py | from __future__ import absolute_import
from future.builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
from functools import wraps
import logging
from elasticsearch import Elasticsearch
from conf.appconfig import SEARCH_SETTINGS
MAPPING_LOCATION = './conf/index-mapping.json'
logger = logging.getLogger(__name__)
def using_search(fun):
"""
Function wrapper that automatically passes elastic search instance to
wrapped function.
:param fun: Function to be wrapped
:return: Wrapped function.
"""
@wraps(fun)
def outer(*args, **kwargs):
kwargs.setdefault('es', get_search_client())
kwargs.setdefault('idx', SEARCH_SETTINGS['default-index'])
return fun(*args, **kwargs)
return outer
def get_search_client():
"""
Creates the elasticsearch client instance using SEARCH_SETTINGS
:return: Instance of Elasticsearch
:rtype: elasticsearch.Elasticsearch
"""
return Elasticsearch(hosts=SEARCH_SETTINGS['host'],
port=SEARCH_SETTINGS['port'])
| from __future__ import absolute_import
from future.builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
from functools import wraps
import logging
from elasticsearch import Elasticsearch
from conf.appconfig import SEARCH_SETTINGS
MAPPING_LOCATION = './conf/index-mapping.json'
logger = logging.getLogger(__name__)
def using_search(fun):
"""
Function wrapper that automatically passes elastic search instance to
wrapped function.
:param fun: Function to be wrapped
:return: Wrapped function.
"""
@wraps(fun)
def outer(*args, **kwargs):
kwargs.setdefault('es', get_search_client())
kwargs.setdefault('idx', SEARCH_SETTINGS['default-index'])
return fun(*args, **kwargs)
return outer
def get_search_client():
"""
Creates the elasticsearch client instance using SEARCH_SETTINGS
:return: Instance of Elasticsearch
:rtype: elasticsearch.Elasticsearch
"""
return Elasticsearch(hosts=SEARCH_SETTINGS['host'],
port=SEARCH_SETTINGS['port'],
send_get_body_as='POST')
| Use POST instead of GET Request for ES Search API (Issue with query string size) | Use POST instead of GET Request for ES Search API (Issue with query string size)
| Python | mit | totem/totem-logs,totem/totem-logs,totem/totem-logs,totem/totem-logs | from __future__ import absolute_import
from future.builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
from functools import wraps
import logging
from elasticsearch import Elasticsearch
from conf.appconfig import SEARCH_SETTINGS
MAPPING_LOCATION = './conf/index-mapping.json'
logger = logging.getLogger(__name__)
def using_search(fun):
"""
Function wrapper that automatically passes elastic search instance to
wrapped function.
:param fun: Function to be wrapped
:return: Wrapped function.
"""
@wraps(fun)
def outer(*args, **kwargs):
kwargs.setdefault('es', get_search_client())
kwargs.setdefault('idx', SEARCH_SETTINGS['default-index'])
return fun(*args, **kwargs)
return outer
def get_search_client():
"""
Creates the elasticsearch client instance using SEARCH_SETTINGS
:return: Instance of Elasticsearch
:rtype: elasticsearch.Elasticsearch
"""
return Elasticsearch(hosts=SEARCH_SETTINGS['host'],
- port=SEARCH_SETTINGS['port'])
+ port=SEARCH_SETTINGS['port'],
+ send_get_body_as='POST')
| Use POST instead of GET Request for ES Search API (Issue with query string size) | ## Code Before:
from __future__ import absolute_import
from future.builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
from functools import wraps
import logging
from elasticsearch import Elasticsearch
from conf.appconfig import SEARCH_SETTINGS
MAPPING_LOCATION = './conf/index-mapping.json'
logger = logging.getLogger(__name__)
def using_search(fun):
"""
Function wrapper that automatically passes elastic search instance to
wrapped function.
:param fun: Function to be wrapped
:return: Wrapped function.
"""
@wraps(fun)
def outer(*args, **kwargs):
kwargs.setdefault('es', get_search_client())
kwargs.setdefault('idx', SEARCH_SETTINGS['default-index'])
return fun(*args, **kwargs)
return outer
def get_search_client():
"""
Creates the elasticsearch client instance using SEARCH_SETTINGS
:return: Instance of Elasticsearch
:rtype: elasticsearch.Elasticsearch
"""
return Elasticsearch(hosts=SEARCH_SETTINGS['host'],
port=SEARCH_SETTINGS['port'])
## Instruction:
Use POST instead of GET Request for ES Search API (Issue with query string size)
## Code After:
from __future__ import absolute_import
from future.builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
from functools import wraps
import logging
from elasticsearch import Elasticsearch
from conf.appconfig import SEARCH_SETTINGS
MAPPING_LOCATION = './conf/index-mapping.json'
logger = logging.getLogger(__name__)
def using_search(fun):
"""
Function wrapper that automatically passes elastic search instance to
wrapped function.
:param fun: Function to be wrapped
:return: Wrapped function.
"""
@wraps(fun)
def outer(*args, **kwargs):
kwargs.setdefault('es', get_search_client())
kwargs.setdefault('idx', SEARCH_SETTINGS['default-index'])
return fun(*args, **kwargs)
return outer
def get_search_client():
"""
Creates the elasticsearch client instance using SEARCH_SETTINGS
:return: Instance of Elasticsearch
:rtype: elasticsearch.Elasticsearch
"""
return Elasticsearch(hosts=SEARCH_SETTINGS['host'],
port=SEARCH_SETTINGS['port'],
send_get_body_as='POST')
| ...
return Elasticsearch(hosts=SEARCH_SETTINGS['host'],
port=SEARCH_SETTINGS['port'],
send_get_body_as='POST')
... |
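For context on the option itself: elasticsearch-py sends search requests as GET with a JSON body by default, which breaks behind proxies that drop GET bodies or cap URL length; `send_get_body_as='POST'` reroutes those calls. A sketch assuming the pre-8.x client, with an illustrative host and port:

```python
from elasticsearch import Elasticsearch

# Constructing the client does not contact the cluster, so this runs
# without a live Elasticsearch node.
es = Elasticsearch(
    hosts='localhost',
    port=9200,
    send_get_body_as='POST',  # resend body-carrying GETs as POST
)
```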
687f48ca94b67321a1576a1dbb1d7ae89fe6f0b7 | tests/test_pubannotation.py | tests/test_pubannotation.py |
import kindred
def test_pubannotation_groST():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
def test_pubannotation_wikiPain():
corpus = kindred.pubannotation.load('WikiPainGoldStandard')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 49
assert relationCount == 715
assert entityCount == 878
if __name__ == '__main__':
test_pubannotation()
|
import kindred
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
if __name__ == '__main__':
test_pubannotation()
| Remove one of the pubannotation tests as their data seems to change | Remove one of the pubannotation tests as their data seems to change
| Python | mit | jakelever/kindred,jakelever/kindred |
import kindred
- def test_pubannotation_groST():
+ def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
- def test_pubannotation_wikiPain():
- corpus = kindred.pubannotation.load('WikiPainGoldStandard')
-
- assert isinstance(corpus,kindred.Corpus)
-
- fileCount = len(corpus.documents)
- entityCount = sum([ len(d.entities) for d in corpus.documents ])
- relationCount = sum([ len(d.relations) for d in corpus.documents ])
-
- assert fileCount == 49
- assert relationCount == 715
- assert entityCount == 878
-
if __name__ == '__main__':
test_pubannotation()
| Remove one of the pubannotation tests as their data seems to change | ## Code Before:
import kindred
def test_pubannotation_groST():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
def test_pubannotation_wikiPain():
corpus = kindred.pubannotation.load('WikiPainGoldStandard')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 49
assert relationCount == 715
assert entityCount == 878
if __name__ == '__main__':
test_pubannotation()
## Instruction:
Remove one of the pubannotation tests as their data seems to change
## Code After:
import kindred
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
if __name__ == '__main__':
test_pubannotation()
| // ... existing code ...
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
// ... modified code ...
if __name__ == '__main__':
// ... rest of the code ... |
3382b5003eadec99f0816d9190038bd2caf6c412 | system_maintenance/urls.py | system_maintenance/urls.py | from django.conf.urls import patterns, url
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = patterns('',
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', 'django.contrib.auth.views.login', {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
)
| from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = [
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', auth_views.login, {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', auth_views.logout, {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
]
| Resolve Django 1.10 deprecation warnings | Resolve Django 1.10 deprecation warnings
| Python | bsd-3-clause | mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance | - from django.conf.urls import patterns, url
+ from django.conf.urls import url
+ from django.contrib.auth import views as auth_views
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
- urlpatterns = patterns('',
+ urlpatterns = [
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
- url(r'^authentication/$', 'django.contrib.auth.views.login', {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
+ url(r'^authentication/$', auth_views.login, {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
- url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/system_maintenance/'}, name='logout'),
+ url(r'^logout/$', auth_views.logout, {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
- )
+ ]
| Resolve Django 1.10 deprecation warnings | ## Code Before:
from django.conf.urls import patterns, url
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = patterns('',
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', 'django.contrib.auth.views.login', {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
)
## Instruction:
Resolve Django 1.10 deprecation warnings
## Code After:
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .views import (DocumentationRecordListView, DocumentationRecordDetailView,
MaintenanceRecordDetailView, MaintenanceRecordListView,
system_maintenance_home_view)
urlpatterns = [
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', auth_views.login, {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', auth_views.logout, {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
]
| ...
from django.conf.urls import url
from django.contrib.auth import views as auth_views
...
urlpatterns = [
url(r'^$', system_maintenance_home_view, name='system_maintenance_home_view'),
url(r'^authentication/$', auth_views.login, {'template_name': 'system_maintenance/authentication.html'}, name='authentication'),
url(r'^documentation/$', DocumentationRecordListView.as_view(), name='documentation_record_list'),
...
url(r'^documentation/(?P<pk>\d+)/$', DocumentationRecordDetailView.as_view(), name='documentation_record_detail'),
url(r'^logout/$', auth_views.logout, {'next_page': '/system_maintenance/'}, name='logout'),
url(r'^records/$', MaintenanceRecordListView.as_view(), name='maintenance_record_list'),
...
url(r'^records/(?P<pk>\d+)/$', MaintenanceRecordDetailView.as_view(), name='maintenance_record_detail'),
]
... |
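The same deprecation path kept moving: in Django 1.11 the function views used above were themselves superseded by class-based equivalents. A hedged sketch of what these two routes look like on 1.11+, with the paths and template name copied from the record and the rest being the standard auth views:

```python
from django.conf.urls import url
from django.contrib.auth import views as auth_views

urlpatterns = [
    url(r'^authentication/$',
        auth_views.LoginView.as_view(
            template_name='system_maintenance/authentication.html'),
        name='authentication'),
    url(r'^logout/$',
        auth_views.LogoutView.as_view(next_page='/system_maintenance/'),
        name='logout'),
]
```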
91b01e37897ea20f6486118e4dd595439f81006b | ktane/Model/Modules/WiresModule.py | ktane/Model/Modules/WiresModule.py | from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
NotImplementedError
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
| from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
counter = 0
for idx, value in enumerate(sequence):
if value != WireColors.MISSING.value:
counter += 1
if counter == n:
return idx
return None
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
| Implement Wires helper method get_nth_wire_position | Implement Wires helper method get_nth_wire_position
| Python | mit | hanzikl/ktane-controller | from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
- NotImplementedError
+ counter = 0
+ for idx, value in enumerate(sequence):
+ if value != WireColors.MISSING.value:
+ counter += 1
+ if counter == n:
+ return idx
+
+ return None
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
| Implement Wires helper method get_nth_wire_position | ## Code Before:
from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
NotImplementedError
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
## Instruction:
Implement Wires helper method get_nth_wire_position
## Code After:
from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
counter = 0
for idx, value in enumerate(sequence):
if value != WireColors.MISSING.value:
counter += 1
if counter == n:
return idx
return None
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
| # ... existing code ...
def get_nth_wire_position(sequence, n):
counter = 0
for idx, value in enumerate(sequence):
if value != WireColors.MISSING.value:
counter += 1
if counter == n:
return idx
return None
# ... rest of the code ... |
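A standalone usage sketch of the new helper, with an illustrative wire sequence; `n` counts physically present wires starting from 1, and the return value is the slot index:

```python
MISSING = 'missing'  # same sentinel as WireColors.MISSING.value

def get_nth_wire_position(sequence, n):
    counter = 0
    for idx, value in enumerate(sequence):
        if value != MISSING:
            counter += 1
            if counter == n:
                return idx
    return None

sequence = [MISSING, 'red', MISSING, 'blue', 'yellow']
assert get_nth_wire_position(sequence, 2) == 3    # 2nd real wire sits at index 3
assert get_nth_wire_position(sequence, 4) is None  # only 3 wires are present
```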
d971fbb4dc3b69e012b212cd54b6e8511571e1f5 | graphene/core/classtypes/uniontype.py | graphene/core/classtypes/uniontype.py | import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=lambda instance, info: cls._resolve_type(schema, instance, info),
description=cls._meta.description,
)
| from functools import partial
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
)
| Update to use partial instead of lambda function | Update to use partial instead of lambda function | Python | mit | sjhewitt/graphene,graphql-python/graphene,sjhewitt/graphene,Globegitter/graphene,graphql-python/graphene,Globegitter/graphene | + from functools import partial
+
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
- resolve_type=lambda instance, info: cls._resolve_type(schema, instance, info),
+ resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
)
| Update to use partial instead of lambda function | ## Code Before:
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=lambda instance, info: cls._resolve_type(schema, instance, info),
description=cls._meta.description,
)
## Instruction:
Update to use partial instead of lambda function
## Code After:
from functools import partial
import six
from graphql.core.type import GraphQLUnionType
from .base import FieldsClassType, FieldsClassTypeMeta, FieldsOptions
class UnionTypeOptions(FieldsOptions):
def __init__(self, *args, **kwargs):
super(UnionTypeOptions, self).__init__(*args, **kwargs)
self.types = []
class UnionTypeMeta(FieldsClassTypeMeta):
options_class = UnionTypeOptions
def get_options(cls, meta):
return cls.options_class(meta, types=[])
class UnionType(six.with_metaclass(UnionTypeMeta, FieldsClassType)):
class Meta:
abstract = True
@classmethod
def _resolve_type(cls, schema, instance, *args):
return schema.T(instance.__class__)
@classmethod
def internal_type(cls, schema):
if cls._meta.abstract:
raise Exception("Abstract ObjectTypes don't have a specific type.")
return GraphQLUnionType(
cls._meta.type_name,
types=list(map(schema.T, cls._meta.types)),
resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
)
| ...
from functools import partial
import six
...
types=list(map(schema.T, cls._meta.types)),
resolve_type=partial(cls._resolve_type, schema),
description=cls._meta.description,
... |
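The substitution is behavior-preserving: `partial(f, x)` pre-binds the leading argument and forwards the rest, exactly like the lambda it replaced, while staying picklable and easier to introspect. A minimal demonstration:

```python
from functools import partial

def resolve_type(schema, instance, info):
    return (schema, instance, info)

schema = 'schema'
as_lambda = lambda instance, info: resolve_type(schema, instance, info)
as_partial = partial(resolve_type, schema)

# Both callables produce identical results for the same arguments.
assert as_lambda('i', 'n') == as_partial('i', 'n') == ('schema', 'i', 'n')
```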
0595cc06357a572ef604d6c3e0b560974720524c | spacy/tests/regression/test_issue595.py | spacy/tests/regression/test_issue595.py | import pytest
import spacy
@pytest.mark.models
def test_not_lemmatize_base_forms():
nlp = spacy.load('en', parser=False)
doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
| from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
| Change test595 to mock data, instead of requiring model. | Change test595 to mock data, instead of requiring model.
| Python | mit | honnibal/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,raphael0202/spaCy,recognai/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,recognai/spaCy,raphael0202/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,banglakit/spaCy,raphael0202/spaCy,aikramer2/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,explosion/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,raphael0202/spaCy,recognai/spaCy,banglakit/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,raphael0202/spaCy,honnibal/spaCy,explosion/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy | + from __future__ import unicode_literals
import pytest
- import spacy
+ from ...symbols import POS, VERB, VerbForm_inf
+ from ...tokens import Doc
+ from ...vocab import Vocab
+ from ...lemmatizer import Lemmatizer
- @pytest.mark.models
+ @pytest.fixture
+ def index():
+ return {'verb': {}}
+
+ @pytest.fixture
+ def exceptions():
+ return {'verb': {}}
+
+ @pytest.fixture
+ def rules():
+ return {"verb": [["ed", "e"]]}
+
+ @pytest.fixture
+ def lemmatizer(index, exceptions, rules):
+ return Lemmatizer(index, exceptions, rules)
+
+
+ @pytest.fixture
+ def tag_map():
+ return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
+
+
+ @pytest.fixture
+ def vocab(lemmatizer, tag_map):
+ return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
+
+
- def test_not_lemmatize_base_forms():
+ def test_not_lemmatize_base_forms(vocab, lemmatizer):
+ doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
- nlp = spacy.load('en', parser=False)
- doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
| Change test595 to mock data, instead of requiring model. | ## Code Before:
import pytest
import spacy
@pytest.mark.models
def test_not_lemmatize_base_forms():
nlp = spacy.load('en', parser=False)
doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
## Instruction:
Change test595 to mock data, instead of requiring model.
## Code After:
from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
| // ... existing code ...
from __future__ import unicode_literals
import pytest
// ... modified code ...
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
...
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
// ... rest of the code ... |
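The mocked test leans on pytest's fixture chaining: each fixture requests the ones it needs by parameter name, so the test receives a fully wired `vocab` without loading a real model. The same wiring in isolation, with dict stand-ins for the spaCy objects:

```python
import pytest

@pytest.fixture
def rules():
    return {'verb': [['ed', 'e']]}

@pytest.fixture
def lemmatizer(rules):
    return {'rules': rules}            # stand-in for Lemmatizer(index, exc, rules)

@pytest.fixture
def vocab(lemmatizer):
    return {'lemmatizer': lemmatizer}  # stand-in for Vocab(lemmatizer=...)

def test_wiring(vocab):
    assert vocab['lemmatizer']['rules']['verb'] == [['ed', 'e']]
```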
8723611817a982907f3f0a98ed4678d587597002 | src/appleseed.python/test/runtests.py | src/appleseed.python/test/runtests.py |
import unittest
from testdict2dict import *
from testentitymap import *
from testentityvector import *
unittest.TestProgram(testRunner = unittest.TextTestRunner())
|
import unittest
from testbasis import *
from testdict2dict import *
from testentitymap import *
from testentityvector import *
unittest.TestProgram(testRunner = unittest.TextTestRunner())
| Add new unit tests to collection | Add new unit tests to collection
| Python | mit | pjessesco/appleseed,dictoon/appleseed,gospodnetic/appleseed,Aakash1312/appleseed,Vertexwahn/appleseed,aiivashchenko/appleseed,luisbarrancos/appleseed,appleseedhq/appleseed,Vertexwahn/appleseed,aytekaman/appleseed,glebmish/appleseed,aytekaman/appleseed,pjessesco/appleseed,est77/appleseed,dictoon/appleseed,luisbarrancos/appleseed,gospodnetic/appleseed,Biart95/appleseed,glebmish/appleseed,appleseedhq/appleseed,Vertexwahn/appleseed,appleseedhq/appleseed,dictoon/appleseed,aiivashchenko/appleseed,gospodnetic/appleseed,gospodnetic/appleseed,Biart95/appleseed,luisbarrancos/appleseed,Vertexwahn/appleseed,Biart95/appleseed,dictoon/appleseed,aiivashchenko/appleseed,aiivashchenko/appleseed,Aakash1312/appleseed,Biart95/appleseed,Aakash1312/appleseed,Aakash1312/appleseed,dictoon/appleseed,appleseedhq/appleseed,Aakash1312/appleseed,aytekaman/appleseed,pjessesco/appleseed,est77/appleseed,pjessesco/appleseed,glebmish/appleseed,aiivashchenko/appleseed,Vertexwahn/appleseed,aytekaman/appleseed,luisbarrancos/appleseed,pjessesco/appleseed,luisbarrancos/appleseed,aytekaman/appleseed,est77/appleseed,est77/appleseed,gospodnetic/appleseed,Biart95/appleseed,glebmish/appleseed,glebmish/appleseed,est77/appleseed,appleseedhq/appleseed |
import unittest
+ from testbasis import *
from testdict2dict import *
from testentitymap import *
from testentityvector import *
unittest.TestProgram(testRunner = unittest.TextTestRunner())
| Add new unit tests to collection | ## Code Before:
import unittest
from testdict2dict import *
from testentitymap import *
from testentityvector import *
unittest.TestProgram(testRunner = unittest.TextTestRunner())
## Instruction:
Add new unit tests to collection
## Code After:
import unittest
from testbasis import *
from testdict2dict import *
from testentitymap import *
from testentityvector import *
unittest.TestProgram(testRunner = unittest.TextTestRunner())
| ...
from testbasis import *
from testdict2dict import *
... |
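The star import is what makes this runner work: `unittest.TestProgram` defaults to collecting tests from `__main__`, so any `TestCase` subclass pulled into this module's namespace gets discovered and run. A self-contained illustration of the mechanism:

```python
import unittest

class ImportedElsewhere(unittest.TestCase):
    """Stands in for a class brought in via 'from testbasis import *'."""
    def test_ok(self):
        self.assertTrue(True)

if __name__ == '__main__':
    # No explicit suite: TestProgram scans __main__ for TestCase subclasses.
    unittest.TestProgram(testRunner=unittest.TextTestRunner())
```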
c7c1fa91a0ec213bd648f2f50f95f5652891d3ab | main/readability_graph.py | main/readability_graph.py | import graph
from corpus.mysql.reddit import RedditMySQLCorpus
import cred
if __name__ == '__main__':
corpus = RedditMySQLCorpus()
corpus.setup(**(cred.kwargs))
result = corpus.run_sql('SELECT ari FROM comment_feature_read', None)
print('Got results')
values = [ result[i]['ari'] for i in result ]
graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values')
| import graph
from corpus.mysql.reddit import RedditMySQLCorpus
import cred
if __name__ == '__main__':
corpus = RedditMySQLCorpus()
corpus.setup(**(cred.kwargs))
result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None)
print('Got results')
values = [ result[i]['ari'] for i in result ]
graph.hist('data/ari_hist', values, 'ARI', 'Frequency',
'Frequency of ARI values')
values = [ result[i]['flesch_reading_ease'] for i in result ]
graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency',
'Frequency of Flesch Reading Ease values')
values = [ result[i]['flesch_kincaid_grade_level'] for i in result ]
graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency',
'Frequency of Flesch Kincaid Grade Level values')
values = [ result[i]['gunning_fog_index'] for i in result ]
graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency',
'Frequency of Gunning Fog Index values')
values = [ result[i]['smog_index'] for i in result ]
graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency',
'Frequency of Smog Index values')
values = [ result[i]['coleman_liau_index'] for i in result ]
graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency',
'Frequency of Coleman Liau Index values')
values = [ result[i]['lix'] for i in result ]
graph.hist('data/lix_hist', values, 'LIX', 'Frequency',
'Frequency of LIX values')
values = [ result[i]['rix'] for i in result ]
graph.hist('data/rix_hist', values, 'RIX', 'Frequency',
'Frequency of RIX values')
| Add other statistical measures for graphing | Add other statistical measures for graphing
| Python | mit | worldwise001/stylometry | import graph
from corpus.mysql.reddit import RedditMySQLCorpus
import cred
if __name__ == '__main__':
corpus = RedditMySQLCorpus()
corpus.setup(**(cred.kwargs))
- result = corpus.run_sql('SELECT ari FROM comment_feature_read', None)
+ result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None)
print('Got results')
+
values = [ result[i]['ari'] for i in result ]
- graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values')
+ graph.hist('data/ari_hist', values, 'ARI', 'Frequency',
+ 'Frequency of ARI values')
+ values = [ result[i]['flesch_reading_ease'] for i in result ]
+ graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency',
+ 'Frequency of Flesch Reading Ease values')
+
+ values = [ result[i]['flesch_kincaid_grade_level'] for i in result ]
+ graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency',
+ 'Frequency of Flesch Kincaid Grade Level values')
+
+ values = [ result[i]['gunning_fog_index'] for i in result ]
+ graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency',
+ 'Frequency of Gunning Fog Index values')
+
+ values = [ result[i]['smog_index'] for i in result ]
+ graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency',
+ 'Frequency of Smog Index values')
+
+ values = [ result[i]['coleman_liau_index'] for i in result ]
+ graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency',
+ 'Frequency of Coleman Liau Index values')
+
+ values = [ result[i]['lix'] for i in result ]
+ graph.hist('data/lix_hist', values, 'LIX', 'Frequency',
+ 'Frequency of LIX values')
+
+ values = [ result[i]['rix'] for i in result ]
+ graph.hist('data/rix_hist', values, 'RIX', 'Frequency',
+ 'Frequency of RIX values')
+ | Add other statistical measures for graphing | ## Code Before:
import graph
from corpus.mysql.reddit import RedditMySQLCorpus
import cred
if __name__ == '__main__':
corpus = RedditMySQLCorpus()
corpus.setup(**(cred.kwargs))
result = corpus.run_sql('SELECT ari FROM comment_feature_read', None)
print('Got results')
values = [ result[i]['ari'] for i in result ]
graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values')
## Instruction:
Add other statistical measures for graphing
## Code After:
import graph
from corpus.mysql.reddit import RedditMySQLCorpus
import cred
if __name__ == '__main__':
corpus = RedditMySQLCorpus()
corpus.setup(**(cred.kwargs))
result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None)
print('Got results')
values = [ result[i]['ari'] for i in result ]
graph.hist('data/ari_hist', values, 'ARI', 'Frequency',
'Frequency of ARI values')
values = [ result[i]['flesch_reading_ease'] for i in result ]
graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency',
'Frequency of Flesch Reading Ease values')
values = [ result[i]['flesch_kincaid_grade_level'] for i in result ]
graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency',
'Frequency of Flesch Kincaid Grade Level values')
values = [ result[i]['gunning_fog_index'] for i in result ]
graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency',
'Frequency of Gunning Fog Index values')
values = [ result[i]['smog_index'] for i in result ]
graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency',
'Frequency of Smog Index values')
values = [ result[i]['coleman_liau_index'] for i in result ]
graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency',
'Frequency of Coleman Liau Index values')
values = [ result[i]['lix'] for i in result ]
graph.hist('data/lix_hist', values, 'LIX', 'Frequency',
'Frequency of LIX values')
values = [ result[i]['rix'] for i in result ]
graph.hist('data/rix_hist', values, 'RIX', 'Frequency',
'Frequency of RIX values')
| # ... existing code ...
result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None)
print('Got results')
values = [ result[i]['ari'] for i in result ]
graph.hist('data/ari_hist', values, 'ARI', 'Frequency',
'Frequency of ARI values')
values = [ result[i]['flesch_reading_ease'] for i in result ]
graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency',
'Frequency of Flesch Reading Ease values')
values = [ result[i]['flesch_kincaid_grade_level'] for i in result ]
graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency',
'Frequency of Flesch Kincaid Grade Level values')
values = [ result[i]['gunning_fog_index'] for i in result ]
graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency',
'Frequency of Gunning Fog Index values')
values = [ result[i]['smog_index'] for i in result ]
graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency',
'Frequency of Smog Index values')
values = [ result[i]['coleman_liau_index'] for i in result ]
graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency',
'Frequency of Coleman Liau Index values')
values = [ result[i]['lix'] for i in result ]
graph.hist('data/lix_hist', values, 'LIX', 'Frequency',
'Frequency of LIX values')
values = [ result[i]['rix'] for i in result ]
graph.hist('data/rix_hist', values, 'RIX', 'Frequency',
'Frequency of RIX values')
# ... rest of the code ... |
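The eight blocks above differ only in dictionary key and axis label, so a data-driven loop is a natural refactor. This sketch is not standalone: it reuses `result` and `graph.hist` from the snippet above:

```python
METRICS = [
    ('ari', 'ARI'),
    ('flesch_reading_ease', 'Flesch Reading Ease'),
    ('flesch_kincaid_grade_level', 'Flesch Kincaid Grade Level'),
    ('gunning_fog_index', 'Gunning Fog Index'),
    ('smog_index', 'Smog Index'),
    ('coleman_liau_index', 'Coleman Liau Index'),
    ('lix', 'LIX'),
    ('rix', 'RIX'),
]

for key, label in METRICS:
    values = [result[i][key] for i in result]
    graph.hist('data/%s_hist' % key, values, label, 'Frequency',
               'Frequency of %s values' % label)
```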
e4f7deee8c4154781c2e945bfc14cf2028586dc1 | hellopython/print_method/__init__.py | hellopython/print_method/__init__.py | import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
| import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
| Add a title to the print_method problem | Add a title to the print_method problem
| Python | mit | pyschool/hipyschool | import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
+ title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
| Add a title to the print_method problem | ## Code Before:
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
## Instruction:
Add a title to the print_method problem
## Code After:
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
| # ... existing code ...
class Problem(BaseProblem):
title = 'Print method'
# ... rest of the code ... |
65b418b8eaa8f57fdd3c8207168451da20b452bf | src/python/rgplot/RgChart.py | src/python/rgplot/RgChart.py | import matplotlib.pyplot as plt
#class RgChart(object):
#__metaclass__ = ABCMeta
class RgChart(object):
def with_grids(self):
self._ax.xaxis.grid(True)
self._ax.yaxis.grid(True)
return self
def save_as(self, filename):
self._create_plot()
self._fig.savefig(filename)
plt.close(self._fig) # close on save to avoid memory issues
def with_ygrid(self):
self._ax.yaxis.grid(True)
return self
def with_title(self, title = None):
if title is None:
plt.title(self._title)
else:
plt.title(title)
return self
def with_xlabel(self, xlabel = None):
if xlabel is None:
plt.xlabel(self._xlabel)
else:
plt.xlabel(xlabel)
return self
def with_ylabel(self, ylabel = None):
if ylabel is None:
plt.ylabel(self._ylabel)
else:
plt.ylabel(ylabel)
return self
def with_ylim(self, lim):
self._ax.set_ylim(lim)
return self
def wo_xticks(self):
self._ax.get_xaxis().set_ticks([])
return self
def wo_yticks(self):
self._ax.get_yaxis().set_ticks([])
return self
def _create_plot(self):
pass
| import matplotlib.pyplot as plt
#class RgChart(object):
#__metaclass__ = ABCMeta
class RgChart(object):
TITLE_Y_OFFSET = 1.08
def with_grids(self):
self._ax.xaxis.grid(True)
self._ax.yaxis.grid(True)
return self
def save_as(self, filename):
self._create_plot()
self._fig.savefig(filename)
plt.close(self._fig) # close on save to avoid memory issues
def with_ygrid(self):
self._ax.yaxis.grid(True)
return self
def with_title(self, title = None, y_offset = RgChart.TITLE_Y_OFFSET):
if title is None:
plt.title(self._title, y = y_offset)
else:
plt.title(title, y = y_offset)
return self
def with_xlabel(self, xlabel = None):
if xlabel is None:
plt.xlabel(self._xlabel)
else:
plt.xlabel(xlabel)
return self
def with_ylabel(self, ylabel = None):
if ylabel is None:
plt.ylabel(self._ylabel)
else:
plt.ylabel(ylabel)
return self
def with_ylog(self):
self._ax.set_yscale('log')
return self
def with_ylim(self, lim):
self._ax.set_ylim(lim)
return self
def wo_xticks(self):
self._ax.get_xaxis().set_ticks([])
return self
def wo_yticks(self):
self._ax.get_yaxis().set_ticks([])
return self
def _create_plot(self):
pass
| Add y log option and title offset | Add y log option and title offset
| Python | mit | vjuranek/rg-offline-plotting,vjuranek/rg-offline-plotting | import matplotlib.pyplot as plt
#class RgChart(object):
#__metaclass__ = ABCMeta
class RgChart(object):
+
+ TITLE_Y_OFFSET = 1.08
def with_grids(self):
self._ax.xaxis.grid(True)
self._ax.yaxis.grid(True)
return self
def save_as(self, filename):
self._create_plot()
self._fig.savefig(filename)
plt.close(self._fig) # close on save to avoid memory issues
def with_ygrid(self):
self._ax.yaxis.grid(True)
return self
- def with_title(self, title = None):
+ def with_title(self, title = None, y_offset = RgChart.TITLE_Y_OFFSET):
if title is None:
- plt.title(self._title)
+ plt.title(self._title, y = y_offset)
else:
- plt.title(title)
+ plt.title(title, y = y_offset)
return self
def with_xlabel(self, xlabel = None):
if xlabel is None:
plt.xlabel(self._xlabel)
else:
plt.xlabel(xlabel)
return self
def with_ylabel(self, ylabel = None):
if ylabel is None:
plt.ylabel(self._ylabel)
else:
plt.ylabel(ylabel)
return self
+ def with_ylog(self):
+ self._ax.set_yscale('log')
+ return self
+
def with_ylim(self, lim):
self._ax.set_ylim(lim)
return self
def wo_xticks(self):
self._ax.get_xaxis().set_ticks([])
return self
def wo_yticks(self):
self._ax.get_yaxis().set_ticks([])
return self
def _create_plot(self):
pass
| Add y log option and title offset | ## Code Before:
import matplotlib.pyplot as plt
#class RgChart(object):
#__metaclass__ = ABCMeta
class RgChart(object):
def with_grids(self):
self._ax.xaxis.grid(True)
self._ax.yaxis.grid(True)
return self
def save_as(self, filename):
self._create_plot()
self._fig.savefig(filename)
plt.close(self._fig) # close on save to avoid memory issues
def with_ygrid(self):
self._ax.yaxis.grid(True)
return self
def with_title(self, title = None):
if title is None:
plt.title(self._title)
else:
plt.title(title)
return self
def with_xlabel(self, xlabel = None):
if xlabel is None:
plt.xlabel(self._xlabel)
else:
plt.xlabel(xlabel)
return self
def with_ylabel(self, ylabel = None):
if ylabel is None:
plt.ylabel(self._ylabel)
else:
plt.ylabel(ylabel)
return self
def with_ylim(self, lim):
self._ax.set_ylim(lim)
return self
def wo_xticks(self):
self._ax.get_xaxis().set_ticks([])
return self
def wo_yticks(self):
self._ax.get_yaxis().set_ticks([])
return self
def _create_plot(self):
pass
## Instruction:
Add y log option and title offset
## Code After:
import matplotlib.pyplot as plt
#class RgChart(object):
#__metaclass__ = ABCMeta
class RgChart(object):
TITLE_Y_OFFSET = 1.08
def with_grids(self):
self._ax.xaxis.grid(True)
self._ax.yaxis.grid(True)
return self
def save_as(self, filename):
self._create_plot()
self._fig.savefig(filename)
plt.close(self._fig) # close on save to avoid memory issues
def with_ygrid(self):
self._ax.yaxis.grid(True)
return self
def with_title(self, title = None, y_offset = RgChart.TITLE_Y_OFFSET):
if title is None:
plt.title(self._title, y = y_offset)
else:
plt.title(title, y = y_offset)
return self
def with_xlabel(self, xlabel = None):
if xlabel is None:
plt.xlabel(self._xlabel)
else:
plt.xlabel(xlabel)
return self
def with_ylabel(self, ylabel = None):
if ylabel is None:
plt.ylabel(self._ylabel)
else:
plt.ylabel(ylabel)
return self
def with_ylog(self):
self._ax.set_yscale('log')
return self
def with_ylim(self, lim):
self._ax.set_ylim(lim)
return self
def wo_xticks(self):
self._ax.get_xaxis().set_ticks([])
return self
def wo_yticks(self):
self._ax.get_yaxis().set_ticks([])
return self
def _create_plot(self):
pass
| # ... existing code ...
class RgChart(object):
TITLE_Y_OFFSET = 1.08
# ... modified code ...
def with_title(self, title = None, y_offset = RgChart.TITLE_Y_OFFSET):
if title is None:
plt.title(self._title, y = y_offset)
else:
plt.title(title, y = y_offset)
return self
...
def with_ylog(self):
self._ax.set_yscale('log')
return self
def with_ylim(self, lim):
# ... rest of the code ... |
2adfcea14f292bacfbae906a70d6395304acf607 | addons/bestja_volunteer_pesel/models.py | addons/bestja_volunteer_pesel/models.py | from operator import mul
from openerp import models, fields, api, exceptions
class Volunteer(models.Model):
_inherit = 'res.users'
pesel = fields.Char(string=u"PESEL")
def __init__(self, pool, cr):
super(Volunteer, self).__init__(pool, cr)
self._add_permitted_fields(level='owner', fields={'pesel'})
@api.one
@api.constrains('pesel')
def _check_pesel(self):
if not self.pesel:
return
try:
digits = map(int, self.pesel)
except ValueError:
raise exceptions.ValidationError("Numer PESEL może składać się wyłącznie z cyfr!")
weights = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3)
control_sum = -(sum(map(mul, digits[:-1], weights))) % 10
if len(digits) != 11 or control_sum != digits[-1]:
raise exceptions.ValidationError("Niepoprawny numer PESEL.")
| from operator import mul
from openerp import models, fields, api, exceptions
class Volunteer(models.Model):
_inherit = 'res.users'
pesel = fields.Char(string=u"PESEL")
def __init__(self, pool, cr):
super(Volunteer, self).__init__(pool, cr)
self._add_permitted_fields(level='owner', fields={'pesel'})
@api.one
@api.constrains('pesel')
def _check_pesel(self):
if not self.pesel:
return
try:
digits = map(int, self.pesel)
except ValueError:
raise exceptions.ValidationError("Numer PESEL może składać się wyłącznie z cyfr!")
if len(digits) != 11:
raise exceptions.ValidationError("Numer PESEL musi składać się z 11 cyfr!")
weights = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3)
control_sum = -(sum(map(mul, digits[:-1], weights))) % 10
if control_sum != digits[-1]:
raise exceptions.ValidationError("Niepoprawny numer PESEL.")
| Fix a problem with PESEL validation | Fix a problem with PESEL validation
| Python | agpl-3.0 | KrzysiekJ/bestja,ludwiktrammer/bestja,EE/bestja,ludwiktrammer/bestja,EE/bestja,EE/bestja,KrzysiekJ/bestja,ludwiktrammer/bestja,KrzysiekJ/bestja | from operator import mul
from openerp import models, fields, api, exceptions
class Volunteer(models.Model):
_inherit = 'res.users'
pesel = fields.Char(string=u"PESEL")
def __init__(self, pool, cr):
super(Volunteer, self).__init__(pool, cr)
self._add_permitted_fields(level='owner', fields={'pesel'})
@api.one
@api.constrains('pesel')
def _check_pesel(self):
if not self.pesel:
return
try:
digits = map(int, self.pesel)
except ValueError:
raise exceptions.ValidationError("Numer PESEL może składać się wyłącznie z cyfr!")
+ if len(digits) != 11:
+ raise exceptions.ValidationError("Numer PESEL musi składać się z 11 cyfr!")
+
weights = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3)
control_sum = -(sum(map(mul, digits[:-1], weights))) % 10
- if len(digits) != 11 or control_sum != digits[-1]:
+ if control_sum != digits[-1]:
raise exceptions.ValidationError("Niepoprawny numer PESEL.")
| Fix a problem with PESEL validation | ## Code Before:
from operator import mul
from openerp import models, fields, api, exceptions
class Volunteer(models.Model):
_inherit = 'res.users'
pesel = fields.Char(string=u"PESEL")
def __init__(self, pool, cr):
super(Volunteer, self).__init__(pool, cr)
self._add_permitted_fields(level='owner', fields={'pesel'})
@api.one
@api.constrains('pesel')
def _check_pesel(self):
if not self.pesel:
return
try:
digits = map(int, self.pesel)
except ValueError:
raise exceptions.ValidationError("Numer PESEL może składać się wyłącznie z cyfr!")
weights = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3)
control_sum = -(sum(map(mul, digits[:-1], weights))) % 10
if len(digits) != 11 or control_sum != digits[-1]:
raise exceptions.ValidationError("Niepoprawny numer PESEL.")
## Instruction:
Fix a problem with PESEL validation
## Code After:
from operator import mul
from openerp import models, fields, api, exceptions
class Volunteer(models.Model):
_inherit = 'res.users'
pesel = fields.Char(string=u"PESEL")
def __init__(self, pool, cr):
super(Volunteer, self).__init__(pool, cr)
self._add_permitted_fields(level='owner', fields={'pesel'})
@api.one
@api.constrains('pesel')
def _check_pesel(self):
if not self.pesel:
return
try:
digits = map(int, self.pesel)
except ValueError:
raise exceptions.ValidationError("Numer PESEL może składać się wyłącznie z cyfr!")
if len(digits) != 11:
raise exceptions.ValidationError("Numer PESEL musi składać się z 11 cyfr!")
weights = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3)
control_sum = -(sum(map(mul, digits[:-1], weights))) % 10
if control_sum != digits[-1]:
raise exceptions.ValidationError("Niepoprawny numer PESEL.")
| // ... existing code ...
if len(digits) != 11:
raise exceptions.ValidationError("Numer PESEL musi składać się z 11 cyfr!")
weights = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3)
// ... modified code ...
control_sum = -(sum(map(mul, digits[:-1], weights))) % 10
if control_sum != digits[-1]:
raise exceptions.ValidationError("Niepoprawny numer PESEL.")
// ... rest of the code ... |
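For the checksum itself: the first ten digits are dotted with the weights `(1, 3, 7, 9, ...)`, and the negation of that sum modulo 10 must equal the eleventh digit. A standalone sketch that derives a check digit rather than asserting any real person's number; the prefix is arbitrary:

```python
WEIGHTS = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3)

def pesel_check_digit(first_ten):
    # Python's % always yields a non-negative result, matching the record.
    return -sum(d * w for d, w in zip(first_ten, WEIGHTS)) % 10

prefix = [4, 4, 0, 5, 1, 4, 0, 1, 3, 5]      # arbitrary 10-digit prefix
full = prefix + [pesel_check_digit(prefix)]  # append the derived check digit

assert len(full) == 11
assert pesel_check_digit(full[:10]) == full[10]
```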
9dc068f947cbd5ca29b324436496d2d78f55edf7 | src/trajectory/trajectory.py | src/trajectory/trajectory.py | import math
from geometry_msgs.msg import Point
class NegativeTimeException(Exception):
pass
class Trajectory:
def __init__(self):
self.position = Point()
def get_position_at(self, t):
if t < 0:
raise NegativeTimeException()
| import math
from twisted.conch.insults.insults import Vector
from geometry_msgs.msg import Point
class NegativeTimeException(Exception):
pass
class Trajectory:
def __init__(self):
self.position = Point()
def get_position_at(self, t):
if t < 0:
raise NegativeTimeException()
def abs_vector(self):
return (self.x * self.x + self.y * self.y) ** 0.5
def sub_point(self, other):
return Vector(self.x - other.x, self.y - other.y)
Vector.__abs__ = abs_vector
Point.__sub__ = sub_point
| Implement __abs__ and __sub__ dunder methods | feat: Implement __abs__ and __sub__ dunder methods
Implement methods which are required to apply assertEqual and assertAlmostEqual to points. | Python | mit | bit0001/trajectory_tracking,bit0001/trajectory_tracking | import math
+ from twisted.conch.insults.insults import Vector
from geometry_msgs.msg import Point
class NegativeTimeException(Exception):
pass
class Trajectory:
def __init__(self):
self.position = Point()
def get_position_at(self, t):
if t < 0:
raise NegativeTimeException()
+ def abs_vector(self):
+ return (self.x * self.x + self.y * self.y) ** 0.5
+
+
+ def sub_point(self, other):
+ return Vector(self.x - other.x, self.y - other.y)
+
+
+ Vector.__abs__ = abs_vector
+ Point.__sub__ = sub_point
+ | Implement __abs__ and __sub__ dunder methods | ## Code Before:
import math
from geometry_msgs.msg import Point
class NegativeTimeException(Exception):
pass
class Trajectory:
def __init__(self):
self.position = Point()
def get_position_at(self, t):
if t < 0:
raise NegativeTimeException()
## Instruction:
Implement __abs__ and __sub__ dunder methods
## Code After:
import math
from twisted.conch.insults.insults import Vector
from geometry_msgs.msg import Point
class NegativeTimeException(Exception):
pass
class Trajectory:
def __init__(self):
self.position = Point()
def get_position_at(self, t):
if t < 0:
raise NegativeTimeException()
def abs_vector(self):
return (self.x * self.x + self.y * self.y) ** 0.5
def sub_point(self, other):
return Vector(self.x - other.x, self.y - other.y)
Vector.__abs__ = abs_vector
Point.__sub__ = sub_point
| ...
import math
from twisted.conch.insults.insults import Vector
...
raise NegativeTimeException()
def abs_vector(self):
return (self.x * self.x + self.y * self.y) ** 0.5
def sub_point(self, other):
return Vector(self.x - other.x, self.y - other.y)
Vector.__abs__ = abs_vector
Point.__sub__ = sub_point
... |
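A rough sketch of what the two assignments buy in tests: with __sub__ returning a Vector and __abs__ giving its length, assertAlmostEqual can compare points through a single float. Plain stand-in classes are used here, not the real ROS/Twisted types:
class Vector(object):  # stand-in for twisted.conch.insults.insults.Vector
    def __init__(self, x, y):
        self.x, self.y = x, y
    def __abs__(self):
        return (self.x * self.x + self.y * self.y) ** 0.5
class Point(object):  # stand-in for geometry_msgs.msg.Point
    def __init__(self, x, y):
        self.x, self.y = x, y
    def __sub__(self, other):
        return Vector(self.x - other.x, self.y - other.y)
assert abs(Point(3.0, 4.0) - Point(0.0, 0.0)) == 5.0  # distance as one number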
9127e56a26e836c7e2a66359a9f9b67e6c7f8474 | ovp_users/tests/test_filters.py | ovp_users/tests/test_filters.py | from django.test import TestCase
from ovp_users.recover_password import RecoveryTokenFilter
from ovp_users.recover_password import RecoverPasswordFilter
def test_filter(c):
obj = c()
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
def TestPasswordRecoveryFilters(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and
# make sure it throws no error
test_filter(RecoveryTokenFilter)
test_filter(RecoverPasswordFilter)
| from django.test import TestCase
from ovp_users.recover_password import RecoveryTokenFilter
from ovp_users.recover_password import RecoverPasswordFilter
def test_filter(c):
obj = c()
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
def PasswordRecoveryFiltersTestCase(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and
# make sure it throws no error
test_filter(RecoveryTokenFilter)
test_filter(RecoverPasswordFilter)
| Fix PasswordRecovery test case name | Fix PasswordRecovery test case name
| Python | agpl-3.0 | OpenVolunteeringPlatform/django-ovp-users,OpenVolunteeringPlatform/django-ovp-users | from django.test import TestCase
from ovp_users.recover_password import RecoveryTokenFilter
from ovp_users.recover_password import RecoverPasswordFilter
def test_filter(c):
obj = c()
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
- def TestPasswordRecoveryFilters(TestCase):
+ def PasswordRecoveryFiltersTestCase(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and
# make sure it throws no error
test_filter(RecoveryTokenFilter)
test_filter(RecoverPasswordFilter)
| Fix PasswordRecovery test case name | ## Code Before:
from django.test import TestCase
from ovp_users.recover_password import RecoveryTokenFilter
from ovp_users.recover_password import RecoverPasswordFilter
def test_filter(c):
obj = c()
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
def TestPasswordRecoveryFilters(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and
# make sure it throws no error
test_filter(RecoveryTokenFilter)
test_filter(RecoverPasswordFilter)
## Instruction:
Fix PasswordRecovery test case name
## Code After:
from django.test import TestCase
from ovp_users.recover_password import RecoveryTokenFilter
from ovp_users.recover_password import RecoverPasswordFilter
def test_filter(c):
obj = c()
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
def PasswordRecoveryFiltersTestCase(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and
# make sure it throws no error
test_filter(RecoveryTokenFilter)
test_filter(RecoverPasswordFilter)
| ...
def PasswordRecoveryFiltersTestCase(TestCase):
def test_filters():
... |
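Note that both the old and the new file declare the test case with def rather than class, so unittest never discovers it; the rename alone does not change that. A hypothetical corrected form, reusing the helpers defined above:
import unittest
class PasswordRecoveryFiltersTestCase(unittest.TestCase):  # class, not def
    def test_filters(self):  # test methods take self
        """Assert filters do not throw error when instantiated"""
        test_filter(RecoveryTokenFilter)
        test_filter(RecoverPasswordFilter)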
95c5b9c139bf69ac11338a4b2eaa9b8179d27284 | tests/test_async.py | tests/test_async.py | from asyncio import Future, gather, new_event_loop, sleep
from twisted.internet.defer import Deferred, ensureDeferred
from pyee import EventEmitter
def test_asyncio_emit():
"""Test that event_emitters can handle wrapping coroutines as used with
asyncio.
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(0.1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
assert result
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
loop.close()
def test_twisted_emit():
"""Test that event_emitters can handle wrapping coroutines when using
twisted and ensureDeferred.
"""
ee = EventEmitter(scheduler=ensureDeferred)
should_call = Deferred()
@ee.on('event')
async def event_handler():
should_call.callback(True)
@should_call.addCallback
def _done(result):
assert result
@should_call.addErrback
def _err(exc):
raise exc
ee.emit('event')
| from asyncio import Future, gather, new_event_loop, sleep
from mock import Mock
from twisted.internet.defer import ensureDeferred
from pyee import EventEmitter
def test_asyncio_emit():
"""Test that event_emitters can handle wrapping coroutines as used with
asyncio.
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(0.1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
assert result
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
loop.close()
def test_twisted_emit():
"""Test that event_emitters can handle wrapping coroutines when using
twisted and ensureDeferred.
"""
ee = EventEmitter(scheduler=ensureDeferred)
should_call = Mock()
@ee.on('event')
async def event_handler():
should_call(True)
ee.emit('event')
should_call.assert_called_once()
| Replace my deferred with a mock | Replace my deferred with a mock
| Python | mit | jfhbrook/pyee | from asyncio import Future, gather, new_event_loop, sleep
+ from mock import Mock
- from twisted.internet.defer import Deferred, ensureDeferred
+ from twisted.internet.defer import ensureDeferred
from pyee import EventEmitter
def test_asyncio_emit():
"""Test that event_emitters can handle wrapping coroutines as used with
asyncio.
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(0.1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
assert result
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
loop.close()
def test_twisted_emit():
"""Test that event_emitters can handle wrapping coroutines when using
twisted and ensureDeferred.
"""
ee = EventEmitter(scheduler=ensureDeferred)
- should_call = Deferred()
+ should_call = Mock()
@ee.on('event')
async def event_handler():
- should_call.callback(True)
+ should_call(True)
-
- @should_call.addCallback
- def _done(result):
- assert result
-
- @should_call.addErrback
- def _err(exc):
- raise exc
ee.emit('event')
+ should_call.assert_called_once()
+ | Replace my deferred with a mock | ## Code Before:
from asyncio import Future, gather, new_event_loop, sleep
from twisted.internet.defer import Deferred, ensureDeferred
from pyee import EventEmitter
def test_asyncio_emit():
"""Test that event_emitters can handle wrapping coroutines as used with
asyncio.
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(0.1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
assert result
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
loop.close()
def test_twisted_emit():
"""Test that event_emitters can handle wrapping coroutines when using
twisted and ensureDeferred.
"""
ee = EventEmitter(scheduler=ensureDeferred)
should_call = Deferred()
@ee.on('event')
async def event_handler():
should_call.callback(True)
@should_call.addCallback
def _done(result):
assert result
@should_call.addErrback
def _err(exc):
raise exc
ee.emit('event')
## Instruction:
Replace my deferred with a mock
## Code After:
from asyncio import Future, gather, new_event_loop, sleep
from mock import Mock
from twisted.internet.defer import ensureDeferred
from pyee import EventEmitter
def test_asyncio_emit():
"""Test that event_emitters can handle wrapping coroutines as used with
asyncio.
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(0.1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
assert result
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
loop.close()
def test_twisted_emit():
"""Test that event_emitters can handle wrapping coroutines when using
twisted and ensureDeferred.
"""
ee = EventEmitter(scheduler=ensureDeferred)
should_call = Mock()
@ee.on('event')
async def event_handler():
should_call(True)
ee.emit('event')
should_call.assert_called_once()
| // ... existing code ...
from asyncio import Future, gather, new_event_loop, sleep
from mock import Mock
from twisted.internet.defer import ensureDeferred
// ... modified code ...
should_call = Mock()
...
async def event_handler():
should_call(True)
...
should_call.assert_called_once()
// ... rest of the code ... |
9f05a8917ee6fd01a334ef2e1e57062be8ef13af | byceps/config_defaults.py | byceps/config_defaults.py |
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
| Enable DBMS pool pre-pinging to avoid connection errors | Enable DBMS pool pre-pinging to avoid connection errors
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
+
+ # Avoid connection errors after database becomes temporarily
+ # unreachable, then becomes reachable again.
+ SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
| Enable DBMS pool pre-pinging to avoid connection errors | ## Code Before:
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
## Instruction:
Enable DBMS pool pre-pinging to avoid connection errors
## Code After:
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
| // ... existing code ...
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
// ... rest of the code ... |
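For reference, SQLALCHEMY_ENGINE_OPTIONS is forwarded to SQLAlchemy's create_engine, so the setting is equivalent to the following sketch (the URL is a placeholder):
from sqlalchemy import create_engine
# pool_pre_ping issues a lightweight test statement when a pooled connection
# is checked out and transparently recycles it if the ping fails.
engine = create_engine('postgresql://user:secret@localhost/dbname',
                       pool_pre_ping=True)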
0155ed7c37fd4cafa2650911d4f902a3a8982761 | test/test_bot.py | test/test_bot.py | import re
import unittest
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
| import asyncio
import re
import unittest
from unittest import mock
from gather.bot import ListenerBot
def async_test(f):
# http://stackoverflow.com/a/23036785/304210
def wrapper(*args, **kwargs):
coro = asyncio.coroutine(f)
future = coro(*args, **kwargs)
loop = asyncio.get_event_loop()
loop.run_until_complete(future)
return wrapper
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
@async_test
def test_on_message_from_bot(self):
bot = ListenerBot()
bot.username = 'testuser'
regex = r'^test'
action = mock.Mock()
bot.actions = {regex: (re.compile(regex, re.IGNORECASE), action)}
bot.on_message(mock.Mock(), mock.Mock, 'test')
action.assert_not_called()
if __name__ == '__main__':
unittest.main()
| Add a test for on_message | Add a test for on_message
| Python | mit | veryhappythings/discord-gather | + import asyncio
import re
import unittest
+ from unittest import mock
from gather.bot import ListenerBot
+
+
+ def async_test(f):
+ # http://stackoverflow.com/a/23036785/304210
+ def wrapper(*args, **kwargs):
+ coro = asyncio.coroutine(f)
+ future = coro(*args, **kwargs)
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(future)
+ return wrapper
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
- action = unittest.mock.Mock()
+ action = mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
+ @async_test
+ def test_on_message_from_bot(self):
+ bot = ListenerBot()
+ bot.username = 'testuser'
+ regex = r'^test'
+ action = mock.Mock()
+ bot.actions = {regex: (re.compile(regex, re.IGNORECASE), action)}
+ bot.on_message(mock.Mock(), mock.Mock, 'test')
+ action.assert_not_called()
+
if __name__ == '__main__':
unittest.main()
| Add a test for on_message | ## Code Before:
import re
import unittest
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add a test for on_message
## Code After:
import asyncio
import re
import unittest
from unittest import mock
from gather.bot import ListenerBot
def async_test(f):
# http://stackoverflow.com/a/23036785/304210
def wrapper(*args, **kwargs):
coro = asyncio.coroutine(f)
future = coro(*args, **kwargs)
loop = asyncio.get_event_loop()
loop.run_until_complete(future)
return wrapper
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
@async_test
def test_on_message_from_bot(self):
bot = ListenerBot()
bot.username = 'testuser'
regex = r'^test'
action = mock.Mock()
bot.actions = {regex: (re.compile(regex, re.IGNORECASE), action)}
bot.on_message(mock.Mock(), mock.Mock, 'test')
action.assert_not_called()
if __name__ == '__main__':
unittest.main()
| // ... existing code ...
import asyncio
import re
// ... modified code ...
import unittest
from unittest import mock
from gather.bot import ListenerBot
def async_test(f):
# http://stackoverflow.com/a/23036785/304210
def wrapper(*args, **kwargs):
coro = asyncio.coroutine(f)
future = coro(*args, **kwargs)
loop = asyncio.get_event_loop()
loop.run_until_complete(future)
return wrapper
...
regex = r'^test'
action = mock.Mock()
bot.register_action(regex, action)
...
@async_test
def test_on_message_from_bot(self):
bot = ListenerBot()
bot.username = 'testuser'
regex = r'^test'
action = mock.Mock()
bot.actions = {regex: (re.compile(regex, re.IGNORECASE), action)}
bot.on_message(mock.Mock(), mock.Mock, 'test')
action.assert_not_called()
// ... rest of the code ... |
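The async_test decorator generalises to any coroutine-style test; a standalone sketch using the same pre-async/await API as above (asyncio.coroutine is legacy and was removed in later Python releases):
import asyncio
def async_test(f):
    def wrapper(*args, **kwargs):
        coro = asyncio.coroutine(f)  # wrap a generator-based test function
        loop = asyncio.get_event_loop()
        loop.run_until_complete(coro(*args, **kwargs))
    return wrapper
@async_test
def test_sleep_runs():
    yield from asyncio.sleep(0)  # stands in for real awaitable work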
0fa002e66f82eff593514c2249c4229604ec0f0a | server.py | server.py | import web
import cPickle as pickle
import json
urls = (
'/latest', 'latest',
'/history', 'history'
)
class latest:
def GET(self):
try:
latest = pickle.load(open("latest.p", "r"))
return json.dumps(latest)
except:
return "Could not read latest data"
class history:
def GET(self):
try:
history = pickle.load(open("history.p", "r"))
return json.dumps(history)
except:
return "Could not read historic data"
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
| import web
import cPickle as pickle
import json
urls = (
'/latest', 'latest',
'/history', 'history'
)
class latest:
def GET(self):
try:
with open("latest.p", 'r') as f:
latest = pickle.load(f)
return json.dumps(latest)
except:
return "Could not read latest data"
class history:
def GET(self):
try:
with open("history.p", 'r') as f:
history = pickle.load(f)
return json.dumps(history)
except:
return "Could not read historic data"
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
| Make sure to close the file afterwards | Make sure to close the file afterwards
| Python | mit | martindisch/SensorTag,martindisch/SensorTag,martindisch/SensorTag,martindisch/SensorTag | import web
import cPickle as pickle
import json
urls = (
'/latest', 'latest',
'/history', 'history'
)
class latest:
def GET(self):
try:
- latest = pickle.load(open("latest.p", "r"))
+ with open("latest.p", 'r') as f:
+ latest = pickle.load(f)
return json.dumps(latest)
except:
return "Could not read latest data"
class history:
def GET(self):
try:
- history = pickle.load(open("history.p", "r"))
+ with open("history.p", 'r') as f:
+ history = pickle.load(f)
return json.dumps(history)
except:
return "Could not read historic data"
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
| Make sure to close the file afterwards | ## Code Before:
import web
import cPickle as pickle
import json
urls = (
'/latest', 'latest',
'/history', 'history'
)
class latest:
def GET(self):
try:
latest = pickle.load(open("latest.p", "r"))
return json.dumps(latest)
except:
return "Could not read latest data"
class history:
def GET(self):
try:
history = pickle.load(open("history.p", "r"))
return json.dumps(history)
except:
return "Could not read historic data"
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
## Instruction:
Make sure to close the file afterwards
## Code After:
import web
import cPickle as pickle
import json
urls = (
'/latest', 'latest',
'/history', 'history'
)
class latest:
def GET(self):
try:
with open("latest.p", 'r') as f:
latest = pickle.load(f)
return json.dumps(latest)
except:
return "Could not read latest data"
class history:
def GET(self):
try:
with open("history.p", 'r') as f:
history = pickle.load(f)
return json.dumps(history)
except:
return "Could not read historic data"
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
| ...
try:
with open("latest.p", 'r') as f:
latest = pickle.load(f)
return json.dumps(latest)
...
try:
with open("history.p", 'r') as f:
history = pickle.load(f)
return json.dumps(history)
... |
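The with block is shorthand for an explicit try/finally close, which the original one-line open never performed:
import pickle  # the module above uses cPickle under Python 2
f = open("latest.p", "r")
try:
    latest = pickle.load(f)
finally:
    f.close()  # runs even if pickle.load raises, matching the with form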
cae7a57304e207f319e9bb2e52837ee207d0d96e | mcdowell/src/main/python/ch1/ch1.py | mcdowell/src/main/python/ch1/ch1.py | def unique(string):
counter = {}
for c in string:
if c in counter:
counter[c] += 1
else:
counter[c] = 1
print(counter)
for k in counter:
if counter[k] > 1:
return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
| def unique(string):
counter = {}
for c in string:
if c in counter:
return False
else:
counter[c] = 1
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
| Add is_permutation function. Simplify unique function. | Add is_permutation function. Simplify unique function.
| Python | mit | jamesewoo/tigeruppercut,jamesewoo/tigeruppercut | def unique(string):
counter = {}
for c in string:
if c in counter:
- counter[c] += 1
+ return False
else:
counter[c] = 1
- print(counter)
- for k in counter:
- if counter[k] > 1:
- return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
+ def is_permutation(str1, str2):
+ if len(str1) != len(str2):
+ return False
+ counter = {}
+ for i in range(len(str1)):
+ if str1[i] in counter:
+ counter[str1[i]] += 1
+ else:
+ counter[str1[i]] = 1
+ if str2[i] in counter:
+ counter[str2[i]] -= 1
+ else:
+ counter[str2[i]] = -1
+ for k in counter:
+ if counter[k] != 0:
+ return False
+ else:
+ return True
| Add is_permutation function. Simplify unique function. | ## Code Before:
def unique(string):
counter = {}
for c in string:
if c in counter:
counter[c] += 1
else:
counter[c] = 1
print(counter)
for k in counter:
if counter[k] > 1:
return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
## Instruction:
Add is_permutation function. Simplify unique function.
## Code After:
def unique(string):
counter = {}
for c in string:
if c in counter:
return False
else:
counter[c] = 1
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
| // ... existing code ...
if c in counter:
return False
else:
// ... modified code ...
counter[c] = 1
else:
...
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
// ... rest of the code ... |
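Both helpers stay O(n) with a single counting dict; a quick sanity check of the intended behaviour on hypothetical inputs, calling the functions defined above:
assert unique("abc") is True and unique("aabc") is False
assert reverse("abc") == "cba"
assert is_permutation("listen", "silent") is True
assert is_permutation("abc", "abd") is False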
0da7c7931f7abc8775087aab5054cfea63120f60 | scripts/dumpcmaps.py | scripts/dumpcmaps.py | import numpy as np
import Image
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname) | import numpy as np
import Image
import scipy.io as sio
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname) | Add all the J* colormaps | Add all the J* colormaps
| Python | bsd-2-clause | gallantlab/pycortex,smerdis/pycortex,smerdis/pycortex,CVML/pycortex,gallantlab/pycortex,CVML/pycortex,smerdis/pycortex,CVML/pycortex,CVML/pycortex,CVML/pycortex,gallantlab/pycortex,smerdis/pycortex,smerdis/pycortex,gallantlab/pycortex,gallantlab/pycortex | import numpy as np
import Image
+ import scipy.io as sio
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname) | Add all the J* colormaps | ## Code Before:
import numpy as np
import Image
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname)
## Instruction:
Add all the J* colormaps
## Code After:
import numpy as np
import Image
import scipy.io as sio
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname) | # ... existing code ...
import Image
import scipy.io as sio
# ... rest of the code ... |
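Each file written by makeImage is a 1x256 RGBA strip; the core of that pipeline in isolation, as a sketch:
import numpy as np
from matplotlib import cm
values = np.linspace(0, 1, 256)
strip = (cm.jet(values) * 255).astype(np.uint8)[np.newaxis]
# strip.shape == (1, 256, 4): one row of RGBA pixels, ready for Image.fromarray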
3e3f7b827e226146ec7d3efe523f1f900ac4e99a | sjconfparts/type.py | sjconfparts/type.py | class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true":
return True
elif str_object == "no" or str_object == "off" or str_object == "false":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| Allow “enabled”, “enable”, “disabled”, “disable” as boolean values | Allow “enabled”, “enable”, “disabled”, “disable” as boolean values
| Python | lgpl-2.1 | SmartJog/sjconf,SmartJog/sjconf | class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
- if str_object == "yes" or str_object == "on" or str_object == "true":
+ if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
- elif str_object == "no" or str_object == "off" or str_object == "false":
+ elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| Allow “enabled”, “enable”, “disabled”, “disable” as boolean values | ## Code Before:
class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true":
return True
elif str_object == "no" or str_object == "off" or str_object == "false":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
## Instruction:
Allow “enabled”, “enable”, “disabled”, “disable” as boolean values
## Code After:
class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| ...
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
... |
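The accepted spellings after this change, as a quick truth table against the class above (anything else still raises TypeError):
for s in ("yes", "on", "true", "enabled", "enable"):
    assert Type.str_to_bool(s) is True
for s in ("no", "off", "false", "disabled", "disable"):
    assert Type.str_to_bool(s) is False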
260cd3b96df3a4746560db0032d7b6042c55d7fc | integration-test/976-fractional-pois.py | integration-test/976-fractional-pois.py | assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
| assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
assert_has_feature(
5, 9, 12, 'boundaries',
{ 'min_zoom': 0 , 'id': 8024,
'source': 'naturalearthdata.com',
'name': 'New Jersey - Pennsylvania' })
assert_has_feature(
5, 9, 12, 'roads',
{ 'min_zoom': 5 , 'id': 90,
'source': 'naturalearthdata.com' })
# There is no transit data from Natural Earth
assert_has_feature(
5, 9, 12, 'water',
{ 'min_zoom': 0 , 'id': 1144,
'source': 'naturalearthdata.com',
'name': 'John H. Kerr Reservoir' })
# https://www.openstreetmap.org/relation/224951
# https://www.openstreetmap.org/relation/61320
assert_has_feature(
9, 150, 192, 'boundaries',
{ 'min_zoom': 8, 'id': -224951,
'source': 'openstretmap.org',
'name': 'New Jersey - New York' })
assert_has_feature(
9, 150, 192, 'roads',
{ 'min_zoom': 8, 'sort_key': 381,
'source': 'openstretmap.org',
'kind': 'Major Road',
'network': 'US:NJ:Hudson' })
assert_has_feature(
9, 150, 192, 'transit',
{ 'min_zoom': 5, 'ref': '54-57',
'source': 'openstretmap.org',
'name': 'Vermonter' })
assert_has_feature(
9, 150, 192, 'water',
{ 'min_zoom': 0, 'id': 10613,
'source': 'openstretmapdata.com',
'kind': 'ocean',
'name': '' })
| Add tests for source and min_zoom | Add tests for source and min_zoom
| Python | mit | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
+ # Test that source and min_zoom are set properly for boundaries, roads, transit, and water
+ assert_has_feature(
+ 5, 9, 12, 'boundaries',
+ { 'min_zoom': 0 , 'id': 8024,
+ 'source': 'naturalearthdata.com',
+ 'name': 'New Jersey - Pennsylvania' })
+
+ assert_has_feature(
+ 5, 9, 12, 'roads',
+ { 'min_zoom': 5 , 'id': 90,
+ 'source': 'naturalearthdata.com' })
+
+ # There is no transit data from Natural Earth
+
+ assert_has_feature(
+ 5, 9, 12, 'water',
+ { 'min_zoom': 0 , 'id': 1144,
+ 'source': 'naturalearthdata.com',
+ 'name': 'John H. Kerr Reservoir' })
+
+ # https://www.openstreetmap.org/relation/224951
+ # https://www.openstreetmap.org/relation/61320
+ assert_has_feature(
+ 9, 150, 192, 'boundaries',
+ { 'min_zoom': 8, 'id': -224951,
+ 'source': 'openstretmap.org',
+ 'name': 'New Jersey - New York' })
+
+ assert_has_feature(
+ 9, 150, 192, 'roads',
+ { 'min_zoom': 8, 'sort_key': 381,
+ 'source': 'openstretmap.org',
+ 'kind': 'Major Road',
+ 'network': 'US:NJ:Hudson' })
+
+ assert_has_feature(
+ 9, 150, 192, 'transit',
+ { 'min_zoom': 5, 'ref': '54-57',
+ 'source': 'openstretmap.org',
+ 'name': 'Vermonter' })
+
+ assert_has_feature(
+ 9, 150, 192, 'water',
+ { 'min_zoom': 0, 'id': 10613,
+ 'source': 'openstretmapdata.com',
+ 'kind': 'ocean',
+ 'name': '' })
+ | Add tests for source and min_zoom | ## Code Before:
assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
## Instruction:
Add tests for source and min_zoom
## Code After:
assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
assert_has_feature(
5, 9, 12, 'boundaries',
{ 'min_zoom': 0 , 'id': 8024,
'source': 'naturalearthdata.com',
'name': 'New Jersey - Pennsylvania' })
assert_has_feature(
5, 9, 12, 'roads',
{ 'min_zoom': 5 , 'id': 90,
'source': 'naturalearthdata.com' })
# There is no transit data from Natural Earth
assert_has_feature(
5, 9, 12, 'water',
{ 'min_zoom': 0 , 'id': 1144,
'source': 'naturalearthdata.com',
'name': 'John H. Kerr Reservoir' })
# https://www.openstreetmap.org/relation/224951
# https://www.openstreetmap.org/relation/61320
assert_has_feature(
9, 150, 192, 'boundaries',
{ 'min_zoom': 8, 'id': -224951,
'source': 'openstretmap.org',
'name': 'New Jersey - New York' })
assert_has_feature(
9, 150, 192, 'roads',
{ 'min_zoom': 8, 'sort_key': 381,
'source': 'openstretmap.org',
'kind': 'Major Road',
'network': 'US:NJ:Hudson' })
assert_has_feature(
9, 150, 192, 'transit',
{ 'min_zoom': 5, 'ref': '54-57',
'source': 'openstretmap.org',
'name': 'Vermonter' })
assert_has_feature(
9, 150, 192, 'water',
{ 'min_zoom': 0, 'id': 10613,
'source': 'openstretmapdata.com',
'kind': 'ocean',
'name': '' })
| // ... existing code ...
{ 'id': 147689077, 'min_zoom': 15.68 })
# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
assert_has_feature(
5, 9, 12, 'boundaries',
{ 'min_zoom': 0 , 'id': 8024,
'source': 'naturalearthdata.com',
'name': 'New Jersey - Pennsylvania' })
assert_has_feature(
5, 9, 12, 'roads',
{ 'min_zoom': 5 , 'id': 90,
'source': 'naturalearthdata.com' })
# There is no transit data from Natural Earth
assert_has_feature(
5, 9, 12, 'water',
{ 'min_zoom': 0 , 'id': 1144,
'source': 'naturalearthdata.com',
'name': 'John H. Kerr Reservoir' })
# https://www.openstreetmap.org/relation/224951
# https://www.openstreetmap.org/relation/61320
assert_has_feature(
9, 150, 192, 'boundaries',
{ 'min_zoom': 8, 'id': -224951,
'source': 'openstretmap.org',
'name': 'New Jersey - New York' })
assert_has_feature(
9, 150, 192, 'roads',
{ 'min_zoom': 8, 'sort_key': 381,
'source': 'openstretmap.org',
'kind': 'Major Road',
'network': 'US:NJ:Hudson' })
assert_has_feature(
9, 150, 192, 'transit',
{ 'min_zoom': 5, 'ref': '54-57',
'source': 'openstretmap.org',
'name': 'Vermonter' })
assert_has_feature(
9, 150, 192, 'water',
{ 'min_zoom': 0, 'id': 10613,
'source': 'openstretmapdata.com',
'kind': 'ocean',
'name': '' })
// ... rest of the code ... |
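The 'openstretmap.org' and 'openstretmapdata.com' values asserted above look like misspellings of the usual hostnames; if the tiles actually carry the standard source strings, one assertion would read as follows (hypothetical correction, not part of the commit):
assert_has_feature(
    9, 150, 192, 'water',
    { 'min_zoom': 0, 'id': 10613,
      'source': 'openstreetmapdata.com',  # 'openstreetmap' spelled in full
      'kind': 'ocean',
      'name': '' })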
1e2086b868861034d89138349c4da909f380f19e | feedback/views.py | feedback/views.py | from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Feedback
class FeedbackSerializer(serializers.ModelSerializer):
class Meta:
model = Feedback
@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
@csrf_exempt
def post(self, request, format=None):
if self.request.user.is_authenticated():
user = self.request.user
else:
user = None
if 'user' in request.data:
del request.data['user']
user_agent = request.data.get('user_agent')
if not user_agent:
user_agent = request.META.get('HTTP_USER_AGENT', None)
serializer = FeedbackSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=user, user_agent=user_agent)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Feedback
class FeedbackSerializer(serializers.ModelSerializer):
class Meta:
model = Feedback
fields = '__all__'
@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
@csrf_exempt
def post(self, request, format=None):
if self.request.user.is_authenticated():
user = self.request.user
else:
user = None
if 'user' in request.data:
del request.data['user']
user_agent = request.data.get('user_agent')
if not user_agent:
user_agent = request.META.get('HTTP_USER_AGENT', None)
serializer = FeedbackSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=user, user_agent=user_agent)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| Make feedback compatible with DRF >3.3.0 | Make feedback compatible with DRF >3.3.0
| Python | mit | City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel | from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Feedback
class FeedbackSerializer(serializers.ModelSerializer):
class Meta:
model = Feedback
+ fields = '__all__'
@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
@csrf_exempt
def post(self, request, format=None):
if self.request.user.is_authenticated():
user = self.request.user
else:
user = None
if 'user' in request.data:
del request.data['user']
user_agent = request.data.get('user_agent')
if not user_agent:
user_agent = request.META.get('HTTP_USER_AGENT', None)
serializer = FeedbackSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=user, user_agent=user_agent)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| Make feedback compatible with DRF >3.3.0 | ## Code Before:
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Feedback
class FeedbackSerializer(serializers.ModelSerializer):
class Meta:
model = Feedback
@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
@csrf_exempt
def post(self, request, format=None):
if self.request.user.is_authenticated():
user = self.request.user
else:
user = None
if 'user' in request.data:
del request.data['user']
user_agent = request.data.get('user_agent')
if not user_agent:
user_agent = request.META.get('HTTP_USER_AGENT', None)
serializer = FeedbackSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=user, user_agent=user_agent)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
## Instruction:
Make feedback compatible with DRF >3.3.0
## Code After:
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Feedback
class FeedbackSerializer(serializers.ModelSerializer):
class Meta:
model = Feedback
fields = '__all__'
@method_decorator(csrf_exempt, name='dispatch')
class FeedbackView(APIView):
@csrf_exempt
def post(self, request, format=None):
if self.request.user.is_authenticated():
user = self.request.user
else:
user = None
if 'user' in request.data:
del request.data['user']
user_agent = request.data.get('user_agent')
if not user_agent:
user_agent = request.META.get('HTTP_USER_AGENT', None)
serializer = FeedbackSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=user, user_agent=user_agent)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| // ... existing code ...
model = Feedback
fields = '__all__'
// ... rest of the code ... |
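Newer DRF releases (the 3.3 line onward, per the subject) require a ModelSerializer to declare either fields or exclude explicitly, which is all this change addresses; the minimal pattern:
from rest_framework import serializers
class FeedbackSerializer(serializers.ModelSerializer):
    class Meta:
        model = Feedback          # model imported above
        fields = '__all__'        # explicit opt-in to every model field
        # exclude = ('user_agent',) would be the other accepted form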
054503e406146eeff5f8d5437eb7db581eaeb0f2 | oscar_adyen/__init__.py | oscar_adyen/__init__.py |
from __future__ import unicode_literals
from django.conf.urls import url
from django_adyen import urlpatterns
from . import views
urlpatterns = [
url(r'^payment-done/$', views.PaymentResultView.as_view(),
name='payment-result'),
url(r'^notify/$', views.NotificationView.as_view(),
name='payment-notification')
] + urlpatterns
urls = urlpatterns, 'oscar-adyen', 'oscar-adyen'
|
from __future__ import unicode_literals
from django.conf.urls import url
from django_adyen import urlpatterns
urlpatterns = [
url(r'^payment-done/$', 'oscar_adyen.views.payment_result',
name='payment-result'),
url(r'^notify/$', 'oscar_adyen.views.notification',
name='payment-notification')
] + urlpatterns
urls = urlpatterns, 'oscar-adyen', 'oscar-adyen'
| Allow importing oscar_adyen.mixins without importing oscar_adyen.views | Allow importing oscar_adyen.mixins without importing oscar_adyen.views
| Python | mit | machtfit/adyen |
from __future__ import unicode_literals
from django.conf.urls import url
from django_adyen import urlpatterns
- from . import views
-
urlpatterns = [
- url(r'^payment-done/$', views.PaymentResultView.as_view(),
+ url(r'^payment-done/$', 'oscar_adyen.views.payment_result',
name='payment-result'),
- url(r'^notify/$', views.NotificationView.as_view(),
+ url(r'^notify/$', 'oscar_adyen.views.notification',
name='payment-notification')
] + urlpatterns
urls = urlpatterns, 'oscar-adyen', 'oscar-adyen'
| Allow importing oscar_adyen.mixins without importing oscar_adyen.views | ## Code Before:
from __future__ import unicode_literals
from django.conf.urls import url
from django_adyen import urlpatterns
from . import views
urlpatterns = [
url(r'^payment-done/$', views.PaymentResultView.as_view(),
name='payment-result'),
url(r'^notify/$', views.NotificationView.as_view(),
name='payment-notification')
] + urlpatterns
urls = urlpatterns, 'oscar-adyen', 'oscar-adyen'
## Instruction:
Allow importing oscar_adyen.mixins without importing oscar_adyen.views
## Code After:
from __future__ import unicode_literals
from django.conf.urls import url
from django_adyen import urlpatterns
urlpatterns = [
url(r'^payment-done/$', 'oscar_adyen.views.payment_result',
name='payment-result'),
url(r'^notify/$', 'oscar_adyen.views.notification',
name='payment-notification')
] + urlpatterns
urls = urlpatterns, 'oscar-adyen', 'oscar-adyen'
| // ... existing code ...
urlpatterns = [
url(r'^payment-done/$', 'oscar_adyen.views.payment_result',
name='payment-result'),
url(r'^notify/$', 'oscar_adyen.views.notification',
name='payment-notification')
// ... rest of the code ... |
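Passing the views as dotted strings defers importing oscar_adyen.views until Django resolves a URL, so the mixins module can be imported on its own; schematically (string view arguments are a legacy Django pattern, removed in Django 1.10):
import oscar_adyen.mixins     # no longer drags in oscar_adyen.views
from oscar_adyen import urls  # views are imported lazily at URL resolution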
f339af2e48f0e485f13d368dad47f541264c4f58 | web/processors/user.py | web/processors/user.py | from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors():
ambassadors = []
aambassadors = User.objects.filter(groups__name='ambassadors')
for ambassador in aambassadors:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
for code, name in list(countries):
readable_name = unicode(name)
found_ambassadors = []
for ambassador in ambassadors:
if ambassador.country == code:
found_ambassadors.append(ambassador)
countries_ambassadors.append((readable_name,found_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
| from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors():
ambassadors = []
aambassadors = User.objects.filter(groups__name='ambassadors').order_by('date_joined')
for ambassador in aambassadors:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
for code, name in list(countries):
readable_name = unicode(name)
found_ambassadors = []
for ambassador in ambassadors:
if ambassador.country == code:
found_ambassadors.append(ambassador)
countries_ambassadors.append((readable_name,found_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
| Sort listed ambassadors by date_joined | Sort listed ambassadors by date_joined
| Python | mit | ercchy/coding-events,michelesr/coding-events,joseihf/coding-events,ioana-chiorean/coding-events,joseihf/coding-events,codeeu/coding-events,michelesr/coding-events,joseihf/coding-events,ercchy/coding-events,codeeu/coding-events,ercchy/coding-events,michelesr/coding-events,ioana-chiorean/coding-events,codeeu/coding-events,codeeu/coding-events,ioana-chiorean/coding-events,ioana-chiorean/coding-events,codeeu/coding-events,ercchy/coding-events,ercchy/coding-events,michelesr/coding-events,joseihf/coding-events,joseihf/coding-events,michelesr/coding-events,ioana-chiorean/coding-events | from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors():
ambassadors = []
- aambassadors = User.objects.filter(groups__name='ambassadors')
+ aambassadors = User.objects.filter(groups__name='ambassadors').order_by('date_joined')
for ambassador in aambassadors:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
for code, name in list(countries):
readable_name = unicode(name)
found_ambassadors = []
for ambassador in ambassadors:
if ambassador.country == code:
found_ambassadors.append(ambassador)
countries_ambassadors.append((readable_name,found_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
| Sort listed ambassadors by date_joined | ## Code Before:
from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors():
ambassadors = []
aambassadors = User.objects.filter(groups__name='ambassadors')
for ambassador in aambassadors:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
for code, name in list(countries):
readable_name = unicode(name)
found_ambassadors = []
for ambassador in ambassadors:
if ambassador.country == code:
found_ambassadors.append(ambassador)
countries_ambassadors.append((readable_name,found_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
## Instruction:
Sort listed ambassadors by date_joined
## Code After:
from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors():
ambassadors = []
aambassadors = User.objects.filter(groups__name='ambassadors').order_by('date_joined')
for ambassador in aambassadors:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
for code, name in list(countries):
readable_name = unicode(name)
found_ambassadors = []
for ambassador in ambassadors:
if ambassador.country == code:
found_ambassadors.append(ambassador)
countries_ambassadors.append((readable_name,found_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
| # ... existing code ...
ambassadors = []
aambassadors = User.objects.filter(groups__name='ambassadors').order_by('date_joined')
for ambassador in aambassadors:
# ... rest of the code ... |
53dc86ace10f73832c0cbca9fcbc0389999a0e1c | hyperion/util/convenience.py | hyperion/util/convenience.py | class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
| import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
| Deal with the case of large radii for optically thin temperature radius | Deal with the case of large radii for optically thin temperature radius
| Python | bsd-2-clause | hyperion-rt/hyperion,bluescarni/hyperion,hyperion-rt/hyperion,astrofrog/hyperion,astrofrog/hyperion,bluescarni/hyperion,hyperion-rt/hyperion | + import numpy as np
+
+
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
- return self.value * rstar \
- * (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
+ x = (self.temperature / tstar) ** 4. \
- * dust.kappa_planck_temperature(self.temperature) \
+ * dust.kappa_planck_temperature(self.temperature) \
- / dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
+ / dust.kappa_planck_spectrum(nu, fnu)
+ if x < 0.001:
+ r = self.value * rstar / 2. / np.sqrt(x)
+ else:
+ r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
+ return r
| Deal with the case of large radii for optically thin temperature radius | ## Code Before:
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
## Instruction:
Deal with the case of large radii for optically thin temperature radius
## Code After:
import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
| ...
import numpy as np
class OptThinRadius(object):
...
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
... |
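The x < 0.001 branch in the patch above is the small-x limit of the closed form; a short derivation in the commit's own notation (v = self.value, R = rstar; the expansion is inferred here, not quoted from the repository):

1 - (1 - 2x)^2 = 4x - 4x^2 \approx 4x \qquad (x \ll 1)

r = \frac{v R}{\sqrt{1 - (1 - 2x)^2}} \approx \frac{v R}{2\sqrt{x}}

which is exactly self.value * rstar / 2. / np.sqrt(x), and it sidesteps the subtractive cancellation (and the division by zero at x = 0) that the unguarded ** -0.5 form hits for very small x, i.e. very large radii.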
458d61ffb5161394f8080cea59716b2f9cb492f3 | nbgrader_config.py | nbgrader_config.py | c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
c.ClearSolutions.code_stub = {'python': '##### Implement this part of the code #####\nraise NotImplementedError()'}
| c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
c.ClearSolutions.code_stub = {'python': '''##### Implement this part of the code #####
raise NotImplementedError("Code not implemented, follow the instructions.")'''}
| Add error message for not implemented error | Add error message for not implemented error
| Python | mit | pbutenee/ml-tutorial,pbutenee/ml-tutorial | c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
- c.ClearSolutions.code_stub = {'python': '##### Implement this part of the code #####\nraise NotImplementedError()'}
+ c.ClearSolutions.code_stub = {'python': '''##### Implement this part of the code #####
+ raise NotImplementedError("Code not implemented, follow the instructions.")'''}
| Add error message for not implemented error | ## Code Before:
c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
c.ClearSolutions.code_stub = {'python': '##### Implement this part of the code #####\nraise NotImplementedError()'}
## Instruction:
Add error message for not implemented error
## Code After:
c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
c.ClearSolutions.code_stub = {'python': '''##### Implement this part of the code #####
raise NotImplementedError("Code not implemented, follow the instructions.")'''}
| # ... existing code ...
]
c.ClearSolutions.code_stub = {'python': '''##### Implement this part of the code #####
raise NotImplementedError("Code not implemented, follow the instructions.")'''}
# ... rest of the code ... |
32820375c4552a9648612ea0dddfbf524e672c0e | virtool/indexes/models.py | virtool/indexes/models.py | import enum
from sqlalchemy import Column, Integer, String, Enum
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String)
reference = Column(String)
type = Column(Enum(IndexType))
size = Column(Integer)
def __repr__(self):
return f"<IndexFile(id={self.id}, name={self.name}, reference={self.reference}, type={self.type}, " \
f"size={self.size} "
| import enum
from sqlalchemy import Column, Integer, String, Enum
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String)
index = Column(String)
type = Column(Enum(IndexType))
size = Column(Integer)
def __repr__(self):
return f"<IndexFile(id={self.id}, name={self.name}, index={self.index}, type={self.type}, " \
f"size={self.size} "
| Update IndexFile model to have 'index' column instead of 'reference' | Update IndexFile model to have 'index' column instead of 'reference'
| Python | mit | virtool/virtool,igboyes/virtool,igboyes/virtool,virtool/virtool | import enum
from sqlalchemy import Column, Integer, String, Enum
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String)
- reference = Column(String)
+ index = Column(String)
type = Column(Enum(IndexType))
size = Column(Integer)
def __repr__(self):
- return f"<IndexFile(id={self.id}, name={self.name}, reference={self.reference}, type={self.type}, " \
+ return f"<IndexFile(id={self.id}, name={self.name}, index={self.index}, type={self.type}, " \
f"size={self.size} "
| Update IndexFile model to have 'index' column instead of 'reference' | ## Code Before:
import enum
from sqlalchemy import Column, Integer, String, Enum
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String)
reference = Column(String)
type = Column(Enum(IndexType))
size = Column(Integer)
def __repr__(self):
return f"<IndexFile(id={self.id}, name={self.name}, reference={self.reference}, type={self.type}, " \
f"size={self.size} "
## Instruction:
Update IndexFile model to have 'index' column instead of 'reference'
## Code After:
import enum
from sqlalchemy import Column, Integer, String, Enum
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String)
index = Column(String)
type = Column(Enum(IndexType))
size = Column(Integer)
def __repr__(self):
return f"<IndexFile(id={self.id}, name={self.name}, index={self.index}, type={self.type}, " \
f"size={self.size} "
| ...
name = Column(String)
index = Column(String)
type = Column(Enum(IndexType))
...
def __repr__(self):
return f"<IndexFile(id={self.id}, name={self.name}, index={self.index}, type={self.type}, " \
f"size={self.size} "
... |
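Renaming a mapped column normally needs a matching schema migration; a hypothetical Alembic sketch for the rename above (the record does not show how virtool migrates its database, so treat this as illustrative only):

from alembic import op

def upgrade():
    # rename the column the model now maps as IndexFile.index
    op.alter_column('index_files', 'reference', new_column_name='index')

def downgrade():
    op.alter_column('index_files', 'index', new_column_name='reference')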
f7341acf0717d238073a688c6047e18b524efab1 | qmpy/configuration/resources/__init__.py | qmpy/configuration/resources/__init__.py | import yaml
import os, os.path
loc = os.path.dirname(os.path.abspath(__file__))
hosts = yaml.load(open(loc+'/hosts.yml'))
projects = yaml.load(open(loc+'/projects.yml'))
allocations = yaml.load(open(loc+'/allocations.yml'))
users = yaml.load(open(loc+'/users.yml'))
| import yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
| Use OS-agnostic path joining operations | Use OS-agnostic path joining operations
| Python | mit | wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy | import yaml
- import os, os.path
+ import os
loc = os.path.dirname(os.path.abspath(__file__))
+ with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
+ hosts = yaml.load(fr)
- hosts = yaml.load(open(loc+'/hosts.yml'))
- projects = yaml.load(open(loc+'/projects.yml'))
- allocations = yaml.load(open(loc+'/allocations.yml'))
- users = yaml.load(open(loc+'/users.yml'))
+ with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
+ projects = yaml.load(fr)
+
+ with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
+ allocations = yaml.load(fr)
+
+ with open(os.path.join(loc, 'users.yml'), 'r') as fr:
+ users = yaml.load(fr)
+ | Use OS-agnostic path joining operations | ## Code Before:
import yaml
import os, os.path
loc = os.path.dirname(os.path.abspath(__file__))
hosts = yaml.load(open(loc+'/hosts.yml'))
projects = yaml.load(open(loc+'/projects.yml'))
allocations = yaml.load(open(loc+'/allocations.yml'))
users = yaml.load(open(loc+'/users.yml'))
## Instruction:
Use OS-agnostic path joining operations
## Code After:
import yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
| // ... existing code ...
import yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
// ... modified code ...
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
// ... rest of the code ... |
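Besides the portable separators, the with-blocks above close each handle deterministically instead of leaving it to garbage collection; the same pattern for a single file, names as in the record:

import os
path = os.path.join(loc, 'hosts.yml')   # no hard-coded '/' in the path
with open(path, 'r') as fr:             # closed on exit, even if yaml.load raises
    hosts = yaml.load(fr)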
1abfdea38e868d68c532961459d2b4cbef5a9b71 | src/zeit/website/section.py | src/zeit/website/section.py | import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
| import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
zope.interface.noLongerProvides(content,
zeit.website.interfaces.IWebsiteSection)
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
| Remove iface if rebrush_content is not set | Remove iface if rebrush_content is not set
| Python | bsd-3-clause | ZeitOnline/zeit.website | import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
+ zope.interface.noLongerProvides(content,
+ zeit.website.interfaces.IWebsiteSection)
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
| Remove iface, if rebrush_contetn ist not set | ## Code Before:
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
## Instruction:
Remove iface, if rebrush_contetn ist not set
## Code After:
import zeit.website.interfaces
from zeit.cms.section.interfaces import ISectionMarker
import grokcore.component as grok
import zeit.cms.checkout.interfaces
import zeit.cms.content.interfaces
import zope.interface
@grok.subscribe(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.checkout.interfaces.IBeforeCheckinEvent)
def provide_website_content(content, event):
content = zope.security.proxy.getObject(content)
if not content.rebrush_website_content:
zope.interface.noLongerProvides(content,
zeit.website.interfaces.IWebsiteSection)
return
for iface in zope.interface.providedBy(content):
if issubclass(iface, ISectionMarker):
zope.interface.noLongerProvides(content, iface)
zope.interface.alsoProvides(content, zeit.website.interfaces.IWebsiteSection)
| # ... existing code ...
if not content.rebrush_website_content:
zope.interface.noLongerProvides(content,
zeit.website.interfaces.IWebsiteSection)
return
# ... rest of the code ... |
fce1b1bdb5a39bbe57b750cd453a9697b8447d6b | chat.py | chat.py | import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
# if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
| import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
| Correct position of comment :) | Correct position of comment :)
| Python | bsd-3-clause | arturosevilla/notification-server-example,arturosevilla/notification-server-example | import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
- # if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
+ # if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
| Correct position of comment :) | ## Code Before:
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
# if chatroom doesn't exist create it!
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
## Instruction:
Correct position of comment :)
## Code After:
import re
from redis import Redis
import json
from datetime import datetime
def is_valid_chatroom(chatroom):
return re.match('[A-Za-z_\\d]+$', chatroom) is not None
def get_redis():
return Redis()
def get_conversation(chatroom):
if chatroom is None or len(chatroom) == 0:
return None
storage = get_redis()
return [
json.loads(m)
for m in storage.lrange('notifexample:' + chatroom, 0, -1)
]
def send_message(chatroom, user_id, name, message):
if '<script>' in message:
message += '-- Not this time DefConFags'
storage = get_redis()
now = datetime.now()
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
'notifexample:' + chatroom,
json.dumps({
'author': name,
'userID': user_id,
'message': message,
'createdOn': created_on
})
)
| # ... existing code ...
return None
storage = get_redis()
# ... modified code ...
created_on = now.strftime('%Y-%m-%d %H:%M:%S')
# if chatroom doesn't exist create it!
storage.rpush(
# ... rest of the code ... |
46ebeba28f8fbb9d43457aa3fa539b29048a581b | netbox/users/api/views.py | netbox/users/api/views.py | from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user'))
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
| from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
| Set default ordering for user and group API endpoints | Set default ordering for user and group API endpoints
| Python | apache-2.0 | digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox | from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
- queryset = RestrictedQuerySet(model=User).prefetch_related('groups')
+ queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
- queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user'))
+ queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
| Set default ordering for user and group API endpoints | ## Code Before:
from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user'))
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
## Instruction:
Set default ordering for user and group API endpoints
## Code After:
from django.contrib.auth.models import Group, User
from django.db.models import Count
from users import filters
from users.models import ObjectPermission
from utilities.api import ModelViewSet
from utilities.querysets import RestrictedQuerySet
from . import serializers
#
# Users and groups
#
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
serializer_class = serializers.UserSerializer
filterset_class = filters.UserFilterSet
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
serializer_class = serializers.GroupSerializer
filterset_class = filters.GroupFilterSet
#
# ObjectPermissions
#
class ObjectPermissionViewSet(ModelViewSet):
queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
serializer_class = serializers.ObjectPermissionSerializer
filterset_class = filters.ObjectPermissionFilterSet
| ...
class UserViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
serializer_class = serializers.UserSerializer
...
class GroupViewSet(ModelViewSet):
queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
serializer_class = serializers.GroupSerializer
... |
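The likely motivation for the default order_by() (implied rather than stated in the commit) is that Django paginates unordered querysets nondeterministically and warns about it, which matters for DRF list endpoints:

from django.core.paginator import Paginator

Paginator(User.objects.all(), 50)                 # emits UnorderedObjectListWarning
Paginator(User.objects.order_by('username'), 50)  # stable, repeatable pages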
df4d4f2972d8d1a91ce4353343c6279580985e3c | index.py | index.py | from __future__ import division
import urllib.request as request, json, os.path
import json, time
if os.path.exists('config/config.json'):
config_file = open('config/config.json')
config = json.load(config_file)
else:
print('Please copy the config.json file to config-local.json and fill in the file.')
exit()
print(time.strftime("%x") + ": Eagle woke up")
total_volume = 0
symbols = ','.join(config['currencies'])
url = "http://api.coinlayer.com/api/live?access_key=" + config['coinlayer'] + "&target=EUR&symbols=" + symbols
with request.urlopen(url) as response:
rates = json.loads(response.read().decode('utf-8'))['rates']
for currency in config['currencies'].keys():
if currency not in rates:
print("Cryptocurrency", currency, "does not exist.")
continue
total_volume += rates[currency] * config['currencies'][currency]['balance']
print("Total euro : " + str(total_volume) + " eur")
| from __future__ import division
import urllib.request as request, json, os.path
import json, time
if os.path.exists('config/config.json'):
config_file = open('config/config.json')
config = json.load(config_file)
else:
print('Please copy the config.json.template file to config.json and fill in the file.')
exit()
print(time.strftime("%x") + ": Eagle woke up")
total_volume = 0
symbols = ','.join(config['currencies'])
url = "http://api.coinlayer.com/api/live?access_key=" + config['coinlayer'] + "&target=EUR&symbols=" + symbols
with request.urlopen(url) as response:
rates = json.loads(response.read().decode('utf-8'))['rates']
for currency in config['currencies'].keys():
if currency not in rates:
print("Cryptocurrency", currency, "does not exist.")
continue
total_volume += rates[currency] * config['currencies'][currency]['balance']
print("Total euro : " + str(total_volume) + " eur")
| Change print statement about config | Change print statement about config
| Python | mit | pkakelas/eagle | from __future__ import division
import urllib.request as request, json, os.path
import json, time
if os.path.exists('config/config.json'):
config_file = open('config/config.json')
config = json.load(config_file)
else:
- print('Please copy the config.json file to config-local.json and fill in the file.')
+ print('Please copy the config.json.template file to config.json and fill in the file.')
exit()
print(time.strftime("%x") + ": Eagle woke up")
total_volume = 0
symbols = ','.join(config['currencies'])
url = "http://api.coinlayer.com/api/live?access_key=" + config['coinlayer'] + "&target=EUR&symbols=" + symbols
with request.urlopen(url) as response:
rates = json.loads(response.read().decode('utf-8'))['rates']
for currency in config['currencies'].keys():
if currency not in rates:
print("Cryptocurrency", currency, "does not exist.")
continue
total_volume += rates[currency] * config['currencies'][currency]['balance']
print("Total euro : " + str(total_volume) + " eur")
| Change print statement about config | ## Code Before:
from __future__ import division
import urllib.request as request, json, os.path
import json, time
if os.path.exists('config/config.json'):
config_file = open('config/config.json')
config = json.load(config_file)
else:
print('Please copy the config.json file to config-local.json and fill in the file.')
exit()
print(time.strftime("%x") + ": Eagle woke up")
total_volume = 0
symbols = ','.join(config['currencies'])
url = "http://api.coinlayer.com/api/live?access_key=" + config['coinlayer'] + "&target=EUR&symbols=" + symbols
with request.urlopen(url) as response:
rates = json.loads(response.read().decode('utf-8'))['rates']
for currency in config['currencies'].keys():
if currency not in rates:
print("Cryptocurrency", currency, "does not exist.")
continue
total_volume += rates[currency] * config['currencies'][currency]['balance']
print("Total euro : " + str(total_volume) + " eur")
## Instruction:
Change print statement about config
## Code After:
from __future__ import division
import urllib.request as request, json, os.path
import json, time
if os.path.exists('config/config.json'):
config_file = open('config/config.json')
config = json.load(config_file)
else:
print('Please copy the config.json.template file to config.json and fill in the file.')
exit()
print(time.strftime("%x") + ": Eagle woke up")
total_volume = 0
symbols = ','.join(config['currencies'])
url = "http://api.coinlayer.com/api/live?access_key=" + config['coinlayer'] + "&target=EUR&symbols=" + symbols
with request.urlopen(url) as response:
rates = json.loads(response.read().decode('utf-8'))['rates']
for currency in config['currencies'].keys():
if currency not in rates:
print("Cryptocurrency", currency, "does not exist.")
continue
total_volume += rates[currency] * config['currencies'][currency]['balance']
print("Total euro : " + str(total_volume) + " eur")
| ...
else:
print('Please copy the config.json.template file to config.json and fill in the file.')
exit()
... |
5de08e3b7be029a3a10dab9e4a259b046488d4af | examples/django_app/tests/test_integration.py | examples/django_app/tests/test_integration.py | from django.test import TestCase
from django.core.urlresolvers import reverse
import unittest
class ApiIntegrationTestCase(TestCase):
def setUp(self):
super(ApiIntegrationTestCase, self).setUp()
self.api_url = reverse('chatterbot:chatterbot')
def _get_json(self, response):
import json
return json.loads(response.content)
def test_get_recent_statements_empty(self):
response = self.client.get(self.api_url)
data = self._get_json(response)
unittest.SkipTest('This test needs to be created.')
def test_get_recent_statements(self):
response = self.client.post(
self.api_url,
{'text': 'How are you?'},
format='json'
)
response = self.client.get(self.api_url)
data = self._get_json(response)
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 1)
self.assertEqual(len(data['recent_statements'][0]), 2)
self.assertIn('text', data['recent_statements'][0][0])
self.assertIn('text', data['recent_statements'][0][1])
| from django.test import TestCase
from django.core.urlresolvers import reverse
import unittest
class ApiIntegrationTestCase(TestCase):
def setUp(self):
super(ApiIntegrationTestCase, self).setUp()
self.api_url = reverse('chatterbot:chatterbot')
def tearDown(self):
super(ApiIntegrationTestCase, self).tearDown()
from chatterbot.ext.django_chatterbot.views import ChatterBotView
# Clear the response queue between tests
ChatterBotView.chatterbot.recent_statements.queue = []
def _get_json(self, response):
import json
return json.loads(response.content)
def test_get_recent_statements_empty(self):
response = self.client.get(self.api_url)
data = self._get_json(response)
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 0)
def test_get_recent_statements(self):
response = self.client.post(
self.api_url,
{'text': 'How are you?'},
format='json'
)
response = self.client.get(self.api_url)
data = self._get_json(response)
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 1)
self.assertEqual(len(data['recent_statements'][0]), 2)
self.assertIn('text', data['recent_statements'][0][0])
self.assertIn('text', data['recent_statements'][0][1])
| Add test method to clear response queue. | Add test method to clear response queue.
| Python | bsd-3-clause | Reinaesaya/OUIRL-ChatBot,Gustavo6046/ChatterBot,maclogan/VirtualPenPal,davizucon/ChatterBot,gunthercox/ChatterBot,Reinaesaya/OUIRL-ChatBot,vkosuri/ChatterBot | from django.test import TestCase
from django.core.urlresolvers import reverse
import unittest
class ApiIntegrationTestCase(TestCase):
def setUp(self):
super(ApiIntegrationTestCase, self).setUp()
self.api_url = reverse('chatterbot:chatterbot')
+ def tearDown(self):
+ super(ApiIntegrationTestCase, self).tearDown()
+ from chatterbot.ext.django_chatterbot.views import ChatterBotView
+
+ # Clear the response queue between tests
+ ChatterBotView.chatterbot.recent_statements.queue = []
+
def _get_json(self, response):
import json
return json.loads(response.content)
def test_get_recent_statements_empty(self):
response = self.client.get(self.api_url)
data = self._get_json(response)
- unittest.SkipTest('This test needs to be created.')
+ self.assertIn('recent_statements', data)
+ self.assertEqual(len(data['recent_statements']), 0)
def test_get_recent_statements(self):
response = self.client.post(
self.api_url,
{'text': 'How are you?'},
format='json'
)
response = self.client.get(self.api_url)
data = self._get_json(response)
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 1)
self.assertEqual(len(data['recent_statements'][0]), 2)
self.assertIn('text', data['recent_statements'][0][0])
- self.assertIn('text', data['recent_statements'][0][1])
+ self.assertIn('text', data['recent_statements'][0][1])
| Add test method to clear response queue. | ## Code Before:
from django.test import TestCase
from django.core.urlresolvers import reverse
import unittest
class ApiIntegrationTestCase(TestCase):
def setUp(self):
super(ApiIntegrationTestCase, self).setUp()
self.api_url = reverse('chatterbot:chatterbot')
def _get_json(self, response):
import json
return json.loads(response.content)
def test_get_recent_statements_empty(self):
response = self.client.get(self.api_url)
data = self._get_json(response)
unittest.SkipTest('This test needs to be created.')
def test_get_recent_statements(self):
response = self.client.post(
self.api_url,
{'text': 'How are you?'},
format='json'
)
response = self.client.get(self.api_url)
data = self._get_json(response)
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 1)
self.assertEqual(len(data['recent_statements'][0]), 2)
self.assertIn('text', data['recent_statements'][0][0])
self.assertIn('text', data['recent_statements'][0][1])
## Instruction:
Add test method to clear response queue.
## Code After:
from django.test import TestCase
from django.core.urlresolvers import reverse
import unittest
class ApiIntegrationTestCase(TestCase):
def setUp(self):
super(ApiIntegrationTestCase, self).setUp()
self.api_url = reverse('chatterbot:chatterbot')
def tearDown(self):
super(ApiIntegrationTestCase, self).tearDown()
from chatterbot.ext.django_chatterbot.views import ChatterBotView
# Clear the response queue between tests
ChatterBotView.chatterbot.recent_statements.queue = []
def _get_json(self, response):
import json
return json.loads(response.content)
def test_get_recent_statements_empty(self):
response = self.client.get(self.api_url)
data = self._get_json(response)
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 0)
def test_get_recent_statements(self):
response = self.client.post(
self.api_url,
{'text': 'How are you?'},
format='json'
)
response = self.client.get(self.api_url)
data = self._get_json(response)
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 1)
self.assertEqual(len(data['recent_statements'][0]), 2)
self.assertIn('text', data['recent_statements'][0][0])
self.assertIn('text', data['recent_statements'][0][1])
| // ... existing code ...
def tearDown(self):
super(ApiIntegrationTestCase, self).tearDown()
from chatterbot.ext.django_chatterbot.views import ChatterBotView
# Clear the response queue between tests
ChatterBotView.chatterbot.recent_statements.queue = []
def _get_json(self, response):
// ... modified code ...
self.assertIn('recent_statements', data)
self.assertEqual(len(data['recent_statements']), 0)
...
self.assertIn('text', data['recent_statements'][0][0])
self.assertIn('text', data['recent_statements'][0][1])
// ... rest of the code ... |
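The tearDown above is needed because the view keeps the bot as class-level state, so its queue outlives any single test method; a minimal sketch of that leak (FakeView is hypothetical, the real chain is ChatterBotView.chatterbot.recent_statements.queue):

class FakeView:
    queue = []                         # class attribute, shared across all tests

def test_a():
    FakeView.queue.append('How are you?')

def test_b():
    assert FakeView.queue == []        # fails unless a tearDown clears the list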
16e9987e680a6a44acdb14bd7554414dfe261056 | sale_automatic_workflow/models/stock_move.py | sale_automatic_workflow/models/stock_move.py |
from odoo import api, models
class StockMove(models.Model):
_inherit = 'stock.move'
@api.model
def _prepare_picking_assign(self, move):
values = super(StockMove, self)._prepare_picking_assign(move)
if move.procurement_id.sale_line_id:
sale = move.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values
|
from odoo import api, models
class StockMove(models.Model):
_inherit = 'stock.move'
@api.multi
def _get_new_picking_values(self):
values = super(StockMove, self)._get_new_picking_values()
if self.procurement_id.sale_line_id:
sale = self.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values
| Fix API type & method name for picking values | Fix API type & method name for picking values
| Python | agpl-3.0 | kittiu/sale-workflow,kittiu/sale-workflow |
from odoo import api, models
class StockMove(models.Model):
_inherit = 'stock.move'
- @api.model
+ @api.multi
- def _prepare_picking_assign(self, move):
+ def _get_new_picking_values(self):
- values = super(StockMove, self)._prepare_picking_assign(move)
+ values = super(StockMove, self)._get_new_picking_values()
- if move.procurement_id.sale_line_id:
+ if self.procurement_id.sale_line_id:
- sale = move.procurement_id.sale_line_id.order_id
+ sale = self.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values
| Fix API type & method name for picking values | ## Code Before:
from odoo import api, models
class StockMove(models.Model):
_inherit = 'stock.move'
@api.model
def _prepare_picking_assign(self, move):
values = super(StockMove, self)._prepare_picking_assign(move)
if move.procurement_id.sale_line_id:
sale = move.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values
## Instruction:
Fix API type & method name for picking values
## Code After:
from odoo import api, models
class StockMove(models.Model):
_inherit = 'stock.move'
@api.multi
def _get_new_picking_values(self):
values = super(StockMove, self)._get_new_picking_values()
if self.procurement_id.sale_line_id:
sale = self.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values
| ...
@api.multi
def _get_new_picking_values(self):
values = super(StockMove, self)._get_new_picking_values()
if self.procurement_id.sale_line_id:
sale = self.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
... |
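The decorator swap above matches how Odoo dispatches the hook: under @api.multi the move is self rather than a threaded-through argument, so the call shape becomes (illustrative):

values = move._get_new_picking_values()      # `move` is the stock.move recordset
process_id = values['workflow_process_id']   # propagated from the linked sale order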
62d9fdfe0ad3fc37286aa19a87e2890aaf90f639 | tasks/check_rd2_enablement.py | tasks/check_rd2_enablement.py | import simple_salesforce
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class is_rd2_enabled(BaseSalesforceApiTask):
def _run_task(self):
try:
settings = self.sf.query(
"SELECT IsRecurringDonations2Enabled__c "
"FROM npe03__Recurring_Donations_Settings__c "
"WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
)
except simple_salesforce.exceptions.SalesforceMalformedRequest:
# The field does not exist in the target org, meaning it's
# pre-RD2
self.return_values = False
return
if settings.get("records"):
if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
self.return_values = True
self.return_values = False | import simple_salesforce
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class is_rd2_enabled(BaseSalesforceApiTask):
def _run_task(self):
try:
settings = self.sf.query(
"SELECT IsRecurringDonations2Enabled__c "
"FROM npe03__Recurring_Donations_Settings__c "
"WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
)
except simple_salesforce.exceptions.SalesforceMalformedRequest:
# The field does not exist in the target org, meaning it's
# pre-RD2
self.return_values = False
return
if settings.get("records"):
if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
self.return_values = True
return
self.return_values = False | Correct bug in preflight check | Correct bug in preflight check
| Python | bsd-3-clause | SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus | import simple_salesforce
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class is_rd2_enabled(BaseSalesforceApiTask):
def _run_task(self):
try:
settings = self.sf.query(
"SELECT IsRecurringDonations2Enabled__c "
"FROM npe03__Recurring_Donations_Settings__c "
"WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
)
except simple_salesforce.exceptions.SalesforceMalformedRequest:
# The field does not exist in the target org, meaning it's
# pre-RD2
self.return_values = False
return
if settings.get("records"):
if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
self.return_values = True
+ return
self.return_values = False | Correct bug in preflight check | ## Code Before:
import simple_salesforce
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class is_rd2_enabled(BaseSalesforceApiTask):
def _run_task(self):
try:
settings = self.sf.query(
"SELECT IsRecurringDonations2Enabled__c "
"FROM npe03__Recurring_Donations_Settings__c "
"WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
)
except simple_salesforce.exceptions.SalesforceMalformedRequest:
# The field does not exist in the target org, meaning it's
# pre-RD2
self.return_values = False
return
if settings.get("records"):
if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
self.return_values = True
self.return_values = False
## Instruction:
Correct bug in preflight check
## Code After:
import simple_salesforce
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class is_rd2_enabled(BaseSalesforceApiTask):
def _run_task(self):
try:
settings = self.sf.query(
"SELECT IsRecurringDonations2Enabled__c "
"FROM npe03__Recurring_Donations_Settings__c "
"WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
)
except simple_salesforce.exceptions.SalesforceMalformedRequest:
# The field does not exist in the target org, meaning it's
# pre-RD2
self.return_values = False
return
if settings.get("records"):
if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
self.return_values = True
return
self.return_values = False | ...
self.return_values = True
return
... |
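The one-line fix above matters because the trailing assignment is unconditional; a reduction of the bug (illustrative, not taken from the repository):

def buggy(enabled):
    rv = None
    if enabled:
        rv = True      # clobbered two lines later
    rv = False
    return rv          # always False

def fixed(enabled):
    if enabled:
        return True    # the added early return preserves the result
    return False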
8c228a79450c49ee1d494ca1e3cf61ea7bcc8177 | setup.py | setup.py |
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import os
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
os.environ["STEAM_API_KEY"] = self.key
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
| Set API key directly in test runner | Set API key directly in test runner
| Python | isc | miedzinski/steamodd,Lagg/steamodd |
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
- import os
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
- os.environ["STEAM_API_KEY"] = self.key
+ steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
| Set API key directly in test runner | ## Code Before:
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import os
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
os.environ["STEAM_API_KEY"] = self.key
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
## Instruction:
Set API key directly in test runner
## Code After:
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
| ...
from unittest import TestLoader, TextTestRunner
import steam
...
else:
steam.api.key.set(self.key)
... |
6fa6ef07dd18794b75d63ffa2a5be83e2ec9b674 | bit/count_ones.py | bit/count_ones.py |
def count_ones(n):
"""
:type n: int
:rtype: int
"""
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
|
def count_ones(n):
"""
:type n: int
:rtype: int
"""
if n < 0:
return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| Check if the input is negative | Check if the input is negative
As the comments mention, the code would work only for unsigned integers.
If a negative integer is provided as input, then the code runs into an
infinite loop. To avoid this, we are checking if the input is negative.
If yes, then return control before loop is entered.
| Python | mit | amaozhao/algorithms,keon/algorithms |
def count_ones(n):
"""
:type n: int
:rtype: int
"""
+ if n < 0:
+ return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| Check if the input is negative | ## Code Before:
def count_ones(n):
"""
:type n: int
:rtype: int
"""
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
## Instruction:
Check if the input is negative
## Code After:
def count_ones(n):
"""
:type n: int
:rtype: int
"""
if n < 0:
return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| ...
"""
if n < 0:
return
counter = 0
... |
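The guard above is needed because Python's >> is an arithmetic shift on unbounded ints: a negative n converges to -1 rather than 0, so `while n:` never exits. A quick demonstration:

n = -5
for _ in range(5):
    n >>= 1
    print(n)    # -3, -2, -1, -1, -1 -- never reaches 0, so the loop cannot end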
6fd5e51a797f3d85954f6a4c97eacc008b0e4d48 | tohu/v5/namespace.py | tohu/v5/namespace.py | from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c) | from mako.template import Template
import textwrap
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __repr__(self):
s = Template(textwrap.dedent("""\
<TohuNameSpace:
%for name, g in items:
${name}: ${g}
%endfor
>
""")).render(items=self.generators.items())
return s
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c) | Add repr method for TohuNamespace | Add repr method for TohuNamespace
| Python | mit | maxalbert/tohu | + from mako.template import Template
+ import textwrap
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
+
+ def __repr__(self):
+ s = Template(textwrap.dedent("""\
+ <TohuNameSpace:
+ %for name, g in items:
+ ${name}: ${g}
+ %endfor
+ >
+ """)).render(items=self.generators.items())
+ return s
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c) | Add repr method for TohuNamespace | ## Code Before:
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c)
## Instruction:
Add repr method for TohuNamespace
## Code After:
from mako.template import Template
import textwrap
from bidict import bidict, ValueDuplicationError
def is_anonymous(name):
return name.startswith("ANONYMOUS_ANONYMOUS_ANONYMOUS_")
class TohuNamespaceError(Exception):
"""
Custom exception.
"""
class TohuNamespace:
def __init__(self):
self.generators = bidict()
def __repr__(self):
s = Template(textwrap.dedent("""\
<TohuNameSpace:
%for name, g in items:
${name}: ${g}
%endfor
>
""")).render(items=self.generators.items())
return s
def __len__(self):
return len(self.generators)
def __getitem__(self, key):
return self.generators[key]
def add_generator(self, g, name=None):
if name is None:
name = f"ANONYMOUS_ANONYMOUS_ANONYMOUS_{g.tohu_id}"
if name in self.generators and self.generators[name] is not g:
raise TohuNamespaceError("A different generator with the same name already exists.")
try:
self.generators[name] = g
except ValueDuplicationError:
existing_name = self.generators.inv[g]
if is_anonymous(existing_name) and not is_anonymous(name):
self.generators.inv[g] = name
def add_generator_with_dependencies(self, g, name=None):
self.add_generator(g, name=name)
for c in g._input_generators:
self.add_generator(c) | // ... existing code ...
from mako.template import Template
import textwrap
from bidict import bidict, ValueDuplicationError
// ... modified code ...
self.generators = bidict()
def __repr__(self):
s = Template(textwrap.dedent("""\
<TohuNameSpace:
%for name, g in items:
${name}: ${g}
%endfor
>
""")).render(items=self.generators.items())
return s
// ... rest of the code ... |
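A rough sketch of what the Mako-backed repr above renders; the generator names and objects are assumptions, not part of the commit.

ns = TohuNamespace()
ns.add_generator(g1, name="alice")   # g1, g2: previously built tohu generators (assumed)
ns.add_generator(g2, name="bob")
print(ns)
# expected shape of the output:
# <TohuNameSpace:
#    alice: <repr of g1>
#    bob: <repr of g2>
# >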
51c0ae7b647a9ea354928f80acbcabef778bedd5 | icekit/page_types/articles/models.py | icekit/page_types/articles/models.py | from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('parent', 'slug', 'publishing_linked'),)
| from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
| Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order. | Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order.
The first field should be the one most likely to uniquely identify an
object.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
- unique_together = (('parent', 'slug', 'publishing_linked'),)
+ unique_together = (('slug', 'parent', 'publishing_linked'), )
| Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order. | ## Code Before:
from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('parent', 'slug', 'publishing_linked'),)
## Instruction:
Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order.
## Code After:
from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
| // ... existing code ...
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
// ... rest of the code ... |
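A sketch of the index-order reasoning in the commit message above, assuming the constraint is backed by a composite unique index; the lookups below are hypothetical.

# unique_together maps to roughly: UNIQUE (slug, parent_id, publishing_linked_id)
# with the most selective column first, a slug-only lookup can use the index prefix
Article.objects.filter(slug='my-article')      # can be served from the leading column
Article.objects.filter(parent=category_page)   # parent-only lookups no longer match the prefix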
dc2c960bb937cc287dedf95d407ed2e95f3f6724 | sigma_files/serializers.py | sigma_files/serializers.py | from rest_framework import serializers
from sigma.utils import CurrentUserCreateOnlyDefault
from sigma_files.models import Image
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
file = serializers.ImageField(max_length=255)
height = serializers.IntegerField(source='file.height', read_only=True)
width = serializers.IntegerField(source='file.width', read_only=True)
owner = serializers.PrimaryKeyRelatedField(read_only=True, default=CurrentUserCreateOnlyDefault())
| from rest_framework import serializers
from dry_rest_permissions.generics import DRYPermissionsField
from sigma.utils import CurrentUserCreateOnlyDefault
from sigma_files.models import Image
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
file = serializers.ImageField(max_length=255)
height = serializers.IntegerField(source='file.height', read_only=True)
width = serializers.IntegerField(source='file.width', read_only=True)
owner = serializers.PrimaryKeyRelatedField(read_only=True, default=CurrentUserCreateOnlyDefault())
permissions = DRYPermissionsField(actions=['read', 'write'])
| Add permissions field on ImageSerializer | Add permissions field on ImageSerializer
| Python | agpl-3.0 | ProjetSigma/backend,ProjetSigma/backend | from rest_framework import serializers
+ from dry_rest_permissions.generics import DRYPermissionsField
from sigma.utils import CurrentUserCreateOnlyDefault
from sigma_files.models import Image
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
file = serializers.ImageField(max_length=255)
height = serializers.IntegerField(source='file.height', read_only=True)
width = serializers.IntegerField(source='file.width', read_only=True)
owner = serializers.PrimaryKeyRelatedField(read_only=True, default=CurrentUserCreateOnlyDefault())
+ permissions = DRYPermissionsField(actions=['read', 'write'])
| Add permissions field on ImageSerializer | ## Code Before:
from rest_framework import serializers
from sigma.utils import CurrentUserCreateOnlyDefault
from sigma_files.models import Image
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
file = serializers.ImageField(max_length=255)
height = serializers.IntegerField(source='file.height', read_only=True)
width = serializers.IntegerField(source='file.width', read_only=True)
owner = serializers.PrimaryKeyRelatedField(read_only=True, default=CurrentUserCreateOnlyDefault())
## Instruction:
Add permissions field on ImageSerializer
## Code After:
from rest_framework import serializers
from dry_rest_permissions.generics import DRYPermissionsField
from sigma.utils import CurrentUserCreateOnlyDefault
from sigma_files.models import Image
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
file = serializers.ImageField(max_length=255)
height = serializers.IntegerField(source='file.height', read_only=True)
width = serializers.IntegerField(source='file.width', read_only=True)
owner = serializers.PrimaryKeyRelatedField(read_only=True, default=CurrentUserCreateOnlyDefault())
permissions = DRYPermissionsField(actions=['read', 'write'])
| # ... existing code ...
from rest_framework import serializers
from dry_rest_permissions.generics import DRYPermissionsField
# ... modified code ...
owner = serializers.PrimaryKeyRelatedField(read_only=True, default=CurrentUserCreateOnlyDefault())
permissions = DRYPermissionsField(actions=['read', 'write'])
# ... rest of the code ... |
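A hedged sketch of what the new field contributes to the payload; the image and request objects are assumed to exist, and the exact booleans depend on the DRYPermissions hooks defined on the model.

serializer = ImageSerializer(image, context={'request': request})
serializer.data['permissions']
# -> e.g. {'read': True, 'write': False}, computed per user by dry-rest-permissions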
142e361d2bcfbdc15939ad33c600bf943025f7b1 | api/v1/serializers/no_project_serializer.py | api/v1/serializers/no_project_serializer.py | from core.models.user import AtmosphereUser
from core.query import only_current, only_current_source
from rest_framework import serializers
from .application_serializer import ApplicationSerializer
from .instance_serializer import InstanceSerializer
from .volume_serializer import VolumeSerializer
class NoProjectSerializer(serializers.ModelSerializer):
applications = serializers.SerializerMethodField('get_user_applications')
instances = serializers.SerializerMethodField('get_user_instances')
volumes = serializers.SerializerMethodField('get_user_volumes')
def get_user_applications(self, atmo_user):
return [ApplicationSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.application_set.filter(only_current(), projects=None)]
def get_user_instances(self, atmo_user):
return [InstanceSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.instance_set.filter(only_current(),
source__provider__active=True,
projects=None)]
def get_user_volumes(self, atmo_user):
return [VolumeSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.volume_set().filter(*only_current_source(),
instance_source__provider__active=True, projects=None)]
class Meta:
model = AtmosphereUser
fields = ('applications', 'instances', 'volumes')
| from core.models.user import AtmosphereUser
from core.query import only_current, only_current_source
from rest_framework import serializers
from .instance_serializer import InstanceSerializer
from .volume_serializer import VolumeSerializer
class NoProjectSerializer(serializers.ModelSerializer):
instances = serializers.SerializerMethodField('get_user_instances')
volumes = serializers.SerializerMethodField('get_user_volumes')
def get_user_instances(self, atmo_user):
return [InstanceSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.instance_set.filter(only_current(),
source__provider__active=True,
projects=None)]
def get_user_volumes(self, atmo_user):
return [VolumeSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.volume_set().filter(*only_current_source(),
instance_source__provider__active=True, projects=None)]
class Meta:
model = AtmosphereUser
fields = ('instances', 'volumes')
| Remove final references to application | Remove final references to application
| Python | apache-2.0 | CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend | from core.models.user import AtmosphereUser
from core.query import only_current, only_current_source
from rest_framework import serializers
- from .application_serializer import ApplicationSerializer
from .instance_serializer import InstanceSerializer
from .volume_serializer import VolumeSerializer
class NoProjectSerializer(serializers.ModelSerializer):
- applications = serializers.SerializerMethodField('get_user_applications')
instances = serializers.SerializerMethodField('get_user_instances')
volumes = serializers.SerializerMethodField('get_user_volumes')
-
- def get_user_applications(self, atmo_user):
- return [ApplicationSerializer(
- item,
- context={'request': self.context.get('request')}).data for item in
- atmo_user.application_set.filter(only_current(), projects=None)]
def get_user_instances(self, atmo_user):
return [InstanceSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.instance_set.filter(only_current(),
source__provider__active=True,
projects=None)]
def get_user_volumes(self, atmo_user):
return [VolumeSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.volume_set().filter(*only_current_source(),
instance_source__provider__active=True, projects=None)]
class Meta:
model = AtmosphereUser
- fields = ('applications', 'instances', 'volumes')
+ fields = ('instances', 'volumes')
| Remove final references to application | ## Code Before:
from core.models.user import AtmosphereUser
from core.query import only_current, only_current_source
from rest_framework import serializers
from .application_serializer import ApplicationSerializer
from .instance_serializer import InstanceSerializer
from .volume_serializer import VolumeSerializer
class NoProjectSerializer(serializers.ModelSerializer):
applications = serializers.SerializerMethodField('get_user_applications')
instances = serializers.SerializerMethodField('get_user_instances')
volumes = serializers.SerializerMethodField('get_user_volumes')
def get_user_applications(self, atmo_user):
return [ApplicationSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.application_set.filter(only_current(), projects=None)]
def get_user_instances(self, atmo_user):
return [InstanceSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.instance_set.filter(only_current(),
source__provider__active=True,
projects=None)]
def get_user_volumes(self, atmo_user):
return [VolumeSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.volume_set().filter(*only_current_source(),
instance_source__provider__active=True, projects=None)]
class Meta:
model = AtmosphereUser
fields = ('applications', 'instances', 'volumes')
## Instruction:
Remove final references to application
## Code After:
from core.models.user import AtmosphereUser
from core.query import only_current, only_current_source
from rest_framework import serializers
from .instance_serializer import InstanceSerializer
from .volume_serializer import VolumeSerializer
class NoProjectSerializer(serializers.ModelSerializer):
instances = serializers.SerializerMethodField('get_user_instances')
volumes = serializers.SerializerMethodField('get_user_volumes')
def get_user_instances(self, atmo_user):
return [InstanceSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.instance_set.filter(only_current(),
source__provider__active=True,
projects=None)]
def get_user_volumes(self, atmo_user):
return [VolumeSerializer(
item,
context={'request': self.context.get('request')}).data for item in
atmo_user.volume_set().filter(*only_current_source(),
instance_source__provider__active=True, projects=None)]
class Meta:
model = AtmosphereUser
fields = ('instances', 'volumes')
| ...
from rest_framework import serializers
from .instance_serializer import InstanceSerializer
...
class NoProjectSerializer(serializers.ModelSerializer):
instances = serializers.SerializerMethodField('get_user_instances')
...
volumes = serializers.SerializerMethodField('get_user_volumes')
...
model = AtmosphereUser
fields = ('instances', 'volumes')
... |
71cffcb8a8ec7e36dc389a5aa6dc2cc9769a9e97 | distutils/tests/test_ccompiler.py | distutils/tests/test_ccompiler.py | import os
import sys
import platform
import textwrap
import sysconfig
import pytest
from distutils import ccompiler
def _make_strs(paths):
"""
Convert paths to strings for legacy compatibility.
"""
if sys.version_info > (3, 8) and platform.system() != "Windows":
return paths
return list(map(os.fspath, paths))
@pytest.fixture
def c_file(tmp_path):
c_file = tmp_path / 'foo.c'
gen_headers = ('Python.h',)
is_windows = platform.system() == "Windows"
plat_headers = ('windows.h',) * is_windows
all_headers = gen_headers + plat_headers
headers = '\n'.join(f'#include <{header}>\n' for header in all_headers)
payload = (
textwrap.dedent(
"""
#headers
void PyInit_foo(void) {}
"""
)
.lstrip()
.replace('#headers', headers)
)
c_file.write_text(payload)
return c_file
def test_set_include_dirs(c_file):
"""
Extensions should build even if set_include_dirs is invoked.
In particular, compiler-specific paths should not be overridden.
"""
compiler = ccompiler.new_compiler()
python = sysconfig.get_paths()['include']
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
| import os
import sys
import platform
import textwrap
import sysconfig
import pytest
from distutils import ccompiler
def _make_strs(paths):
"""
Convert paths to strings for legacy compatibility.
"""
if sys.version_info > (3, 8) and platform.system() != "Windows":
return paths
return list(map(os.fspath, paths))
@pytest.fixture
def c_file(tmp_path):
c_file = tmp_path / 'foo.c'
gen_headers = ('Python.h',)
is_windows = platform.system() == "Windows"
plat_headers = ('windows.h',) * is_windows
all_headers = gen_headers + plat_headers
headers = '\n'.join(f'#include <{header}>\n' for header in all_headers)
payload = (
textwrap.dedent(
"""
#headers
void PyInit_foo(void) {}
"""
)
.lstrip()
.replace('#headers', headers)
)
c_file.write_text(payload)
return c_file
def test_set_include_dirs(c_file):
"""
Extensions should build even if set_include_dirs is invoked.
In particular, compiler-specific paths should not be overridden.
"""
compiler = ccompiler.new_compiler()
python = sysconfig.get_paths()['include']
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
# do it again, setting include dirs after any initialization
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
| Extend the test to compile a second time after setting include dirs again. | Extend the test to compile a second time after setting include dirs again.
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | import os
import sys
import platform
import textwrap
import sysconfig
import pytest
from distutils import ccompiler
def _make_strs(paths):
"""
Convert paths to strings for legacy compatibility.
"""
if sys.version_info > (3, 8) and platform.system() != "Windows":
return paths
return list(map(os.fspath, paths))
@pytest.fixture
def c_file(tmp_path):
c_file = tmp_path / 'foo.c'
gen_headers = ('Python.h',)
is_windows = platform.system() == "Windows"
plat_headers = ('windows.h',) * is_windows
all_headers = gen_headers + plat_headers
headers = '\n'.join(f'#include <{header}>\n' for header in all_headers)
payload = (
textwrap.dedent(
"""
#headers
void PyInit_foo(void) {}
"""
)
.lstrip()
.replace('#headers', headers)
)
c_file.write_text(payload)
return c_file
def test_set_include_dirs(c_file):
"""
Extensions should build even if set_include_dirs is invoked.
In particular, compiler-specific paths should not be overridden.
"""
compiler = ccompiler.new_compiler()
python = sysconfig.get_paths()['include']
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
+ # do it again, setting include dirs after any initialization
+ compiler.set_include_dirs([python])
+ compiler.compile(_make_strs([c_file]))
+ | Extend the test to compile a second time after setting include dirs again. | ## Code Before:
import os
import sys
import platform
import textwrap
import sysconfig
import pytest
from distutils import ccompiler
def _make_strs(paths):
"""
Convert paths to strings for legacy compatibility.
"""
if sys.version_info > (3, 8) and platform.system() != "Windows":
return paths
return list(map(os.fspath, paths))
@pytest.fixture
def c_file(tmp_path):
c_file = tmp_path / 'foo.c'
gen_headers = ('Python.h',)
is_windows = platform.system() == "Windows"
plat_headers = ('windows.h',) * is_windows
all_headers = gen_headers + plat_headers
headers = '\n'.join(f'#include <{header}>\n' for header in all_headers)
payload = (
textwrap.dedent(
"""
#headers
void PyInit_foo(void) {}
"""
)
.lstrip()
.replace('#headers', headers)
)
c_file.write_text(payload)
return c_file
def test_set_include_dirs(c_file):
"""
Extensions should build even if set_include_dirs is invoked.
In particular, compiler-specific paths should not be overridden.
"""
compiler = ccompiler.new_compiler()
python = sysconfig.get_paths()['include']
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
## Instruction:
Extend the test to compile a second time after setting include dirs again.
## Code After:
import os
import sys
import platform
import textwrap
import sysconfig
import pytest
from distutils import ccompiler
def _make_strs(paths):
"""
Convert paths to strings for legacy compatibility.
"""
if sys.version_info > (3, 8) and platform.system() != "Windows":
return paths
return list(map(os.fspath, paths))
@pytest.fixture
def c_file(tmp_path):
c_file = tmp_path / 'foo.c'
gen_headers = ('Python.h',)
is_windows = platform.system() == "Windows"
plat_headers = ('windows.h',) * is_windows
all_headers = gen_headers + plat_headers
headers = '\n'.join(f'#include <{header}>\n' for header in all_headers)
payload = (
textwrap.dedent(
"""
#headers
void PyInit_foo(void) {}
"""
)
.lstrip()
.replace('#headers', headers)
)
c_file.write_text(payload)
return c_file
def test_set_include_dirs(c_file):
"""
Extensions should build even if set_include_dirs is invoked.
In particular, compiler-specific paths should not be overridden.
"""
compiler = ccompiler.new_compiler()
python = sysconfig.get_paths()['include']
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
# do it again, setting include dirs after any initialization
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
| // ... existing code ...
compiler.compile(_make_strs([c_file]))
# do it again, setting include dirs after any initialization
compiler.set_include_dirs([python])
compiler.compile(_make_strs([c_file]))
// ... rest of the code ... |
c1dff6850a0d39c39b0c337f4f5473efb77fc075 | tests/utils/test_forms.py | tests/utils/test_forms.py | import unittest
from app import create_app, db
from app.utils.forms import RedirectForm
class TestRedirectForm(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.redirect_form = RedirectForm()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(self.redirect_form.is_safe_url('http://externalsite.com'))
self.assertTrue(self.redirect_form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
self.assertTrue(self.redirect_form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
self.assertIsNone(self.redirect_form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
self.assertEquals(self.redirect_form.get_redirect_target(), 'safe_internal_link')
| import unittest
from app import create_app, db
from app.utils.forms import RedirectForm
class FormTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class TestRedirectForm(FormTestCase):
def setUp(self):
super().setUp()
self.form = RedirectForm()
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(self.form.is_safe_url('http://externalsite.com'))
self.assertTrue(self.form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
self.assertTrue(self.form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
self.assertIsNone(self.form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
self.assertEquals(self.form.get_redirect_target(), 'safe_internal_link')
| Move setUp and tearDown methods into general FormTestCase class | Move setUp and tearDown methods into general FormTestCase class
| Python | mit | Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger | import unittest
from app import create_app, db
from app.utils.forms import RedirectForm
- class TestRedirectForm(unittest.TestCase):
+ class FormTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
- self.redirect_form = RedirectForm()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
+
+ class TestRedirectForm(FormTestCase):
+ def setUp(self):
+ super().setUp()
+ self.form = RedirectForm()
+
def test_is_safe_url(self):
with self.app.test_request_context():
- self.assertFalse(self.redirect_form.is_safe_url('http://externalsite.com'))
+ self.assertFalse(self.form.is_safe_url('http://externalsite.com'))
- self.assertTrue(self.redirect_form.is_safe_url('http://' + self.app.config[
+ self.assertTrue(self.form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
- self.assertTrue(self.redirect_form.is_safe_url('safe_internal_link'))
+ self.assertTrue(self.form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
- self.assertIsNone(self.redirect_form.get_redirect_target())
+ self.assertIsNone(self.form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
- self.assertEquals(self.redirect_form.get_redirect_target(), 'safe_internal_link')
+ self.assertEquals(self.form.get_redirect_target(), 'safe_internal_link')
| Move setUp and tearDown methods into general FormTestCase class | ## Code Before:
import unittest
from app import create_app, db
from app.utils.forms import RedirectForm
class TestRedirectForm(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.redirect_form = RedirectForm()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(self.redirect_form.is_safe_url('http://externalsite.com'))
self.assertTrue(self.redirect_form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
self.assertTrue(self.redirect_form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
self.assertIsNone(self.redirect_form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
self.assertEquals(self.redirect_form.get_redirect_target(), 'safe_internal_link')
## Instruction:
Move setUp and tearDown methods into general FormTestCase class
## Code After:
import unittest
from app import create_app, db
from app.utils.forms import RedirectForm
class FormTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class TestRedirectForm(FormTestCase):
def setUp(self):
super().setUp()
self.form = RedirectForm()
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(self.form.is_safe_url('http://externalsite.com'))
self.assertTrue(self.form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
self.assertTrue(self.form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
self.assertIsNone(self.form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
self.assertEquals(self.form.get_redirect_target(), 'safe_internal_link')
| # ... existing code ...
class FormTestCase(unittest.TestCase):
def setUp(self):
# ... modified code ...
self.app_ctx = self.app.app_context()
self.app_ctx.push()
...
class TestRedirectForm(FormTestCase):
def setUp(self):
super().setUp()
self.form = RedirectForm()
def test_is_safe_url(self):
...
with self.app.test_request_context():
self.assertFalse(self.form.is_safe_url('http://externalsite.com'))
self.assertTrue(self.form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
self.assertTrue(self.form.is_safe_url('safe_internal_link'))
...
with self.app.test_request_context('/?next=http://externalsite.com'):
self.assertIsNone(self.form.get_redirect_target())
...
with self.app.test_request_context('/?next=safe_internal_link'):
self.assertEquals(self.form.get_redirect_target(), 'safe_internal_link')
# ... rest of the code ... |
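A sketch of the payoff of the refactor: further form tests can now inherit the app-context and database plumbing. SomeOtherForm is a placeholder, not from the commit.

class TestSomeOtherForm(FormTestCase):
    def setUp(self):
        super().setUp()
        self.form = SomeOtherForm()  # hypothetical form under test
    # setUp/tearDown of the app context and database come from FormTestCase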
5d652eacf793dc3aa1873279708f88e16e1c0dfd | eloqua/endpoints_v2.py | eloqua/endpoints_v2.py |
mapping_table = {
'content_type': 'application/json',
'path_prefix': '/API/REST/2.0',
# Campaigns
'get_campaign': {
'method': 'GET',
'path': '/assets/campaign/{{campaign_id}}',
'valid_params': ['depth']
},
'list_campaigns': {
'method': 'GET',
'path': '/assets/campaigns',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
'create_campaign': {
'method': 'POST',
'path': '/assets/campaign',
'status': 201
},
# Campaign folders - UNDOCUMENTED
'get_campaign_folder': {
'method': 'GET',
'path': '/assets/campaign/folder/{{campaign_folder_id}}',
'valid_params': ['depth']
},
'list_campaign_folders': {
'method': 'GET',
'path': '/assets/campaign/folders',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
}
|
mapping_table = {
'content_type': 'application/json',
'path_prefix': '/API/REST/2.0',
# Campaigns
'get_campaign': {
'method': 'GET',
'path': '/assets/campaign/{{campaign_id}}',
'valid_params': ['depth']
},
'list_campaigns': {
'method': 'GET',
'path': '/assets/campaigns',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
'create_campaign': {
'method': 'POST',
'path': '/assets/campaign',
'status': 201
},
'activate_campaign': {
'method': 'POST',
'path': '/assets/campaign/active/{{campaign_id}}',
'valid_params': ['activateNow','scheduledFor','runAsUserId']
},
# Campaign folders - UNDOCUMENTED
'get_campaign_folder': {
'method': 'GET',
'path': '/assets/campaign/folder/{{campaign_folder_id}}',
'valid_params': ['depth']
},
'list_campaign_folders': {
'method': 'GET',
'path': '/assets/campaign/folders',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
}
| Add operation to activate campaign. | Add operation to activate campaign.
| Python | mit | alexcchan/eloqua |
mapping_table = {
'content_type': 'application/json',
'path_prefix': '/API/REST/2.0',
# Campaigns
'get_campaign': {
'method': 'GET',
'path': '/assets/campaign/{{campaign_id}}',
'valid_params': ['depth']
},
'list_campaigns': {
'method': 'GET',
'path': '/assets/campaigns',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
'create_campaign': {
'method': 'POST',
'path': '/assets/campaign',
'status': 201
},
+ 'activate_campaign': {
+ 'method': 'POST',
+ 'path': '/assets/campaign/active/{{campaign_id}}',
+ 'valid_params': ['activateNow','scheduledFor','runAsUserId']
+ },
# Campaign folders - UNDOCUMENTED
'get_campaign_folder': {
'method': 'GET',
'path': '/assets/campaign/folder/{{campaign_folder_id}}',
'valid_params': ['depth']
},
'list_campaign_folders': {
'method': 'GET',
'path': '/assets/campaign/folders',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
}
| Add operation to activate campaign. | ## Code Before:
mapping_table = {
'content_type': 'application/json',
'path_prefix': '/API/REST/2.0',
# Campaigns
'get_campaign': {
'method': 'GET',
'path': '/assets/campaign/{{campaign_id}}',
'valid_params': ['depth']
},
'list_campaigns': {
'method': 'GET',
'path': '/assets/campaigns',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
'create_campaign': {
'method': 'POST',
'path': '/assets/campaign',
'status': 201
},
# Campaign folders - UNDOCUMENTED
'get_campaign_folder': {
'method': 'GET',
'path': '/assets/campaign/folder/{{campaign_folder_id}}',
'valid_params': ['depth']
},
'list_campaign_folders': {
'method': 'GET',
'path': '/assets/campaign/folders',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
}
## Instruction:
Add operation to activate campaign.
## Code After:
mapping_table = {
'content_type': 'application/json',
'path_prefix': '/API/REST/2.0',
# Campaigns
'get_campaign': {
'method': 'GET',
'path': '/assets/campaign/{{campaign_id}}',
'valid_params': ['depth']
},
'list_campaigns': {
'method': 'GET',
'path': '/assets/campaigns',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
'create_campaign': {
'method': 'POST',
'path': '/assets/campaign',
'status': 201
},
'activate_campaign': {
'method': 'POST',
'path': '/assets/campaign/active/{{campaign_id}}',
'valid_params': ['activateNow','scheduledFor','runAsUserId']
},
# Campaign folders - UNDOCUMENTED
'get_campaign_folder': {
'method': 'GET',
'path': '/assets/campaign/folder/{{campaign_folder_id}}',
'valid_params': ['depth']
},
'list_campaign_folders': {
'method': 'GET',
'path': '/assets/campaign/folders',
'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt']
},
}
| # ... existing code ...
},
'activate_campaign': {
'method': 'POST',
'path': '/assets/campaign/active/{{campaign_id}}',
'valid_params': ['activateNow','scheduledFor','runAsUserId']
},
# ... rest of the code ... |
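A hedged usage sketch, assuming the client exposes mapping-table keys as methods and fills the {{campaign_id}} template from a keyword argument; the id and client construction are placeholders.

client.activate_campaign(campaign_id=123, activateNow='true')
# would issue POST /API/REST/2.0/assets/campaign/active/123?activateNow=true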
96a08a9c7b11ce96de1c2034efcc19622c4eb419 | drillion/ship_keys.py | drillion/ship_keys.py | from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.LEFT], right=[key.D, key.RIGHT],
thrust=[key.W, key.UP], fire=[key.S, key.DOWN])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.LEFT], right=[key.RIGHT],
thrust=[key.UP], fire=[key.DOWN])
| from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
thrust=[key.W, key.I], fire=[key.S, key.K])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
fire=[key.K])
| Change second ship controls to IJKL | Change second ship controls to IJKL
| Python | mit | elemel/drillion | from pyglet.window import key
- PLAYER_SHIP_KEYS = dict(left=[key.A, key.LEFT], right=[key.D, key.RIGHT],
+ PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
- thrust=[key.W, key.UP], fire=[key.S, key.DOWN])
+ thrust=[key.W, key.I], fire=[key.S, key.K])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
- PLAYER_2_SHIP_KEYS = dict(left=[key.LEFT], right=[key.RIGHT],
+ PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
- thrust=[key.UP], fire=[key.DOWN])
+ fire=[key.K])
| Change second ship controls to IJKL | ## Code Before:
from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.LEFT], right=[key.D, key.RIGHT],
thrust=[key.W, key.UP], fire=[key.S, key.DOWN])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.LEFT], right=[key.RIGHT],
thrust=[key.UP], fire=[key.DOWN])
## Instruction:
Change second ship controls to IJKL
## Code After:
from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
thrust=[key.W, key.I], fire=[key.S, key.K])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
fire=[key.K])
| # ... existing code ...
PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
thrust=[key.W, key.I], fire=[key.S, key.K])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
# ... modified code ...
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
fire=[key.K])
# ... rest of the code ... |
938725a3693ee885a761e5ba07e75d2b94d78661 | pytask/profile/urls.py | pytask/profile/urls.py | from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notf/browse/$', 'browse_notifications',
name='edit_profile'),
url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
name='view_notification'),
url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
name='delete_notification'),
url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
| from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
| Fix styling issue in URLConf. | Fix styling issue in URLConf.
| Python | agpl-3.0 | madhusudancs/pytask,madhusudancs/pytask,madhusudancs/pytask | from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
- url(r'^notf/browse/$', 'browse_notifications',
+ url(r'^notification/browse/$', 'browse_notifications',
- name='edit_profile'),
- url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
- name='view_notification'),
- url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
- name='delete_notification'),
- url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
- name='unread_notification'),
+ name='browse_notifications'),
+ url(r'^notification/view/(?P<notification_id>\d+)$',
+ 'view_notification', name='view_notification'),
+ url(r'^notification/delete/(?P<notification_id>\d+)$',
+ 'delete_notification', name='delete_notification'),
+ url(r'^notification/unread/(?P<notification_id>\d+)$',
+ 'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
| Fix styling issue in URLConf. | ## Code Before:
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notf/browse/$', 'browse_notifications',
name='edit_profile'),
url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
name='view_notification'),
url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
name='delete_notification'),
url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
## Instruction:
Fix styling issue in URLConf.
## Code After:
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
| ...
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
... |
28e67e04a88b0195184bf43f013c11ea7f320c4f | conveyor/processor.py | conveyor/processor.py | from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
class BulkProcessor(BaseProcessor):
def process(self):
pass
| from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
class BulkProcessor(BaseProcessor):
def process(self):
pass
| Add a method for getting a list of releases to fetch | Add a method for getting a list of releases to fetch
| Python | bsd-2-clause | crateio/carrier | from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
+ def get_releases(self, name, version=None):
+ if version is None:
+ return set(self.client.package_releases(name, True))
+ else:
+ return set([version])
+
class BulkProcessor(BaseProcessor):
def process(self):
pass
| Add a method for getting a list of releases to fetch | ## Code Before:
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
class BulkProcessor(BaseProcessor):
def process(self):
pass
## Instruction:
Add a method for getting a list of releases to fetch
## Code After:
from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
class BulkProcessor(BaseProcessor):
def process(self):
pass
| ...
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
... |
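A sketch of how the new method might be exercised, assuming the index speaks the PyPI XML-RPC protocol (package_releases with show_hidden=True); the URL is a placeholder.

processor = BulkProcessor("https://pypi.org/pypi")
processor.get_releases("requests")          # every release name, hidden ones included
processor.get_releases("requests", "2.0")   # pinned: just {'2.0'}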
c34817c2740e860493692b630a11fdb7acab76aa | tests/test_simple_features.py | tests/test_simple_features.py | from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(POINTS))
features = wordgraph.describe(datapoints)
assert "" in features
| from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
| Test case for monotonically decreasing graphs | Test case for monotonically decreasing graphs
Generate time series data for values that decrease monotonically over
time.
| Python | apache-2.0 | tleeuwenburg/wordgraph,tleeuwenburg/wordgraph | from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
- datapoints = time_values(float(i) for i in range(POINTS))
+ datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
+ def test_monotonic_down_per_second():
+ datapoints = time_values(10.0 - i for i in range(10))
+ features = wordgraph.describe(datapoints)
+ assert "" in features
+ | Test case for monotonically decreasing graphs | ## Code Before:
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(POINTS))
features = wordgraph.describe(datapoints)
assert "" in features
## Instruction:
Test case for monotonically decreasing graphs
## Code After:
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
| // ... existing code ...
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
// ... modified code ...
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
// ... rest of the code ... |
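A quick peek at what the fixture yields for the new test; note the helper puts the value in x and the timestamp in y, as written above.

time_values(10.0 - i for i in range(3))
# -> [Point(x=10.0, y=1407109280), Point(x=9.0, y=1407109281), Point(x=8.0, y=1407109282)]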
18d973d71255d389369cc4450f721512a13ad6cb | src/impl/geocoder.py | src/impl/geocoder.py | import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
return loc.address
| from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
| Add in-memory geohash cache for reverse geocoding. | Add in-memory geohash cache for reverse geocoding.
| Python | mit | cbigler/jackrabbit-googlev3-geocoder | + from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
- def __init__(self, api_key=None, client_id=None, secret_key=None):
+ def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
+ self._reverse_cache_geohash_length = reverse_cache_geohash
+ self._reverse_cache = {}
+
+ def _using_cache(self):
+ return 0 < self._reverse_cache_geohash_length <= 12
+
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
+ addr = None
- self._reverse_limiter.wait()
- loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
- return loc.address
+ # try to get the address from the local cache, if we're using it
+ if self._using_cache():
+ ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
+ addr = self._reverse_cache.get(ghash)
+
+ # if we didn't get the address from the cache, or we're not using the cache
+ # then get it from Google
+ if not addr:
+ self._reverse_limiter.wait()
+ loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
+ addr = loc.address
+
+ # if we're using the cache, save the value we just got back
+ if addr and self._using_cache():
+ self._reverse_cache[ghash] = addr
+
+ return addr
+ | Add in-memory geohash cache for reverse geocoding. | ## Code Before:
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
return loc.address
## Instruction:
Add in-memory geohash cache for reverse geocoding.
## Code After:
from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
| # ... existing code ...
from Geohash import geohash
import geopy
# ... modified code ...
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
...
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
...
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
# ... rest of the code ... |
46df020f5f349ac02c509e334ffd7e1f5970915b | detectem/exceptions.py | detectem/exceptions.py | class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(msg)
| class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
| Fix in tests for exception messages | Fix in tests for exception messages
| Python | mit | spectresearch/detectem | class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
- super().__init__(msg)
+ super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
- super().__init__(msg)
+ super().__init__(self.msg)
| Fix in tests for exception messages | ## Code Before:
class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(msg)
## Instruction:
Fix in tests for exception messages
## Code After:
class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
| // ... existing code ...
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
// ... modified code ...
self.msg = msg
super().__init__(self.msg)
// ... rest of the code ... |
b624552af638652147ca8b5e49ca109a4723dca1 | MoMMI/Modules/development.py | MoMMI/Modules/development.py | from discord import Message
from typing import re as typing_re
from MoMMI.commands import command
from MoMMI.master import master
from MoMMI.server import MChannel
@command("reload", "reload", roles=["owner"])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
await master.reload_modules()
@command("modules", "modules", roles=["owner"])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
msg = "```"
for module in channel.server.master.modules.values():
msg += f"{module.name}:\n"
for handler in module.handlers.values():
msg += f"* {handler.name} ({type(handler)})\n"
msg += "```"
await channel.send(msg)
| from discord import Message
from typing import re as typing_re
from MoMMI.commands import command
from MoMMI.master import master
from MoMMI.server import MChannel
from MoMMI.role import MRoleType
@command("reload", "reload", roles=[MRoleType.OWNER])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
await master.reload_modules()
@command("modules", "modules", roles=[MRoleType.OWNER])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
msg = "```"
for module in channel.server.master.modules.values():
msg += f"{module.name}:\n"
for handler in module.handlers.values():
msg += f"* {handler.name} ({type(handler)})\n"
msg += "```"
await channel.send(msg)
| Fix dev commands using string roles. | Fix dev commands using string roles.
| Python | mit | PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI | from discord import Message
from typing import re as typing_re
from MoMMI.commands import command
from MoMMI.master import master
from MoMMI.server import MChannel
+ from MoMMI.role import MRoleType
- @command("reload", "reload", roles=["owner"])
+ @command("reload", "reload", roles=[MRoleType.OWNER])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
await master.reload_modules()
- @command("modules", "modules", roles=["owner"])
+ @command("modules", "modules", roles=[MRoleType.OWNER])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
msg = "```"
for module in channel.server.master.modules.values():
msg += f"{module.name}:\n"
for handler in module.handlers.values():
msg += f"* {handler.name} ({type(handler)})\n"
msg += "```"
await channel.send(msg)
| Fix dev commands using string roles. | ## Code Before:
from discord import Message
from typing import re as typing_re
from MoMMI.commands import command
from MoMMI.master import master
from MoMMI.server import MChannel
@command("reload", "reload", roles=["owner"])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
await master.reload_modules()
@command("modules", "modules", roles=["owner"])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
msg = "```"
for module in channel.server.master.modules.values():
msg += f"{module.name}:\n"
for handler in module.handlers.values():
msg += f"* {handler.name} ({type(handler)})\n"
msg += "```"
await channel.send(msg)
## Instruction:
Fix dev commands using string roles.
## Code After:
from discord import Message
from typing import re as typing_re
from MoMMI.commands import command
from MoMMI.master import master
from MoMMI.server import MChannel
from MoMMI.role import MRoleType
@command("reload", "reload", roles=[MRoleType.OWNER])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
await master.reload_modules()
@command("modules", "modules", roles=[MRoleType.OWNER])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
msg = "```"
for module in channel.server.master.modules.values():
msg += f"{module.name}:\n"
for handler in module.handlers.values():
msg += f"* {handler.name} ({type(handler)})\n"
msg += "```"
await channel.send(msg)
| # ... existing code ...
from MoMMI.server import MChannel
from MoMMI.role import MRoleType
@command("reload", "reload", roles=[MRoleType.OWNER])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
# ... modified code ...
@command("modules", "modules", roles=[MRoleType.OWNER])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
# ... rest of the code ... |
39d7ec0fe9fdbdd152dfcc2d4280b784f6315886 | stardate/urls/index_urls.py | stardate/urls/index_urls.py | from django.conf.urls import include, url
from django.views import generic
from stardate.models import Blog
from stardate.views import (
BlogCreate,
select_backend,
process_webhook,
)
urlpatterns = [
url(r'^new/$', select_backend, name='blog-new'),
url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'),
url(r'^providers/$', select_backend, name='provider-select'),
url(r'^webhook/$', process_webhook, name='webhook'),
url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'),
]
| from django.conf.urls import include, url
from django.views import generic
from stardate.models import Blog
from stardate.views import (
BlogCreate,
select_backend,
process_webhook,
)
urlpatterns = [
url(r'^new/$', select_backend, name='blog-new'),
url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'),
url(r'^providers/$', select_backend, name='provider-select'),
url(r'^webhook/$', process_webhook, name='webhook'),
url(r'^(?P<blog_slug>[-\w]+)/', include('stardate.urls.blog_urls')),
url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'),
]
| Revert "remove blog urls from index urls" | Revert "remove blog urls from index urls"
This reverts commit f8d5541a8e5de124dcec62a32bd19a8226869622.
| Python | bsd-3-clause | blturner/django-stardate,blturner/django-stardate | from django.conf.urls import include, url
from django.views import generic
from stardate.models import Blog
from stardate.views import (
BlogCreate,
select_backend,
process_webhook,
)
urlpatterns = [
url(r'^new/$', select_backend, name='blog-new'),
url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'),
url(r'^providers/$', select_backend, name='provider-select'),
url(r'^webhook/$', process_webhook, name='webhook'),
+ url(r'^(?P<blog_slug>[-\w]+)/', include('stardate.urls.blog_urls')),
url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'),
]
| Revert "remove blog urls from index urls" | ## Code Before:
from django.conf.urls import include, url
from django.views import generic
from stardate.models import Blog
from stardate.views import (
BlogCreate,
select_backend,
process_webhook,
)
urlpatterns = [
url(r'^new/$', select_backend, name='blog-new'),
url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'),
url(r'^providers/$', select_backend, name='provider-select'),
url(r'^webhook/$', process_webhook, name='webhook'),
url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'),
]
## Instruction:
Revert "remove blog urls from index urls"
## Code After:
from django.conf.urls import include, url
from django.views import generic
from stardate.models import Blog
from stardate.views import (
BlogCreate,
select_backend,
process_webhook,
)
urlpatterns = [
url(r'^new/$', select_backend, name='blog-new'),
url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'),
url(r'^providers/$', select_backend, name='provider-select'),
url(r'^webhook/$', process_webhook, name='webhook'),
url(r'^(?P<blog_slug>[-\w]+)/', include('stardate.urls.blog_urls')),
url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'),
]
| ...
url(r'^webhook/$', process_webhook, name='webhook'),
url(r'^(?P<blog_slug>[-\w]+)/', include('stardate.urls.blog_urls')),
url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'),
... |
d01a5cdf950b7421703e2a018ee0306935e79555 | sugar/activity/__init__.py | sugar/activity/__init__.py | import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
settings.set_string_property('gtk-font-name', 'Sans 14', '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
| import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
| Move font size in the theme | Move font size in the theme
| Python | lgpl-2.1 | Daksh/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,quozl/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,godiard/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit,sugarlabs/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,tchx84/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,godiard/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit | import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
- settings.set_string_property('gtk-font-name', 'Sans 14', '')
-
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
| Move font size in the theme | ## Code Before:
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
settings.set_string_property('gtk-font-name', 'Sans 14', '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
## Instruction:
Move font size in the theme
## Code After:
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
| // ... existing code ...
def get_default_type(activity_type):
// ... rest of the code ... |
2c449a27be2e9e9ec57cc6f8e31825064195290d | modules/weather_module/weather_module.py | modules/weather_module/weather_module.py | import juliet_module
from pygame import Rect
from time import time
from os import getcwd
class weather_module(juliet_module.module):
mod_name = "weather_module"
__last_update = time()
__api = None
def __init__(self, _id, _keyfile):
print("Initializing Weather Module")
self.mod_id = _id
with open(_keyfile, 'r') as f:
self.__api = f.read()
def draw(self, surf):
"Takes a surface object and blits its data onto it"
print("Draw call of Weather Module")
def update(self):
"Update this module's internal state (do things like time updates, get weather, etc."
# print("Update call of Weather Module")
def new_module(_id = -1, _keyfile = 'modules/weather_module/api.key'):
return weather_module(_id, _keyfile)
| import juliet_module
from pygame import Rect
from time import time
import forecastio
class weather_module(juliet_module.module):
mod_name = "weather_module"
__last_update = time()
__api = None
__forecast = None
def __init__(self, _id, _keyfile):
print("Initializing Weather Module")
self.mod_id = _id
with open(_keyfile, 'r') as f:
self.__api = f.read()[:-1]
lat = 40.7127
lng = 74.0059
forecastio.load_forecast(self.__api, lat, lng, units = "us", callback=self.request_callback)
def draw(self, surf):
"Takes a surface object and blits its data onto it"
print("Draw call of Weather Module")
def update(self):
"Update this module's internal state (do things like time updates, get weather, etc."
# print("Update call of Weather Module")
def request_callback(self, forecast):
self.__forecast = forecast
print(self.__forecast.daily().summary)
def new_module(_id = -1, _keyfile = 'modules/weather_module/api.key'):
return weather_module(_id, _keyfile)
| Add test forecast.io API call | Add test forecast.io API call
| Python | bsd-2-clause | halfbro/juliet | import juliet_module
from pygame import Rect
from time import time
+ import forecastio
-
- from os import getcwd
class weather_module(juliet_module.module):
mod_name = "weather_module"
__last_update = time()
__api = None
+ __forecast = None
def __init__(self, _id, _keyfile):
print("Initializing Weather Module")
self.mod_id = _id
with open(_keyfile, 'r') as f:
- self.__api = f.read()
+ self.__api = f.read()[:-1]
+
+ lat = 40.7127
+ lng = 74.0059
+ forecastio.load_forecast(self.__api, lat, lng, units = "us", callback=self.request_callback)
def draw(self, surf):
"Takes a surface object and blits its data onto it"
print("Draw call of Weather Module")
def update(self):
"Update this module's internal state (do things like time updates, get weather, etc."
# print("Update call of Weather Module")
+ def request_callback(self, forecast):
+ self.__forecast = forecast
+ print(self.__forecast.daily().summary)
+
def new_module(_id = -1, _keyfile = 'modules/weather_module/api.key'):
return weather_module(_id, _keyfile)
| Add test forecast.io API call | ## Code Before:
import juliet_module
from pygame import Rect
from time import time
from os import getcwd
class weather_module(juliet_module.module):
mod_name = "weather_module"
__last_update = time()
__api = None
def __init__(self, _id, _keyfile):
print("Initializing Weather Module")
self.mod_id = _id
with open(_keyfile, 'r') as f:
self.__api = f.read()
def draw(self, surf):
"Takes a surface object and blits its data onto it"
print("Draw call of Weather Module")
def update(self):
"Update this module's internal state (do things like time updates, get weather, etc."
# print("Update call of Weather Module")
def new_module(_id = -1, _keyfile = 'modules/weather_module/api.key'):
return weather_module(_id, _keyfile)
## Instruction:
Add test forecast.io API call
## Code After:
import juliet_module
from pygame import Rect
from time import time
import forecastio
class weather_module(juliet_module.module):
mod_name = "weather_module"
__last_update = time()
__api = None
__forecast = None
def __init__(self, _id, _keyfile):
print("Initializing Weather Module")
self.mod_id = _id
with open(_keyfile, 'r') as f:
self.__api = f.read()[:-1]
lat = 40.7127
lng = 74.0059
forecastio.load_forecast(self.__api, lat, lng, units = "us", callback=self.request_callback)
def draw(self, surf):
"Takes a surface object and blits its data onto it"
print("Draw call of Weather Module")
def update(self):
"Update this module's internal state (do things like time updates, get weather, etc."
# print("Update call of Weather Module")
def request_callback(self, forecast):
self.__forecast = forecast
print(self.__forecast.daily().summary)
def new_module(_id = -1, _keyfile = 'modules/weather_module/api.key'):
return weather_module(_id, _keyfile)
| ...
from time import time
import forecastio
...
__api = None
__forecast = None
...
with open(_keyfile, 'r') as f:
self.__api = f.read()[:-1]
lat = 40.7127
lng = 74.0059
forecastio.load_forecast(self.__api, lat, lng, units = "us", callback=self.request_callback)
...
def request_callback(self, forecast):
self.__forecast = forecast
print(self.__forecast.daily().summary)
def new_module(_id = -1, _keyfile = 'modules/weather_module/api.key'):
... |
a06c3845b2e827ff34bdd34844db39a74826f123 | meteocalc/mimicfloat.py | meteocalc/mimicfloat.py | import operator
def math_method(name, right=False):
def wrapper(self, other):
value = self.value
math_func = getattr(operator, name)
if right:
value, other = other, value
result = math_func(value, other)
return type(self)(result, units=self.units)
return wrapper
class MimicFloat(type):
overrride_methods = ('__add__', '__sub__', '__mul__', '__truediv__')
overrride_rmethods = ('__radd__', '__rsub__', '__rmul__', '__rtruediv__')
def __new__(cls, name, bases, namespace):
for method in cls.overrride_methods:
namespace[method] = math_method(method)
for rmethod in cls.overrride_rmethods:
method = rmethod.replace('__r', '__')
namespace[rmethod] = math_method(method, right=True)
return super(MimicFloat, cls).__new__(cls, name, bases, namespace)
| from functools import wraps
import operator
def math_method(name, right=False):
math_func = getattr(operator, name)
@wraps(math_func)
def wrapper(self, other):
value = self.value
if right:
value, other = other, value
result = math_func(value, other)
return type(self)(result, units=self.units)
return wrapper
class MimicFloat(type):
math_methods = ('__add__', '__sub__', '__mul__', '__truediv__')
math_rmethods = ('__radd__', '__rsub__', '__rmul__', '__rtruediv__')
def __new__(cls, name, bases, namespace):
for method in cls.math_methods:
namespace[method] = math_method(method)
for rmethod in cls.math_rmethods:
method = rmethod.replace('__r', '__')
namespace[rmethod] = math_method(method, right=True)
return super(MimicFloat, cls).__new__(cls, name, bases, namespace)
| Make math method wrapping nicer | Make math method wrapping nicer
| Python | mit | malexer/meteocalc | + from functools import wraps
import operator
def math_method(name, right=False):
+ math_func = getattr(operator, name)
+
+ @wraps(math_func)
def wrapper(self, other):
value = self.value
- math_func = getattr(operator, name)
if right:
value, other = other, value
result = math_func(value, other)
return type(self)(result, units=self.units)
return wrapper
class MimicFloat(type):
- overrride_methods = ('__add__', '__sub__', '__mul__', '__truediv__')
+ math_methods = ('__add__', '__sub__', '__mul__', '__truediv__')
- overrride_rmethods = ('__radd__', '__rsub__', '__rmul__', '__rtruediv__')
+ math_rmethods = ('__radd__', '__rsub__', '__rmul__', '__rtruediv__')
def __new__(cls, name, bases, namespace):
- for method in cls.overrride_methods:
+ for method in cls.math_methods:
namespace[method] = math_method(method)
- for rmethod in cls.overrride_rmethods:
+ for rmethod in cls.math_rmethods:
method = rmethod.replace('__r', '__')
namespace[rmethod] = math_method(method, right=True)
return super(MimicFloat, cls).__new__(cls, name, bases, namespace)
| Make math method wrapping nicer | ## Code Before:
import operator
def math_method(name, right=False):
def wrapper(self, other):
value = self.value
math_func = getattr(operator, name)
if right:
value, other = other, value
result = math_func(value, other)
return type(self)(result, units=self.units)
return wrapper
class MimicFloat(type):
overrride_methods = ('__add__', '__sub__', '__mul__', '__truediv__')
overrride_rmethods = ('__radd__', '__rsub__', '__rmul__', '__rtruediv__')
def __new__(cls, name, bases, namespace):
for method in cls.overrride_methods:
namespace[method] = math_method(method)
for rmethod in cls.overrride_rmethods:
method = rmethod.replace('__r', '__')
namespace[rmethod] = math_method(method, right=True)
return super(MimicFloat, cls).__new__(cls, name, bases, namespace)
## Instruction:
Make math method wrapping nicer
## Code After:
from functools import wraps
import operator
def math_method(name, right=False):
math_func = getattr(operator, name)
@wraps(math_func)
def wrapper(self, other):
value = self.value
if right:
value, other = other, value
result = math_func(value, other)
return type(self)(result, units=self.units)
return wrapper
class MimicFloat(type):
math_methods = ('__add__', '__sub__', '__mul__', '__truediv__')
math_rmethods = ('__radd__', '__rsub__', '__rmul__', '__rtruediv__')
def __new__(cls, name, bases, namespace):
for method in cls.math_methods:
namespace[method] = math_method(method)
for rmethod in cls.math_rmethods:
method = rmethod.replace('__r', '__')
namespace[rmethod] = math_method(method, right=True)
return super(MimicFloat, cls).__new__(cls, name, bases, namespace)
| // ... existing code ...
from functools import wraps
import operator
// ... modified code ...
def math_method(name, right=False):
math_func = getattr(operator, name)
@wraps(math_func)
def wrapper(self, other):
...
value = self.value
...
math_methods = ('__add__', '__sub__', '__mul__', '__truediv__')
math_rmethods = ('__radd__', '__rsub__', '__rmul__', '__rtruediv__')
...
def __new__(cls, name, bases, namespace):
for method in cls.math_methods:
namespace[method] = math_method(method)
...
for rmethod in cls.math_rmethods:
method = rmethod.replace('__r', '__')
// ... rest of the code ... |
46344032e016b51e9d34b2620b72e418533374e2 | hyper/http20/frame.py | hyper/http20/frame.py | class Frame(object):
"""
The base class for all HTTP/2.0 frames.
"""
# The flags defined on this type of frame.
defined_flags = []
# The type of the frame.
type = 0
def __init__(self, stream_id):
self.stream_id = stream_id
self.flags = set()
def parse_flags(self, flag_byte):
for flag, flag_bit in self.defined_flags:
if flag_byte & flag_bit:
self.flags.add(flag)
return self.flags
def serialize(self):
raise NotImplementedError()
def _get_len(self):
raise NotImplementedError()
| # A map of type byte to frame class.
FRAMES = {
0x00: DataFrame
}
class Frame(object):
"""
The base class for all HTTP/2.0 frames.
"""
# The flags defined on this type of frame.
defined_flags = []
# The type of the frame.
type = 0
def __init__(self, stream_id):
self.stream_id = stream_id
self.flags = set()
def parse_flags(self, flag_byte):
for flag, flag_bit in self.defined_flags:
if flag_byte & flag_bit:
self.flags.add(flag)
return self.flags
def serialize(self):
raise NotImplementedError()
def _get_len(self):
raise NotImplementedError()
| Define a mapping between byte and class. | Define a mapping between byte and class.
| Python | mit | Lukasa/hyper,fredthomsen/hyper,Lukasa/hyper,masaori335/hyper,lawnmowerlatte/hyper,jdecuyper/hyper,irvind/hyper,jdecuyper/hyper,lawnmowerlatte/hyper,irvind/hyper,masaori335/hyper,plucury/hyper,plucury/hyper,fredthomsen/hyper | + # A map of type byte to frame class.
+ FRAMES = {
+ 0x00: DataFrame
+ }
+
+
class Frame(object):
"""
The base class for all HTTP/2.0 frames.
"""
# The flags defined on this type of frame.
defined_flags = []
# The type of the frame.
type = 0
def __init__(self, stream_id):
self.stream_id = stream_id
self.flags = set()
def parse_flags(self, flag_byte):
for flag, flag_bit in self.defined_flags:
if flag_byte & flag_bit:
self.flags.add(flag)
return self.flags
def serialize(self):
raise NotImplementedError()
def _get_len(self):
raise NotImplementedError()
| Define a mapping between byte and class. | ## Code Before:
class Frame(object):
"""
The base class for all HTTP/2.0 frames.
"""
# The flags defined on this type of frame.
defined_flags = []
# The type of the frame.
type = 0
def __init__(self, stream_id):
self.stream_id = stream_id
self.flags = set()
def parse_flags(self, flag_byte):
for flag, flag_bit in self.defined_flags:
if flag_byte & flag_bit:
self.flags.add(flag)
return self.flags
def serialize(self):
raise NotImplementedError()
def _get_len(self):
raise NotImplementedError()
## Instruction:
Define a mapping between byte and class.
## Code After:
# A map of type byte to frame class.
FRAMES = {
0x00: DataFrame
}
class Frame(object):
"""
The base class for all HTTP/2.0 frames.
"""
# The flags defined on this type of frame.
defined_flags = []
# The type of the frame.
type = 0
def __init__(self, stream_id):
self.stream_id = stream_id
self.flags = set()
def parse_flags(self, flag_byte):
for flag, flag_bit in self.defined_flags:
if flag_byte & flag_bit:
self.flags.add(flag)
return self.flags
def serialize(self):
raise NotImplementedError()
def _get_len(self):
raise NotImplementedError()
| # ... existing code ...
# A map of type byte to frame class.
FRAMES = {
0x00: DataFrame
}
class Frame(object):
# ... rest of the code ... |
840643522e32484b1c44352dc095e7369a44ef7b | header_swap_axis.py | header_swap_axis.py |
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
|
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
| Deal with CASA's empty header keywords | Deal with CASA's empty header keywords
| Python | mit | e-koch/ewky_scripts,e-koch/ewky_scripts |
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
+ # CASA sometimes gives empty keys? ""
+ if len(key) == 0:
+ continue
+
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
| Deal with CASA's empty header keywords | ## Code Before:
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
## Instruction:
Deal with CASA's empty header keywords
## Code After:
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
| # ... existing code ...
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
# ... rest of the code ... |
0e779581be648ca80eea6b97f9963606d85659b9 | opensfm/commands/__init__.py | opensfm/commands/__init__.py |
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
]
|
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
import export_visualsfm
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
export_visualsfm,
]
| Add exporter to VisualSfM format | Add exporter to VisualSfM format
| Python | bsd-2-clause | BrookRoberts/OpenSfM,mapillary/OpenSfM,sunbingfengPI/OpenSFM_Test,BrookRoberts/OpenSfM,sunbingfengPI/OpenSFM_Test,sunbingfengPI/OpenSFM_Test,sunbingfengPI/OpenSFM_Test,oscarlorentzon/OpenSfM,BrookRoberts/OpenSfM,oscarlorentzon/OpenSfM,oscarlorentzon/OpenSfM,oscarlorentzon/OpenSfM,mapillary/OpenSfM,mapillary/OpenSfM,BrookRoberts/OpenSfM,BrookRoberts/OpenSfM,mapillary/OpenSfM,mapillary/OpenSfM,sunbingfengPI/OpenSFM_Test,oscarlorentzon/OpenSfM |
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
+ import export_visualsfm
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
+ export_visualsfm,
]
| Add exporter to VisualSfM format | ## Code Before:
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
]
## Instruction:
Add exporter to VisualSfM format
## Code After:
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
import export_visualsfm
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
export_visualsfm,
]
| # ... existing code ...
import export_openmvs
import export_visualsfm
# ... modified code ...
export_openmvs,
export_visualsfm,
]
# ... rest of the code ... |
cb2c937fa16590a7431f450c0fc79cc68dd9984c | readthedocs/cdn/purge.py | readthedocs/cdn/purge.py | import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')
def purge(files):
log.error("CDN not configured, can't purge files")
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
if CDN_SERVICE == 'maxcdn':
from maxcdn import MaxCDN as cdn_service
api = cdn_service(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
| import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')
def purge(files):
log.error("CDN not configured, can't purge files")
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
if CDN_SERVICE == 'maxcdn':
from maxcdn import MaxCDN
api = MaxCDN(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
| Clean up bad logic to make it slightly less bad | Clean up bad logic to make it slightly less bad
| Python | mit | sid-kap/readthedocs.org,wanghaven/readthedocs.org,CedarLogic/readthedocs.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,titiushko/readthedocs.org,laplaceliu/readthedocs.org,hach-que/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,safwanrahman/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,stevepiercy/readthedocs.org,singingwolfboy/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,techtonik/readthedocs.org,hach-que/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,atsuyim/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,wanghaven/readthedocs.org,soulshake/readthedocs.org,emawind84/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,mhils/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,royalwang/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,davidfischer/readthedocs.org,clarkperkins/readthedocs.org,titiushko/readthedocs.org,techtonik/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,stevepiercy/readthedocs.org,GovReady/readthedocs.org,pombredanne/readthedocs.org,royalwang/readthedocs.org,wijerasa/readthedocs.org,laplaceliu/readthedocs.org,sid-kap/readthedocs.org,GovReady/readthedocs.org,espdev/readthedocs.org,kenshinthebattosai/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,Tazer/readthedocs.org,VishvajitP/readthedocs.org,fujita-shintaro/readthedocs.org,tddv/readthedocs.org,stevepiercy/readthedocs.org,fujita-shintaro/readthedocs.org,royalwang/readthedocs.org,LukasBoersma/readthedocs.org,wijerasa/readthedocs.org,sid-kap/readthedocs.org,gjtorikian/readthedocs.org,emawind84/readthedocs.org,kenwang76/readthedocs.org,CedarLogic/readthedocs.org,wanghaven/readthedocs.org,tddv/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,davidfischer/readthedocs.org,LukasBoersma/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,singingwolfboy/readthedocs.org,istresearch/readthedocs.org,emawind84/readthedocs.org,SteveViss/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,attakei/readthedocs-oauth,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,VishvajitP/readthedocs.org,stevepiercy/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,kenshinthebattosai/readthedocs.org,hach-que/readthedocs.org,gjtorikian/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,wijerasa/readthedocs.org,mhils/readthedocs.org,gjtorikian/readthedocs.org,GovReady/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,rtfd/readthedocs.org,laplaceliu/readthedocs.org,soulshake/readthedocs.org,rtfd/readthedocs.org,soulshake/readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,soulshake/readthedocs.org,hach-que/readthedocs.org,espdev/readthedocs.org,davidfischer/readthedocs.org,kenshinthebattosai/readthedocs.org,rtfd/readthedocs.org,GovReady/readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,espdev/readthedocs.org,gjtorikian/readthedocs.org,pombredanne/readthedocs.org,kenshinthebattosai/readthedocs.org,SteveViss/readthedocs.org,mhils/readthedocs.org,wijeras
a/readthedocs.org,laplaceliu/readthedocs.org,techtonik/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,atsuyim/readthedocs.org | import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')
def purge(files):
log.error("CDN not configured, can't purge files")
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
if CDN_SERVICE == 'maxcdn':
- from maxcdn import MaxCDN as cdn_service
+ from maxcdn import MaxCDN
- api = cdn_service(CDN_USERNAME, CDN_KEY, CDN_SECET)
+ api = MaxCDN(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
| Clean up bad logic to make it slightly less bad | ## Code Before:
import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')
def purge(files):
log.error("CDN not configured, can't purge files")
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
if CDN_SERVICE == 'maxcdn':
from maxcdn import MaxCDN as cdn_service
api = cdn_service(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
## Instruction:
Clean up bad logic to make it slightly less bad
## Code After:
import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')
def purge(files):
log.error("CDN not configured, can't purge files")
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
if CDN_SERVICE == 'maxcdn':
from maxcdn import MaxCDN
api = MaxCDN(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
| # ... existing code ...
if CDN_SERVICE == 'maxcdn':
from maxcdn import MaxCDN
api = MaxCDN(CDN_USERNAME, CDN_KEY, CDN_SECET)
# ... rest of the code ... |
8a544ac2db71d4041c77fdb0ddfe27b84b565bb5 | salt/utils/saltminionservice.py | salt/utils/saltminionservice.py | from salt.utils.winservice import Service, instart
import salt
# Import third party libs
import win32serviceutil
import win32service
import winerror
import win32api
# Import python libs
import sys
class MinionService(Service):
def start(self):
self.runflag = True
self.log("Starting the Salt Minion")
minion = salt.Minion()
minion.start()
while self.runflag:
pass
#self.sleep(10)
#self.log("I'm alive ...")
def stop(self):
self.runflag = False
self.log("Shutting down the Salt Minion")
def console_event_handler(event):
if event == 5:
# Do nothing on CTRL_LOGOFF_EVENT
return True
return False
def _main():
win32api.SetConsoleCtrlHandler(console_event_handler, 1)
servicename = 'salt-minion'
try:
status = win32serviceutil.QueryServiceStatus(servicename)
except win32service.error as details:
if details[0] == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
instart(MinionService, servicename, 'Salt Minion')
sys.exit(0)
if status[1] == win32service.SERVICE_RUNNING:
win32serviceutil.StopServiceWithDeps(servicename)
win32serviceutil.StartService(servicename)
else:
win32serviceutil.StartService(servicename)
if __name__ == '__main__':
_main()
| from salt.utils.winservice import Service, instart
import salt
# Import third party libs
import win32serviceutil
import win32service
import winerror
# Import python libs
import sys
class MinionService(Service):
def start(self):
self.runflag = True
self.log("Starting the Salt Minion")
minion = salt.Minion()
minion.start()
while self.runflag:
pass
#self.sleep(10)
#self.log("I'm alive ...")
def stop(self):
self.runflag = False
self.log("Shutting down the Salt Minion")
def _main():
servicename = 'salt-minion'
try:
status = win32serviceutil.QueryServiceStatus(servicename)
except win32service.error as details:
if details[0] == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
instart(MinionService, servicename, 'Salt Minion')
sys.exit(0)
if status[1] == win32service.SERVICE_RUNNING:
win32serviceutil.StopServiceWithDeps(servicename)
win32serviceutil.StartService(servicename)
else:
win32serviceutil.StartService(servicename)
if __name__ == '__main__':
_main()
| Revert "Catch and ignore CTRL_LOGOFF_EVENT when run as a windows service" | Revert "Catch and ignore CTRL_LOGOFF_EVENT when run as a windows service"
This reverts commit a7ddf81b37b578b1448f83b0efb4f7116de0c3fb.
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | from salt.utils.winservice import Service, instart
import salt
# Import third party libs
import win32serviceutil
import win32service
import winerror
- import win32api
# Import python libs
import sys
class MinionService(Service):
def start(self):
self.runflag = True
self.log("Starting the Salt Minion")
minion = salt.Minion()
minion.start()
while self.runflag:
pass
#self.sleep(10)
#self.log("I'm alive ...")
def stop(self):
self.runflag = False
self.log("Shutting down the Salt Minion")
- def console_event_handler(event):
- if event == 5:
- # Do nothing on CTRL_LOGOFF_EVENT
- return True
- return False
def _main():
- win32api.SetConsoleCtrlHandler(console_event_handler, 1)
servicename = 'salt-minion'
try:
status = win32serviceutil.QueryServiceStatus(servicename)
except win32service.error as details:
if details[0] == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
instart(MinionService, servicename, 'Salt Minion')
sys.exit(0)
if status[1] == win32service.SERVICE_RUNNING:
win32serviceutil.StopServiceWithDeps(servicename)
win32serviceutil.StartService(servicename)
else:
win32serviceutil.StartService(servicename)
if __name__ == '__main__':
_main()
| Revert "Catch and ignore CTRL_LOGOFF_EVENT when run as a windows service" | ## Code Before:
from salt.utils.winservice import Service, instart
import salt
# Import third party libs
import win32serviceutil
import win32service
import winerror
import win32api
# Import python libs
import sys
class MinionService(Service):
def start(self):
self.runflag = True
self.log("Starting the Salt Minion")
minion = salt.Minion()
minion.start()
while self.runflag:
pass
#self.sleep(10)
#self.log("I'm alive ...")
def stop(self):
self.runflag = False
self.log("Shutting down the Salt Minion")
def console_event_handler(event):
if event == 5:
# Do nothing on CTRL_LOGOFF_EVENT
return True
return False
def _main():
win32api.SetConsoleCtrlHandler(console_event_handler, 1)
servicename = 'salt-minion'
try:
status = win32serviceutil.QueryServiceStatus(servicename)
except win32service.error as details:
if details[0] == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
instart(MinionService, servicename, 'Salt Minion')
sys.exit(0)
if status[1] == win32service.SERVICE_RUNNING:
win32serviceutil.StopServiceWithDeps(servicename)
win32serviceutil.StartService(servicename)
else:
win32serviceutil.StartService(servicename)
if __name__ == '__main__':
_main()
## Instruction:
Revert "Catch and ignore CTRL_LOGOFF_EVENT when run as a windows service"
## Code After:
from salt.utils.winservice import Service, instart
import salt
# Import third party libs
import win32serviceutil
import win32service
import winerror
# Import python libs
import sys
class MinionService(Service):
def start(self):
self.runflag = True
self.log("Starting the Salt Minion")
minion = salt.Minion()
minion.start()
while self.runflag:
pass
#self.sleep(10)
#self.log("I'm alive ...")
def stop(self):
self.runflag = False
self.log("Shutting down the Salt Minion")
def _main():
servicename = 'salt-minion'
try:
status = win32serviceutil.QueryServiceStatus(servicename)
except win32service.error as details:
if details[0] == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
instart(MinionService, servicename, 'Salt Minion')
sys.exit(0)
if status[1] == win32service.SERVICE_RUNNING:
win32serviceutil.StopServiceWithDeps(servicename)
win32serviceutil.StartService(servicename)
else:
win32serviceutil.StartService(servicename)
if __name__ == '__main__':
_main()
| ...
import winerror
...
...
def _main():
servicename = 'salt-minion'
... |
ee98b5a5c6b82671738bc60e68ea87d838c5400f | migrations/0020_change_ds_name_to_non_uniqe.py | migrations/0020_change_ds_name_to_non_uniqe.py | from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for index_name in ['unique_name', 'data_sources_name']:
try:
print "Trying to remove data source name uniqueness index with the name: {}".format(index_name)
migrate(migrator.drop_index("data_sources", index_name))
print "Success!"
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit(1)
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
# In some cases it's a constraint:
db.database.execute_sql('ALTER TABLE data_sources DROP CONSTRAINT IF EXISTS unique_name')
# In others only an index:
db.database.execute_sql('DROP INDEX IF EXISTS data_sources_name')
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| Improve the migration for unique data source name | Improve the migration for unique data source name
| Python | bsd-2-clause | ninneko/redash,EverlyWell/redash,hudl/redash,useabode/redash,pubnative/redash,chriszs/redash,akariv/redash,alexanderlz/redash,easytaxibr/redash,pubnative/redash,amino-data/redash,rockwotj/redash,ninneko/redash,guaguadev/redash,rockwotj/redash,denisov-vlad/redash,imsally/redash,ninneko/redash,useabode/redash,crowdworks/redash,EverlyWell/redash,jmvasquez/redashtest,hudl/redash,denisov-vlad/redash,moritz9/redash,stefanseifert/redash,alexanderlz/redash,alexanderlz/redash,chriszs/redash,rockwotj/redash,M32Media/redash,vishesh92/redash,stefanseifert/redash,easytaxibr/redash,EverlyWell/redash,pubnative/redash,44px/redash,ninneko/redash,hudl/redash,amino-data/redash,M32Media/redash,moritz9/redash,guaguadev/redash,chriszs/redash,chriszs/redash,44px/redash,imsally/redash,EverlyWell/redash,amino-data/redash,44px/redash,guaguadev/redash,guaguadev/redash,denisov-vlad/redash,44px/redash,easytaxibr/redash,hudl/redash,getredash/redash,moritz9/redash,ninneko/redash,imsally/redash,imsally/redash,M32Media/redash,jmvasquez/redashtest,easytaxibr/redash,crowdworks/redash,stefanseifert/redash,akariv/redash,moritz9/redash,pubnative/redash,useabode/redash,getredash/redash,crowdworks/redash,pubnative/redash,vishesh92/redash,jmvasquez/redashtest,amino-data/redash,denisov-vlad/redash,getredash/redash,M32Media/redash,vishesh92/redash,guaguadev/redash,jmvasquez/redashtest,easytaxibr/redash,stefanseifert/redash,useabode/redash,akariv/redash,akariv/redash,akariv/redash,alexanderlz/redash,stefanseifert/redash,getredash/redash,vishesh92/redash,getredash/redash,rockwotj/redash,denisov-vlad/redash,crowdworks/redash,jmvasquez/redashtest | from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
+ # In some cases it's a constraint:
+ db.database.execute_sql('ALTER TABLE data_sources DROP CONSTRAINT IF EXISTS unique_name')
+ # In others only an index:
+ db.database.execute_sql('DROP INDEX IF EXISTS data_sources_name')
- success = False
- for index_name in ['unique_name', 'data_sources_name']:
- try:
- print "Trying to remove data source name uniqueness index with the name: {}".format(index_name)
- migrate(migrator.drop_index("data_sources", index_name))
- print "Success!"
- success = True
- break
- except peewee.ProgrammingError:
- db.close_db(None)
-
- if not success:
- print "Failed removing uniqueness constraint on data source name."
- print "Please verify its name in the schema, update the migration and run again."
- exit(1)
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
-
- | Improve the migration for unique data source name | ## Code Before:
from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for index_name in ['unique_name', 'data_sources_name']:
try:
print "Trying to remove data source name uniqueness index with the name: {}".format(index_name)
migrate(migrator.drop_index("data_sources", index_name))
print "Success!"
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit(1)
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
## Instruction:
Improve the migration for unique data source name
## Code After:
from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
# In some cases it's a constraint:
db.database.execute_sql('ALTER TABLE data_sources DROP CONSTRAINT IF EXISTS unique_name')
# In others only an index:
db.database.execute_sql('DROP INDEX IF EXISTS data_sources_name')
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| ...
# Change the uniqueness constraint on data source name to be (org, name):
# In some cases it's a constraint:
db.database.execute_sql('ALTER TABLE data_sources DROP CONSTRAINT IF EXISTS unique_name')
# In others only an index:
db.database.execute_sql('DROP INDEX IF EXISTS data_sources_name')
...
db.close_db(None)
... |
f2a7fe543aa338e81bea692b8267154e64e7478d | polling_stations/apps/file_uploads/utils.py | polling_stations/apps/file_uploads/utils.py | import os
from django.db.models import Q
from councils.models import Council, UserCouncils
def get_domain(request):
return os.environ.get("APP_DOMAIN", request.META.get("HTTP_HOST"))
def assign_councils_to_user(user):
"""
Adds rows to the join table between User and Council
"""
email_domain = user.email.rsplit("@", 1)[1]
councils = Council.objects.filter(
Q(electoral_services_email__contains=email_domain)
| Q(registration_email__contains=email_domain)
)
for council in councils:
UserCouncils.objects.update_or_create(user=user, council=council)
| import os
from django.db.models import Q
from councils.models import Council, UserCouncils
def get_domain(request):
return os.environ.get("APP_DOMAIN", request.META.get("HTTP_HOST"))
def assign_councils_to_user(user):
"""
Adds rows to the join table between User and Council
"""
email_domain = user.email.rsplit("@", 1)[1]
councils = Council.objects.using("logger").filter(
Q(electoral_services_email__contains=email_domain)
| Q(registration_email__contains=email_domain)
)
for council in councils:
UserCouncils.objects.using("logger").update_or_create(
user=user, council=council
)
| Make sure UserCouncil is created in logger db | Make sure UserCouncil is created in logger db
| Python | bsd-3-clause | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | import os
from django.db.models import Q
from councils.models import Council, UserCouncils
def get_domain(request):
return os.environ.get("APP_DOMAIN", request.META.get("HTTP_HOST"))
def assign_councils_to_user(user):
"""
Adds rows to the join table between User and Council
"""
email_domain = user.email.rsplit("@", 1)[1]
- councils = Council.objects.filter(
+ councils = Council.objects.using("logger").filter(
Q(electoral_services_email__contains=email_domain)
| Q(registration_email__contains=email_domain)
)
for council in councils:
- UserCouncils.objects.update_or_create(user=user, council=council)
+ UserCouncils.objects.using("logger").update_or_create(
+ user=user, council=council
+ )
| Make sure UserCouncil is created in logger db | ## Code Before:
import os
from django.db.models import Q
from councils.models import Council, UserCouncils
def get_domain(request):
return os.environ.get("APP_DOMAIN", request.META.get("HTTP_HOST"))
def assign_councils_to_user(user):
"""
Adds rows to the join table between User and Council
"""
email_domain = user.email.rsplit("@", 1)[1]
councils = Council.objects.filter(
Q(electoral_services_email__contains=email_domain)
| Q(registration_email__contains=email_domain)
)
for council in councils:
UserCouncils.objects.update_or_create(user=user, council=council)
## Instruction:
Make sure UserCouncil is created in logger db
## Code After:
import os
from django.db.models import Q
from councils.models import Council, UserCouncils
def get_domain(request):
return os.environ.get("APP_DOMAIN", request.META.get("HTTP_HOST"))
def assign_councils_to_user(user):
"""
Adds rows to the join table between User and Council
"""
email_domain = user.email.rsplit("@", 1)[1]
councils = Council.objects.using("logger").filter(
Q(electoral_services_email__contains=email_domain)
| Q(registration_email__contains=email_domain)
)
for council in councils:
UserCouncils.objects.using("logger").update_or_create(
user=user, council=council
)
| # ... existing code ...
email_domain = user.email.rsplit("@", 1)[1]
councils = Council.objects.using("logger").filter(
Q(electoral_services_email__contains=email_domain)
# ... modified code ...
for council in councils:
UserCouncils.objects.using("logger").update_or_create(
user=user, council=council
)
# ... rest of the code ... |
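The fix hinges on Django's multi-database routing: every ORM call picks its connection independently, so chaining `.using("logger")` onto the read does nothing for the later write. A hedged sketch of the moving parts; the `logger` alias name comes from the commit, the settings values below do not, and `user` stands in for the argument of `assign_councils_to_user`:

```python
# settings.py (illustrative engine/name values)
DATABASES = {
    "default": {"ENGINE": "django.db.backends.postgresql", "NAME": "polling"},
    "logger": {"ENGINE": "django.db.backends.postgresql", "NAME": "logger"},
}

from councils.models import Council, UserCouncils

# Both the filter() and the update_or_create() must name the alias explicitly;
# either call without it silently falls back to "default".
councils = Council.objects.using("logger").filter(registration_email__contains="example.org")
for council in councils:
    UserCouncils.objects.using("logger").update_or_create(user=user, council=council)
```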
f94c946d135aed30f4d9068844b563fa94e39ff1 | test.py | test.py | from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(str(d), "{'a': 1}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
| from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
| Check recursion in str() and repr() | Check recursion in str() and repr()
| Python | mit | janrain/lazydict | from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
- d = lazydict.LazyDictionary({'a': 1})
+ d = lazydict.LazyDictionary({'a': {'b': 1}})
- self.assertEqual(str(d), "{'a': 1}")
+ self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
- d = lazydict.LazyDictionary({'a': 1})
+ d = lazydict.LazyDictionary({'a': {'b': 1}})
- self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
+ self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
| Check recursion in str() and repr() | ## Code Before:
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(str(d), "{'a': 1}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
## Instruction:
Check recursion in str() and repr()
## Code After:
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
| # ... existing code ...
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
# ... modified code ...
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
# ... rest of the code ... |
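Switching the fixture from `{'a': 1}` to `{'a': {'b': 1}}` is what actually exercises recursion: formatting the outer mapping has to call `repr()` on the nested dict, which a formatter that only handles top-level keys would get wrong. If the class delegates to the wrapped `dict`, the recursion comes for free; an illustrative sketch only, since lazydict's real implementation may differ:

```python
class LazyDictionary(dict):
    def __repr__(self):
        # dict.__repr__ walks nested containers via each value's own repr()
        return "LazyDictionary(%s)" % dict.__repr__(self)
```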
aceeac7e9dd2735add937bc7141cfdb29b6201c7 | pywatson/watson.py | pywatson/watson.py | from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson:
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.to_dict()
else:
q = Question(question_text).to_dict()
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson(object):
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.__dict__
else:
q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| Use __dict__ instead of to_dict() | Use __dict__ instead of to_dict()
| Python | mit | sherlocke/pywatson | from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
- class Watson:
+ class Watson(object):
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
- q = question.to_dict()
+ q = question.__dict__
else:
- q = Question(question_text).to_dict()
+ q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| Use __dict__ instead of to_dict() | ## Code Before:
from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson:
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.to_dict()
else:
q = Question(question_text).to_dict()
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
## Instruction:
Use __dict__ instead of to_dict()
## Code After:
from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson(object):
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.__dict__
else:
q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| # ... existing code ...
class Watson(object):
"""The Watson API adapter class"""
# ... modified code ...
if question is not None:
q = question.__dict__
else:
q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
# ... rest of the code ... |
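`instance.__dict__` is the attribute mapping Python already maintains, so for a flat value object it is interchangeable with a hand-written `to_dict()` and removes a method from the API surface. One caveat, hedged because the `Question` internals are not shown here: `__dict__` is shallow, so nested resource objects would land in the JSON payload unserialized. A sketch with an assumed attribute name, not taken from pywatson itself:

```python
class Question(object):
    def __init__(self, question_text):
        self.questionText = question_text

Question("What is DeepQA?").__dict__   # {'questionText': 'What is DeepQA?'}, ready for json=
```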
5487126bfc3c4fd16243b9c7e00b204f2f8d7374 | tests/test_znc.py | tests/test_znc.py | def test_service_running(Service):
service = Service('znc')
assert service.is_running
def test_socket_listening(Socket):
socket = Socket('tcp://127.0.0.1:6666')
assert socket.is_listening
| from testinfra.utils.ansible_runner import AnsibleRunner
testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all')
def test_service_enabled(Service):
service = Service('znc')
assert service.is_enabled
def test_service_running(Service):
service = Service('znc')
assert service.is_running
def test_socket_listening_ipv4(Socket):
socket = Socket('tcp://0.0.0.0:6666')
assert socket.is_listening
def test_socket_listening_ipv6(Socket):
socket = Socket('tcp://:::6666')
assert not socket.is_listening
| Tweak the infratest a bit | Tweak the infratest a bit
| Python | mit | triplepoint/ansible-znc | + from testinfra.utils.ansible_runner import AnsibleRunner
+
+ testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all')
+
+
+ def test_service_enabled(Service):
+ service = Service('znc')
+ assert service.is_enabled
+
+
def test_service_running(Service):
service = Service('znc')
assert service.is_running
- def test_socket_listening(Socket):
+ def test_socket_listening_ipv4(Socket):
- socket = Socket('tcp://127.0.0.1:6666')
+ socket = Socket('tcp://0.0.0.0:6666')
assert socket.is_listening
+
+ def test_socket_listening_ipv6(Socket):
+ socket = Socket('tcp://:::6666')
+ assert not socket.is_listening
+ | Tweak the infratest a bit | ## Code Before:
def test_service_running(Service):
service = Service('znc')
assert service.is_running
def test_socket_listening(Socket):
socket = Socket('tcp://127.0.0.1:6666')
assert socket.is_listening
## Instruction:
Tweak the infratest a bit
## Code After:
from testinfra.utils.ansible_runner import AnsibleRunner
testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all')
def test_service_enabled(Service):
service = Service('znc')
assert service.is_enabled
def test_service_running(Service):
service = Service('znc')
assert service.is_running
def test_socket_listening_ipv4(Socket):
socket = Socket('tcp://0.0.0.0:6666')
assert socket.is_listening
def test_socket_listening_ipv6(Socket):
socket = Socket('tcp://:::6666')
assert not socket.is_listening
| ...
from testinfra.utils.ansible_runner import AnsibleRunner
testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all')
def test_service_enabled(Service):
service = Service('znc')
assert service.is_enabled
def test_service_running(Service):
...
def test_socket_listening_ipv4(Socket):
socket = Socket('tcp://0.0.0.0:6666')
assert socket.is_listening
def test_socket_listening_ipv6(Socket):
socket = Socket('tcp://:::6666')
assert not socket.is_listening
... |
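The module-level `testinfra_hosts` attribute is how testinfra parametrizes its fixtures: each test runs once per host returned by the inventory lookup, here reusing the inventory Molecule generated for the converge step. Note also that the IPv6 case asserts the listener is absent on `tcp://:::6666`, i.e. the role is expected to bind IPv4 only. The same hook can target a narrower group; the group name below is illustrative, not from the commit:

```python
from testinfra.utils.ansible_runner import AnsibleRunner

# Run these checks only against hosts in a specific inventory group.
testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('znc-servers')
```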
e97dee6ec7c49cf3d33803504c7269a41c4d0a0f | authentication_app/views.py | authentication_app/views.py | from django.shortcuts import render
from django.http import HttpResponse
from .models import Greeting
# Create your views here.
def index(request):
return HttpResponse('Hello from Python!')
def db(request):
greeting = Greeting()
greeting.save()
greetings = Greeting.objects.all()
return render(request, 'db.html', {'greetings': greetings})
| from rest_framework import permissions, viewsets
from authentication_app.models import Account
from authentication_app.permissions import IsAccountOwner
from authentication_app.serializers import AccountSerializer
'''
@name : AccountViewSerializer
@desc : Defines the serializer for the account view.
'''
class AccountViewSerializer(viewsets.ModelViewSet):
lookup_field = 'username'
queryset = Account.objects.all()
serializer_class = AccountSerializer
def get_permissions(self):
if self.reqiest.method in permissions.SAFE_METHODS:
return (permissions.AllowAny(),)
if self.request.method == 'POST':
return (permissions.AllowAny(),)
return (permissions.IsAuthenticated(), IsAccountOwner(),)
def create(self, request):
serializer = self.serializer_class(data=reqiest.data)
if serializer.is_valid():
Account.objects.create_user(**serializer.validated_data)
return Response(serializer.validated_data, status=status.HTTP_201_CREATED)
return Response({
'status' : 'Bad Request',
'message' : 'Account could not be created with the received data.'
}, status=status.HTTP_400_BAD_REQUEST)
| Add the view serializer for the account model. | Add the view serializer for the account model.
| Python | mit | mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app | + from rest_framework import permissions, viewsets
- from django.shortcuts import render
- from django.http import HttpResponse
- from .models import Greeting
+ from authentication_app.models import Account
+ from authentication_app.permissions import IsAccountOwner
+ from authentication_app.serializers import AccountSerializer
- # Create your views here.
- def index(request):
- return HttpResponse('Hello from Python!')
+ '''
+ @name : AccountViewSerializer
+ @desc : Defines the serializer for the account view.
+ '''
+ class AccountViewSerializer(viewsets.ModelViewSet):
+ lookup_field = 'username'
+ queryset = Account.objects.all()
+ serializer_class = AccountSerializer
+ def get_permissions(self):
+ if self.reqiest.method in permissions.SAFE_METHODS:
+ return (permissions.AllowAny(),)
+ if self.request.method == 'POST':
+ return (permissions.AllowAny(),)
- def db(request):
+ return (permissions.IsAuthenticated(), IsAccountOwner(),)
- greeting = Greeting()
- greeting.save()
+ def create(self, request):
+ serializer = self.serializer_class(data=reqiest.data)
- greetings = Greeting.objects.all()
+ if serializer.is_valid():
+ Account.objects.create_user(**serializer.validated_data)
+ return Response(serializer.validated_data, status=status.HTTP_201_CREATED)
- return render(request, 'db.html', {'greetings': greetings})
+ return Response({
+ 'status' : 'Bad Request',
+ 'message' : 'Account could not be created with the received data.'
+ }, status=status.HTTP_400_BAD_REQUEST)
- | Add the view serializer for the account model. | ## Code Before:
from django.shortcuts import render
from django.http import HttpResponse
from .models import Greeting
# Create your views here.
def index(request):
return HttpResponse('Hello from Python!')
def db(request):
greeting = Greeting()
greeting.save()
greetings = Greeting.objects.all()
return render(request, 'db.html', {'greetings': greetings})
## Instruction:
Add the view serializer for the account model.
## Code After:
from rest_framework import permissions, viewsets
from authentication_app.models import Account
from authentication_app.permissions import IsAccountOwner
from authentication_app.serializers import AccountSerializer
'''
@name : AccountViewSerializer
@desc : Defines the serializer for the account view.
'''
class AccountViewSerializer(viewsets.ModelViewSet):
lookup_field = 'username'
queryset = Account.objects.all()
serializer_class = AccountSerializer
def get_permissions(self):
if self.reqiest.method in permissions.SAFE_METHODS:
return (permissions.AllowAny(),)
if self.request.method == 'POST':
return (permissions.AllowAny(),)
return (permissions.IsAuthenticated(), IsAccountOwner(),)
def create(self, request):
serializer = self.serializer_class(data=reqiest.data)
if serializer.is_valid():
Account.objects.create_user(**serializer.validated_data)
return Response(serializer.validated_data, status=status.HTTP_201_CREATED)
return Response({
'status' : 'Bad Request',
'message' : 'Account could not be created with the received data.'
}, status=status.HTTP_400_BAD_REQUEST)
| ...
from rest_framework import permissions, viewsets
from authentication_app.models import Account
from authentication_app.permissions import IsAccountOwner
from authentication_app.serializers import AccountSerializer
'''
@name : AccountViewSerializer
@desc : Defines the serializer for the account view.
'''
class AccountViewSerializer(viewsets.ModelViewSet):
lookup_field = 'username'
queryset = Account.objects.all()
serializer_class = AccountSerializer
def get_permissions(self):
if self.reqiest.method in permissions.SAFE_METHODS:
return (permissions.AllowAny(),)
if self.request.method == 'POST':
return (permissions.AllowAny(),)
return (permissions.IsAuthenticated(), IsAccountOwner(),)
def create(self, request):
serializer = self.serializer_class(data=reqiest.data)
if serializer.is_valid():
Account.objects.create_user(**serializer.validated_data)
return Response(serializer.validated_data, status=status.HTTP_201_CREATED)
return Response({
'status' : 'Bad Request',
'message' : 'Account could not be created with the received data.'
}, status=status.HTTP_400_BAD_REQUEST)
... |
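Two defects ship inside this commit, and the record above reproduces it as it was made: `get_permissions` reads `self.reqiest.method` and `create` builds the serializer from `data=reqiest.data` (an `AttributeError` and a `NameError` at request time), and neither `Response` nor `status` is ever imported. The class name is also misleading, since `AccountViewSerializer` subclasses `ModelViewSet`, not a serializer. A corrected sketch of the two methods, keeping the commit's intent (`Account` and `IsAccountOwner` as imported in the commit):

```python
from rest_framework import permissions, status, viewsets
from rest_framework.response import Response

class AccountViewSet(viewsets.ModelViewSet):
    # lookup_field / queryset / serializer_class as in the commit

    def get_permissions(self):
        if self.request.method in permissions.SAFE_METHODS:
            return (permissions.AllowAny(),)
        if self.request.method == 'POST':
            return (permissions.AllowAny(),)
        return (permissions.IsAuthenticated(), IsAccountOwner(),)

    def create(self, request):
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            Account.objects.create_user(**serializer.validated_data)
            return Response(serializer.validated_data, status=status.HTTP_201_CREATED)
        return Response({'status': 'Bad Request',
                         'message': 'Account could not be created with the received data.'},
                        status=status.HTTP_400_BAD_REQUEST)
```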
d38392998869319677cc884836c5952441f6ac62 | pokemongo_bot/base_task.py | pokemongo_bot/base_task.py | import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
| import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
| Support for log_delay for all tasks | Support for log_delay for all tasks
| Python | mit | lythien/pokemongo,heihachi/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,lythien/pokemongo,goedzo/PokemonGo-Bot,goedzo/PokemonGo-Bot,dtee/PokemonGo-Bot,DBa2016/PokemonGo-Bot,DBa2016/PokemonGo-Bot,DBa2016/PokemonGo-Bot,halsafar/PokemonGo-Bot,goshan/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,cmezh/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,cmezh/PokemonGo-Bot,goedzo/PokemonGo-Bot,dtee/PokemonGo-Bot,lythien/pokemongo,pengzhangdev/PokemonGo-Bot,halsafar/PokemonGo-Bot,halsafar/PokemonGo-Bot,heihachi/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,dtee/PokemonGo-Bot,DBa2016/PokemonGo-Bot,cmezh/PokemonGo-Bot,dtee/PokemonGo-Bot,heihachi/PokemonGo-Bot,heihachi/PokemonGo-Bot,lythien/pokemongo,goedzo/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,goshan/PokemonGo-Bot,cmezh/PokemonGo-Bot,halsafar/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot | import logging
+
+ import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
+ self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
+
+ # Print log only if X seconds are passed from last log
+ if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
+ self.last_log_time = time.time()
- self.bot.event_manager.emit(
+ self.bot.event_manager.emit(
- event,
+ event,
- sender=sender,
+ sender=sender,
- level=level,
+ level=level,
- formatted=formatted,
+ formatted=formatted,
- data=data
+ data=data
- )
+ )
def initialize(self):
pass
| Support for log_delay for all tasks | ## Code Before:
import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
## Instruction:
Support for log_delay for all tasks
## Code After:
import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
| // ... existing code ...
import logging
import time
// ... modified code ...
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
...
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
// ... rest of the code ... |
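The throttle is a dropping rate limit, not a queue: an event arriving inside the `log_delay` window (seconds, read from each task's config and defaulting to 0, so behaviour is unchanged unless configured) is discarded outright. One side effect of seeding `last_log_time` in `__init__` is that a task's very first event is also suppressed if it fires within `log_delay` seconds of construction. The pattern in isolation, as a standalone sketch rather than bot code:

```python
import time

class ThrottledEmitter(object):
    def __init__(self, min_interval):
        self.min_interval = min_interval
        self.last_emit = time.time()  # seeding with "now" also suppresses early events

    def emit(self, message):
        if (time.time() - self.last_emit) > self.min_interval:
            self.last_emit = time.time()
            print(message)   # forwarded
        # else: dropped, not deferred
```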
053d6a2ca13b1f36a02fa3223092a10af35f6579 | erpnext/patches/v10_0/item_barcode_childtable_migrate.py | erpnext/patches/v10_0/item_barcode_childtable_migrate.py |
from __future__ import unicode_literals
import frappe
def execute():
items_barcode = frappe.get_all('Item', ['name', 'barcode'], { 'barcode': ('!=', '') })
frappe.reload_doc("stock", "doctype", "item")
frappe.reload_doc("stock", "doctype", "item_barcode")
for item in items_barcode:
barcode = item.barcode.strip()
if barcode and '<' not in barcode:
try:
frappe.get_doc({
'idx': 0,
'doctype': 'Item Barcode',
'barcode': barcode,
'parenttype': 'Item',
'parent': item.name,
'parentfield': 'barcodes'
}).insert()
except frappe.DuplicateEntryError:
continue
|
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("stock", "doctype", "item_barcode")
items_barcode = frappe.get_all('Item', ['name', 'barcode'], { 'barcode': ('!=', '') })
frappe.reload_doc("stock", "doctype", "item")
for item in items_barcode:
barcode = item.barcode.strip()
if barcode and '<' not in barcode:
try:
frappe.get_doc({
'idx': 0,
'doctype': 'Item Barcode',
'barcode': barcode,
'parenttype': 'Item',
'parent': item.name,
'parentfield': 'barcodes'
}).insert()
except frappe.DuplicateEntryError:
continue
| Move reload doc before get query | Move reload doc before get query
| Python | agpl-3.0 | gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext |
from __future__ import unicode_literals
import frappe
def execute():
+ frappe.reload_doc("stock", "doctype", "item_barcode")
+
items_barcode = frappe.get_all('Item', ['name', 'barcode'], { 'barcode': ('!=', '') })
+ frappe.reload_doc("stock", "doctype", "item")
+
- frappe.reload_doc("stock", "doctype", "item")
- frappe.reload_doc("stock", "doctype", "item_barcode")
for item in items_barcode:
barcode = item.barcode.strip()
if barcode and '<' not in barcode:
try:
frappe.get_doc({
'idx': 0,
'doctype': 'Item Barcode',
'barcode': barcode,
'parenttype': 'Item',
'parent': item.name,
'parentfield': 'barcodes'
}).insert()
except frappe.DuplicateEntryError:
continue
| Move reload doc before get query | ## Code Before:
from __future__ import unicode_literals
import frappe
def execute():
items_barcode = frappe.get_all('Item', ['name', 'barcode'], { 'barcode': ('!=', '') })
frappe.reload_doc("stock", "doctype", "item")
frappe.reload_doc("stock", "doctype", "item_barcode")
for item in items_barcode:
barcode = item.barcode.strip()
if barcode and '<' not in barcode:
try:
frappe.get_doc({
'idx': 0,
'doctype': 'Item Barcode',
'barcode': barcode,
'parenttype': 'Item',
'parent': item.name,
'parentfield': 'barcodes'
}).insert()
except frappe.DuplicateEntryError:
continue
## Instruction:
Move reload doc before get query
## Code After:
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("stock", "doctype", "item_barcode")
items_barcode = frappe.get_all('Item', ['name', 'barcode'], { 'barcode': ('!=', '') })
frappe.reload_doc("stock", "doctype", "item")
for item in items_barcode:
barcode = item.barcode.strip()
if barcode and '<' not in barcode:
try:
frappe.get_doc({
'idx': 0,
'doctype': 'Item Barcode',
'barcode': barcode,
'parenttype': 'Item',
'parent': item.name,
'parentfield': 'barcodes'
}).insert()
except frappe.DuplicateEntryError:
continue
| ...
def execute():
frappe.reload_doc("stock", "doctype", "item_barcode")
items_barcode = frappe.get_all('Item', ['name', 'barcode'], { 'barcode': ('!=', '') })
frappe.reload_doc("stock", "doctype", "item")
... |
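`frappe.reload_doc` syncs a DocType's JSON definition into the live database, creating the `Item Barcode` child table on sites that predate it. Moving that reload ahead of the `frappe.get_all('Item', ...)` call is presumably what the subject means by "before get query": the Item metadata now references the barcode child table, so the child DocType must exist before Item is queried. The resulting order, with editorial comments on a condensed sketch of the patch:

```python
frappe.reload_doc("stock", "doctype", "item_barcode")        # child table exists first
items_barcode = frappe.get_all("Item", ["name", "barcode"],  # legacy column read next
                               {"barcode": ("!=", "")})
frappe.reload_doc("stock", "doctype", "item")                # parent schema refreshed
# ...then one Item Barcode row is inserted per legacy value, as above...
```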
26bae1f6094550939b1ed2ded3885e5d7befc39d | rply/token.py | rply/token.py | class BaseBox(object):
pass
class Token(BaseBox):
def __init__(self, name, value, source_pos=None):
BaseBox.__init__(self)
self.name = name
self.value = value
self.source_pos = source_pos
def __eq__(self, other):
return self.name == other.name and self.value == other.value
def gettokentype(self):
return self.name
def getsourcepos(self):
return self.source_pos
def getstr(self):
return self.value
class SourcePosition(object):
def __init__(self, idx, lineno, colno):
self.idx = idx
self.lineno = lineno
self.colno = colno
| class BaseBox(object):
pass
class Token(BaseBox):
def __init__(self, name, value, source_pos=None):
self.name = name
self.value = value
self.source_pos = source_pos
def __eq__(self, other):
return self.name == other.name and self.value == other.value
def gettokentype(self):
return self.name
def getsourcepos(self):
return self.source_pos
def getstr(self):
return self.value
class SourcePosition(object):
def __init__(self, idx, lineno, colno):
self.idx = idx
self.lineno = lineno
self.colno = colno
| Drop the __init__ call to object.__init__, RPython doesn't like it and it doesn't do anything | Drop the __init__ call to object.__init__, RPython doesn't like it and it doesn't do anything
| Python | bsd-3-clause | agamdua/rply,agamdua/rply | class BaseBox(object):
pass
class Token(BaseBox):
def __init__(self, name, value, source_pos=None):
- BaseBox.__init__(self)
self.name = name
self.value = value
self.source_pos = source_pos
def __eq__(self, other):
return self.name == other.name and self.value == other.value
def gettokentype(self):
return self.name
def getsourcepos(self):
return self.source_pos
def getstr(self):
return self.value
class SourcePosition(object):
def __init__(self, idx, lineno, colno):
self.idx = idx
self.lineno = lineno
self.colno = colno
| Drop the __init__ call to object.__init__, RPython doesn't like it and it doesn't do anything | ## Code Before:
class BaseBox(object):
pass
class Token(BaseBox):
def __init__(self, name, value, source_pos=None):
BaseBox.__init__(self)
self.name = name
self.value = value
self.source_pos = source_pos
def __eq__(self, other):
return self.name == other.name and self.value == other.value
def gettokentype(self):
return self.name
def getsourcepos(self):
return self.source_pos
def getstr(self):
return self.value
class SourcePosition(object):
def __init__(self, idx, lineno, colno):
self.idx = idx
self.lineno = lineno
self.colno = colno
## Instruction:
Drop the __init__ call to object.__init__, RPython doesn't like it and it doesn't do anything
## Code After:
class BaseBox(object):
pass
class Token(BaseBox):
def __init__(self, name, value, source_pos=None):
self.name = name
self.value = value
self.source_pos = source_pos
def __eq__(self, other):
return self.name == other.name and self.value == other.value
def gettokentype(self):
return self.name
def getsourcepos(self):
return self.source_pos
def getstr(self):
return self.value
class SourcePosition(object):
def __init__(self, idx, lineno, colno):
self.idx = idx
self.lineno = lineno
self.colno = colno
| // ... existing code ...
def __init__(self, name, value, source_pos=None):
self.name = name
// ... rest of the code ... |
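Context for the change: rply translates under RPython, whose annotator accepts only a restricted Python subset, and `object.__init__(self)` neither stores state nor affects the inferred instance layout, so the call is pure overhead. The attribute assignments alone define the shape the translator sees, roughly:

```python
# Sketch: the fields of Token are inferred from these assignments; no
# base-class __init__ call is needed when the base holds no state of its own.
class Token(object):
    def __init__(self, name, value):
        self.name = name
        self.value = value
```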
691bee381bda822a059c5d9fa790feabc7e00a8d | dnsimple2/tests/services/base.py | dnsimple2/tests/services/base.py | import os
from unittest import TestCase
from dnsimple2.client import DNSimple
from dnsimple2.resources import (
AccountResource,
DomainResource
)
from dnsimple2.tests.utils import get_test_domain_name
class BaseServiceTestCase(TestCase):
@classmethod
def setUpClass(cls):
access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
cls.client = DNSimple(access_token)
cls.account = AccountResource(id=424)
cls.domain = cls.client.domains.create(
cls.account,
DomainResource(name=get_test_domain_name(), account=cls.account)
)
cls.invalid_domain = DomainResource(
id=1,
name='invalid-domain',
account=cls.account
)
| import os
from unittest import TestCase
from dnsimple2.client import DNSimple
from dnsimple2.resources import (
AccountResource,
DomainResource
)
from dnsimple2.tests.utils import get_test_domain_name
class BaseServiceTestCase(TestCase):
@classmethod
def setUpClass(cls):
access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
cls.client = DNSimple(access_token)
account_id = os.getenv('DNSIMPLE_ACCOUNT_ID')
cls.account = AccountResource(id=account_id)
cls.domain = cls.client.domains.create(
cls.account,
DomainResource(name=get_test_domain_name(), account=cls.account)
)
cls.invalid_domain = DomainResource(
id=1,
name='invalid-domain',
account=cls.account
)
| Use env variable for account id in tests. | Use env variable for account id in tests.
| Python | mit | indradhanush/dnsimple2-python | import os
from unittest import TestCase
from dnsimple2.client import DNSimple
from dnsimple2.resources import (
AccountResource,
DomainResource
)
from dnsimple2.tests.utils import get_test_domain_name
class BaseServiceTestCase(TestCase):
@classmethod
def setUpClass(cls):
access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
cls.client = DNSimple(access_token)
+
+ account_id = os.getenv('DNSIMPLE_ACCOUNT_ID')
- cls.account = AccountResource(id=424)
+ cls.account = AccountResource(id=account_id)
cls.domain = cls.client.domains.create(
cls.account,
DomainResource(name=get_test_domain_name(), account=cls.account)
)
cls.invalid_domain = DomainResource(
id=1,
name='invalid-domain',
account=cls.account
)
| Use env variable for account id in tests. | ## Code Before:
import os
from unittest import TestCase
from dnsimple2.client import DNSimple
from dnsimple2.resources import (
AccountResource,
DomainResource
)
from dnsimple2.tests.utils import get_test_domain_name
class BaseServiceTestCase(TestCase):
@classmethod
def setUpClass(cls):
access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
cls.client = DNSimple(access_token)
cls.account = AccountResource(id=424)
cls.domain = cls.client.domains.create(
cls.account,
DomainResource(name=get_test_domain_name(), account=cls.account)
)
cls.invalid_domain = DomainResource(
id=1,
name='invalid-domain',
account=cls.account
)
## Instruction:
Use env variable for account id in tests.
## Code After:
import os
from unittest import TestCase
from dnsimple2.client import DNSimple
from dnsimple2.resources import (
AccountResource,
DomainResource
)
from dnsimple2.tests.utils import get_test_domain_name
class BaseServiceTestCase(TestCase):
@classmethod
def setUpClass(cls):
access_token = os.getenv('DNSIMPLE_V2_ACCESS_TOKEN')
cls.client = DNSimple(access_token)
account_id = os.getenv('DNSIMPLE_ACCOUNT_ID')
cls.account = AccountResource(id=account_id)
cls.domain = cls.client.domains.create(
cls.account,
DomainResource(name=get_test_domain_name(), account=cls.account)
)
cls.invalid_domain = DomainResource(
id=1,
name='invalid-domain',
account=cls.account
)
| // ... existing code ...
cls.client = DNSimple(access_token)
account_id = os.getenv('DNSIMPLE_ACCOUNT_ID')
cls.account = AccountResource(id=account_id)
cls.domain = cls.client.domains.create(
// ... rest of the code ... |
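Reading the account id from `DNSIMPLE_ACCOUNT_ID` keeps per-developer coordinates out of the test source, matching how the access token is already handled. One hedged caveat: `os.getenv` returns a string (or `None` when unset), so `AccountResource` now receives text where the old code passed the integer `424`. If the id must be numeric and present, a stricter variant is:

```python
import os

account_id = os.environ["DNSIMPLE_ACCOUNT_ID"]   # raises KeyError early if unset
account = AccountResource(id=int(account_id))    # environment values are always str
```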
45275a48fb434e6a9d895da03e290b84c52694f6 | orbitdeterminator/kep_determination/least_squares.py | orbitdeterminator/kep_determination/least_squares.py |
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter |
import math
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
#rotation about the z-axis about an angle `ang`
def rotz(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((cos_ang,-sin_ang,0.0), (sin_ang, cos_ang,0.0), (0.0,0.0,1.0)))
#rotation about the x-axis about an angle `ang`
def rotx(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((1.0,0.0,0.0), (0.0,cos_ang,-sin_ang), (0.0,sin_ang,cos_ang)))
#rotation from the orbital plane to the inertial frame
#it is composed of the following rotations, in that order:
#1) rotation about the z axis about an angle `omega` (argument of pericenter)
#2) rotation about the x axis about an angle `I` (inclination)
#3) rotation about the z axis about an angle `Omega` (longitude of ascending node)
def op2if(omega,I,Omega):
P2_mul_P3 = np.matmul(rotx(I),rotz(omega))
return np.matmul(rotz(Omega),P2_mul_P3)
omega = math.radians(31.124)
I = math.radians(75.0)
Omega = math.radians(60.0)
# rotation matrix from orbital plane to inertial frame
# two ways to compute it; result should be the same
P_1 = rotz(omega) #rotation about z axis by an angle `omega`
P_2 = rotx(I) #rotation about x axis by an angle `I`
P_3 = rotz(Omega) #rotation about z axis by an angle `Omega`
Rot1 = np.matmul(P_3,np.matmul(P_2,P_1))
Rot2 = op2if(omega,I,Omega)
v = np.array((3.0,-2.0,1.0))
print(I)
print(omega)
print(Omega)
print(Rot1)
print(np.matmul(Rot1,v))
print(Rot2) | Add rotation matrix, from orbital plane to inertial frame | Add rotation matrix, from orbital plane to inertial frame
| Python | mit | aerospaceresearch/orbitdeterminator |
+ import math
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
+
+ #rotation about the z-axis about an angle `ang`
+ def rotz(ang):
+ cos_ang = math.cos(ang)
+ sin_ang = math.sin(ang)
+ return np.array(((cos_ang,-sin_ang,0.0), (sin_ang, cos_ang,0.0), (0.0,0.0,1.0)))
+
+ #rotation about the x-axis about an angle `ang`
+ def rotx(ang):
+ cos_ang = math.cos(ang)
+ sin_ang = math.sin(ang)
+ return np.array(((1.0,0.0,0.0), (0.0,cos_ang,-sin_ang), (0.0,sin_ang,cos_ang)))
+
+ #rotation from the orbital plane to the inertial frame
+ #it is composed of the following rotations, in that order:
+ #1) rotation about the z axis about an angle `omega` (argument of pericenter)
+ #2) rotation about the x axis about an angle `I` (inclination)
+ #3) rotation about the z axis about an angle `Omega` (longitude of ascending node)
+ def op2if(omega,I,Omega):
+ P2_mul_P3 = np.matmul(rotx(I),rotz(omega))
+ return np.matmul(rotz(Omega),P2_mul_P3)
+
+ omega = math.radians(31.124)
+ I = math.radians(75.0)
+ Omega = math.radians(60.0)
+
+ # rotation matrix from orbital plane to inertial frame
+ # two ways to compute it; result should be the same
+ P_1 = rotz(omega) #rotation about z axis by an angle `omega`
+ P_2 = rotx(I) #rotation about x axis by an angle `I`
+ P_3 = rotz(Omega) #rotation about z axis by an angle `Omega`
+
+ Rot1 = np.matmul(P_3,np.matmul(P_2,P_1))
+ Rot2 = op2if(omega,I,Omega)
+
+ v = np.array((3.0,-2.0,1.0))
+
+ print(I)
+ print(omega)
+ print(Omega)
+
+ print(Rot1)
+
+ print(np.matmul(Rot1,v))
+
+ print(Rot2) | Add rotation matrix, from orbital plane to inertial frame | ## Code Before:
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
## Instruction:
Add rotation matrix, from orbital plane to inertial frame
## Code After:
import math
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
#rotation about the z-axis about an angle `ang`
def rotz(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((cos_ang,-sin_ang,0.0), (sin_ang, cos_ang,0.0), (0.0,0.0,1.0)))
#rotation about the x-axis about an angle `ang`
def rotx(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((1.0,0.0,0.0), (0.0,cos_ang,-sin_ang), (0.0,sin_ang,cos_ang)))
#rotation from the orbital plane to the inertial frame
#it is composed of the following rotations, in that order:
#1) rotation about the z axis about an angle `omega` (argument of pericenter)
#2) rotation about the x axis about an angle `I` (inclination)
#3) rotation about the z axis about an angle `Omega` (longitude of ascending node)
def op2if(omega,I,Omega):
P2_mul_P3 = np.matmul(rotx(I),rotz(omega))
return np.matmul(rotz(Omega),P2_mul_P3)
omega = math.radians(31.124)
I = math.radians(75.0)
Omega = math.radians(60.0)
# rotation matrix from orbital plane to inertial frame
# two ways to compute it; result should be the same
P_1 = rotz(omega) #rotation about z axis by an angle `omega`
P_2 = rotx(I) #rotation about x axis by an angle `I`
P_3 = rotz(Omega) #rotation about z axis by an angle `Omega`
Rot1 = np.matmul(P_3,np.matmul(P_2,P_1))
Rot2 = op2if(omega,I,Omega)
v = np.array((3.0,-2.0,1.0))
print(I)
print(omega)
print(Omega)
print(Rot1)
print(np.matmul(Rot1,v))
print(Rot2) | ...
import math
import numpy as np
...
# omega: argument of pericenter
#rotation about the z-axis about an angle `ang`
def rotz(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((cos_ang,-sin_ang,0.0), (sin_ang, cos_ang,0.0), (0.0,0.0,1.0)))
#rotation about the x-axis about an angle `ang`
def rotx(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((1.0,0.0,0.0), (0.0,cos_ang,-sin_ang), (0.0,sin_ang,cos_ang)))
#rotation from the orbital plane to the inertial frame
#it is composed of the following rotations, in that order:
#1) rotation about the z axis about an angle `omega` (argument of pericenter)
#2) rotation about the x axis about an angle `I` (inclination)
#3) rotation about the z axis about an angle `Omega` (longitude of ascending node)
def op2if(omega,I,Omega):
P2_mul_P3 = np.matmul(rotx(I),rotz(omega))
return np.matmul(rotz(Omega),P2_mul_P3)
omega = math.radians(31.124)
I = math.radians(75.0)
Omega = math.radians(60.0)
# rotation matrix from orbital plane to inertial frame
# two ways to compute it; result should be the same
P_1 = rotz(omega) #rotation about z axis by an angle `omega`
P_2 = rotx(I) #rotation about x axis by an angle `I`
P_3 = rotz(Omega) #rotation about z axis by an angle `Omega`
Rot1 = np.matmul(P_3,np.matmul(P_2,P_1))
Rot2 = op2if(omega,I,Omega)
v = np.array((3.0,-2.0,1.0))
print(I)
print(omega)
print(Omega)
print(Rot1)
print(np.matmul(Rot1,v))
print(Rot2)
... |
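`Rot1` and `Rot2` are two spellings of the same 3-1-3 Euler rotation that carries the orbital plane into the inertial frame:

```latex
R(\Omega, I, \omega) = R_z(\Omega)\, R_x(I)\, R_z(\omega),
\qquad
R_z(\theta) = \begin{pmatrix} \cos\theta & -\sin\theta & 0 \\ \sin\theta & \cos\theta & 0 \\ 0 & 0 & 1 \end{pmatrix},
\quad
R_x(\theta) = \begin{pmatrix} 1 & 0 & 0 \\ 0 & \cos\theta & -\sin\theta \\ 0 & \sin\theta & \cos\theta \end{pmatrix}
```

Since the two products must agree to rounding error, `assert np.allclose(Rot1, Rot2)` would make the printed-matrix comparison explicit; `np.matmul(Rot1, v)` then expresses the orbital-plane vector `v` in inertial coordinates.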
8b1516e638244824b1eafed7dc4abb2dc087ec74 | nuts/nuts.py | nuts/nuts.py | import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
| import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
| Add colorama for coloring on windows | Add colorama for coloring on windows
Add the module colorama that makes ANSI escape character sequences work under MS Windows. The coloring is used to give a better overview about the testresults
| Python | mit | HSRNetwork/Nuts | import os
import sys
import argparse
import logging
import datetime
+ import colorama
from src.application.Logger import Logger
-
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
+ colorama.init()
+
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
-
+
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
| Add colorama for coloring on windows | ## Code Before:
import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
## Instruction:
Add colorama for coloring on windows
## Code After:
import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
| # ... existing code ...
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
# ... modified code ...
def main(argv):
colorama.init()
logger = Logger()
...
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
# ... rest of the code ... |
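`colorama.init()` wraps `sys.stdout`/`sys.stderr` so ANSI escape sequences emitted later (here, by the logger and test-result output) are translated into Win32 console calls on Windows and passed through unchanged elsewhere, which is exactly the cross-platform coloring the commit message describes. In isolation:

```python
import colorama
from colorama import Fore, Style

colorama.init()  # must run before any colored output on Windows
print(Fore.GREEN + "TEST PASSED" + Style.RESET_ALL)
print(Fore.RED + "TEST FAILED" + Style.RESET_ALL)
```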
cb161eb1cc66c520a81026798112fb13d0cd0e50 | community/__init__.py | community/__init__.py |
from .community_louvain import (
partition_at_level,
modularity,
best_partition,
generate_dendrogram,
induced_graph,
load_binary,
)
__author__ = """Thomas Aynaud ([email protected])"""
# Copyright (C) 2009 by
# Thomas Aynaud <[email protected]>
# All rights reserved.
# BSD license.
|
from .community_louvain import (
partition_at_level,
modularity,
best_partition,
generate_dendrogram,
induced_graph,
load_binary,
)
__version__ = "0.10"
__author__ = """Thomas Aynaud ([email protected])"""
# Copyright (C) 2009 by
# Thomas Aynaud <[email protected]>
# All rights reserved.
# BSD license.
| Add version information to package | Add version information to package
| Python | bsd-3-clause | taynaud/python-louvain |
from .community_louvain import (
partition_at_level,
modularity,
best_partition,
generate_dendrogram,
induced_graph,
load_binary,
)
-
+ __version__ = "0.10"
__author__ = """Thomas Aynaud ([email protected])"""
# Copyright (C) 2009 by
# Thomas Aynaud <[email protected]>
# All rights reserved.
# BSD license.
| Add version information to package | ## Code Before:
from .community_louvain import (
partition_at_level,
modularity,
best_partition,
generate_dendrogram,
induced_graph,
load_binary,
)
__author__ = """Thomas Aynaud ([email protected])"""
# Copyright (C) 2009 by
# Thomas Aynaud <[email protected]>
# All rights reserved.
# BSD license.
## Instruction:
Add version information to package
## Code After:
from .community_louvain import (
partition_at_level,
modularity,
best_partition,
generate_dendrogram,
induced_graph,
load_binary,
)
__version__ = "0.10"
__author__ = """Thomas Aynaud ([email protected])"""
# Copyright (C) 2009 by
# Thomas Aynaud <[email protected]>
# All rights reserved.
# BSD license.
| ...
__version__ = "0.10"
__author__ = """Thomas Aynaud ([email protected])"""
... |
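With the attribute in place, consumers can gate behaviour on the installed release at runtime:

```python
import community
print(community.__version__)   # '0.10'
```

Keeping this string in sync with whatever version the packaging metadata declares is the usual follow-up chore when it is hard-coded in the module.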
93c978ba422b26971180a4277a0b69e82848ee78 | src/yunohost/data_migrations/0009_migrate_to_apps_json.py | src/yunohost/data_migrations/0009_migrate_to_apps_json.py | from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
# Remove apps.json list
app_removelist(name="yunohost")
# Replace by official.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/official.json")
| import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
| Backup / restore original appslist to handle backward case properly | Backup / restore original appslist to handle backward case properly
| Python | agpl-3.0 | YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/yunohost | + import os
+
from moulinette.utils.log import getActionLogger
- from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
+ from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
+
+ BASE_CONF_PATH = '/home/yunohost.conf'
+ BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
+ APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
+
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
+
+ # Backup current app list json
+ os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
- # Remove apps.json list
- app_removelist(name="yunohost")
+ if os.path.exists(APPSLISTS_BACKUP):
+ os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
- # Replace by official.json list
- app_fetchlist(name="yunohost",
- url="https://app.yunohost.org/official.json")
- | Backup / restore original appslist to handle backward case properly | ## Code Before:
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
# Remove apps.json list
app_removelist(name="yunohost")
# Replace by official.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/official.json")
## Instruction:
Backup / restore original appslist to handle backward case properly
## Code After:
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
| # ... existing code ...
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
# ... modified code ...
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
...
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
...
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
# ... rest of the code ... |
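A note on the copy calls in this entry: shelling out with os.system gives no error checking and quietly depends on a POSIX cp binary. A minimal sketch of the same backup/restore step with the standard library instead — the path constants are reused from the entry, everything else is illustrative, not YunoHost code:

import os
import shutil

def backup_file(src, dest):
    # Ensure the backup directory exists, then copy with metadata intact.
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    shutil.copy2(src, dest)

def restore_file(backup, original):
    # Only restore when a backup was actually taken.
    if os.path.exists(backup):
        shutil.copy2(backup, original)

shutil raises on failure, so a broken copy surfaces immediately instead of being silently ignored.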
d0f67d9ac8236e83a77b84e33ba7217c7e8f67b9 | bird/utils.py | bird/utils.py | def noise_mask(spectrogram):
print("noise_mask is undefined")
def structure_mask(spectrogram):
print("structure_mask is undefined")
def extract_signal(mask, spectrogram):
print("extract_signal is undefined")
| import numpy as np
import os
import sys
import subprocess
import wave
from scipy import signal
from scipy import fft
from matplotlib import pyplot as plt
MLSP_DATA_PATH="/home/darksoox/gits/bird-species-classification/mlsp_contest_dataset/"
def noise_mask(spectrogram):
print("noise_mask is undefined")
def structure_mask(spectrogram):
print("structure_mask is undefined")
def extract_signal(mask, spectrogram):
print("extract_signal is undefined")
def play_wave_file(filename):
if (not os.path.isfile(filename)):
raise ValueError("File does not exist")
else:
if (sys.platform == "linux" or sys.playform == "linux2"):
subprocess.call(["aplay", filename])
else:
print("Platform not supported")
def read_wave_file(filename):
if (not os.path.isfile(filename)):
raise ValueError("File does not exist")
s = wave.open(filename, 'rb')
if (s.getnchannels() != 1):
raise ValueError("Wave file should be mono")
if (s.getframerate() != 16000):
raise ValueError("Sampling rate of wave file should be 16000")
strsig = s.readframes(s.getnframes())
x = np.fromstring(strsig, np.short)
fs = s.getframerate()
s.close()
return fs, x
def wave_to_spectrogram(wave=np.array([]), fs=None, window=signal.hanning(512),
nperseg=512, noverlap=256):
"""Given a wave form returns the spectrogram of the wave form.
Keyword arguments:
wave -- the wave form (default np.array([]))
fs -- the rate at which the wave form has been sampled
"""
return signal.spectrogram(wave, fs, window, nperseg, noverlap,
mode='magnitude')
def wave_to_spectrogram2(S):
Spectrogram = []
N = 160000
K = 512
Step = 4
wind = 0.5*(1 -np.cos(np.array(range(K))*2*np.pi/(K-1) ))
for j in range(int(Step*N/K)-Step):
vec = S[j * K/Step : (j+Step) * K/Step] * wind
Spectrogram.append(abs(fft(vec, K)[:K/2]))
return np.array(Spectrogram)
def show_spectrogram(Sxx):
plt.pcolor(Sxx)
plt.ylabel('Frequency [Hz]')
plt.xlabel('Time [s]')
plt.show()
| Add draft of spectrogram computations. | Add draft of spectrogram computations.
| Python | mit | johnmartinsson/bird-species-classification,johnmartinsson/bird-species-classification | + import numpy as np
+ import os
+ import sys
+ import subprocess
+ import wave
+ from scipy import signal
+ from scipy import fft
+ from matplotlib import pyplot as plt
+
+ MLSP_DATA_PATH="/home/darksoox/gits/bird-species-classification/mlsp_contest_dataset/"
+
def noise_mask(spectrogram):
print("noise_mask is undefined")
def structure_mask(spectrogram):
print("structure_mask is undefined")
def extract_signal(mask, spectrogram):
print("extract_signal is undefined")
+ def play_wave_file(filename):
+ if (not os.path.isfile(filename)):
+ raise ValueError("File does not exist")
+ else:
+ if (sys.platform == "linux" or sys.playform == "linux2"):
+ subprocess.call(["aplay", filename])
+ else:
+ print("Platform not supported")
+
+ def read_wave_file(filename):
+
+ if (not os.path.isfile(filename)):
+ raise ValueError("File does not exist")
+
+ s = wave.open(filename, 'rb')
+
+ if (s.getnchannels() != 1):
+ raise ValueError("Wave file should be mono")
+ if (s.getframerate() != 16000):
+ raise ValueError("Sampling rate of wave file should be 16000")
+
+ strsig = s.readframes(s.getnframes())
+ x = np.fromstring(strsig, np.short)
+ fs = s.getframerate()
+ s.close()
+
+ return fs, x
+
+ def wave_to_spectrogram(wave=np.array([]), fs=None, window=signal.hanning(512),
+ nperseg=512, noverlap=256):
+ """Given a wave form returns the spectrogram of the wave form.
+
+ Keyword arguments:
+ wave -- the wave form (default np.array([]))
+ fs -- the rate at which the wave form has been sampled
+ """
+ return signal.spectrogram(wave, fs, window, nperseg, noverlap,
+ mode='magnitude')
+
+ def wave_to_spectrogram2(S):
+ Spectrogram = []
+ N = 160000
+ K = 512
+ Step = 4
+ wind = 0.5*(1 -np.cos(np.array(range(K))*2*np.pi/(K-1) ))
+
+ for j in range(int(Step*N/K)-Step):
+ vec = S[j * K/Step : (j+Step) * K/Step] * wind
+ Spectrogram.append(abs(fft(vec, K)[:K/2]))
+
+ return np.array(Spectrogram)
+
+ def show_spectrogram(Sxx):
+ plt.pcolor(Sxx)
+ plt.ylabel('Frequency [Hz]')
+ plt.xlabel('Time [s]')
+ plt.show()
| Add draft of spectrogram computations. | ## Code Before:
def noise_mask(spectrogram):
print("noise_mask is undefined")
def structure_mask(spectrogram):
print("structure_mask is undefined")
def extract_signal(mask, spectrogram):
print("extract_signal is undefined")
## Instruction:
Add draft of spectrogram computations.
## Code After:
import numpy as np
import os
import sys
import subprocess
import wave
from scipy import signal
from scipy import fft
from matplotlib import pyplot as plt
MLSP_DATA_PATH="/home/darksoox/gits/bird-species-classification/mlsp_contest_dataset/"
def noise_mask(spectrogram):
print("noise_mask is undefined")
def structure_mask(spectrogram):
print("structure_mask is undefined")
def extract_signal(mask, spectrogram):
print("extract_signal is undefined")
def play_wave_file(filename):
if (not os.path.isfile(filename)):
raise ValueError("File does not exist")
else:
if (sys.platform == "linux" or sys.playform == "linux2"):
subprocess.call(["aplay", filename])
else:
print("Platform not supported")
def read_wave_file(filename):
if (not os.path.isfile(filename)):
raise ValueError("File does not exist")
s = wave.open(filename, 'rb')
if (s.getnchannels() != 1):
raise ValueError("Wave file should be mono")
if (s.getframerate() != 16000):
raise ValueError("Sampling rate of wave file should be 16000")
strsig = s.readframes(s.getnframes())
x = np.fromstring(strsig, np.short)
fs = s.getframerate()
s.close()
return fs, x
def wave_to_spectrogram(wave=np.array([]), fs=None, window=signal.hanning(512),
nperseg=512, noverlap=256):
"""Given a wave form returns the spectrogram of the wave form.
Keyword arguments:
wave -- the wave form (default np.array([]))
fs -- the rate at which the wave form has been sampled
"""
return signal.spectrogram(wave, fs, window, nperseg, noverlap,
mode='magnitude')
def wave_to_spectrogram2(S):
Spectrogram = []
N = 160000
K = 512
Step = 4
wind = 0.5*(1 -np.cos(np.array(range(K))*2*np.pi/(K-1) ))
for j in range(int(Step*N/K)-Step):
vec = S[j * K/Step : (j+Step) * K/Step] * wind
Spectrogram.append(abs(fft(vec, K)[:K/2]))
return np.array(Spectrogram)
def show_spectrogram(Sxx):
plt.pcolor(Sxx)
plt.ylabel('Frequency [Hz]')
plt.xlabel('Time [s]')
plt.show()
| ...
import numpy as np
import os
import sys
import subprocess
import wave
from scipy import signal
from scipy import fft
from matplotlib import pyplot as plt
MLSP_DATA_PATH="/home/darksoox/gits/bird-species-classification/mlsp_contest_dataset/"
def noise_mask(spectrogram):
...
print("extract_signal is undefined")
def play_wave_file(filename):
if (not os.path.isfile(filename)):
raise ValueError("File does not exist")
else:
if (sys.platform == "linux" or sys.playform == "linux2"):
subprocess.call(["aplay", filename])
else:
print("Platform not supported")
def read_wave_file(filename):
if (not os.path.isfile(filename)):
raise ValueError("File does not exist")
s = wave.open(filename, 'rb')
if (s.getnchannels() != 1):
raise ValueError("Wave file should be mono")
if (s.getframerate() != 16000):
raise ValueError("Sampling rate of wave file should be 16000")
strsig = s.readframes(s.getnframes())
x = np.fromstring(strsig, np.short)
fs = s.getframerate()
s.close()
return fs, x
def wave_to_spectrogram(wave=np.array([]), fs=None, window=signal.hanning(512),
nperseg=512, noverlap=256):
"""Given a wave form returns the spectrogram of the wave form.
Keyword arguments:
wave -- the wave form (default np.array([]))
fs -- the rate at which the wave form has been sampled
"""
return signal.spectrogram(wave, fs, window, nperseg, noverlap,
mode='magnitude')
def wave_to_spectrogram2(S):
Spectrogram = []
N = 160000
K = 512
Step = 4
wind = 0.5*(1 -np.cos(np.array(range(K))*2*np.pi/(K-1) ))
for j in range(int(Step*N/K)-Step):
vec = S[j * K/Step : (j+Step) * K/Step] * wind
Spectrogram.append(abs(fft(vec, K)[:K/2]))
return np.array(Spectrogram)
def show_spectrogram(Sxx):
plt.pcolor(Sxx)
plt.ylabel('Frequency [Hz]')
plt.xlabel('Time [s]')
plt.show()
... |
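The helpers in this entry assume Python 2: wave_to_spectrogram2 uses K/2 as a slice index and np.fromstring is deprecated in later NumPy. A minimal Python 3 sketch of the same magnitude-spectrogram computation with SciPy — the window and segment sizes are copied from the entry, the test signal is made up:

import numpy as np
from scipy import signal

def magnitude_spectrogram(samples, fs, nperseg=512, noverlap=256):
    # Returns frequencies, segment times and the |STFT| matrix.
    return signal.spectrogram(samples, fs=fs,
                              window=signal.get_window('hann', nperseg),
                              nperseg=nperseg, noverlap=noverlap,
                              mode='magnitude')

fs = 16000
t = np.arange(fs) / fs                  # one second of audio
tone = np.sin(2 * np.pi * 1000 * t)     # 1 kHz test tone
freqs, times, sxx = magnitude_spectrogram(tone, fs)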
38221a3d8df945981f9595842871b5dae6a68c0f | user_management/models/tests/factories.py | user_management/models/tests/factories.py | import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
| import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
is_active = True
@factory.post_generation
def password(self, create, extracted='default password', **kwargs):
self.raw_password = extracted
self.set_password(self.raw_password)
if create:
self.save()
| Add raw_password to Users in tests | Add raw_password to Users in tests
| Python | bsd-2-clause | incuna/django-user-management,incuna/django-user-management | import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
- password = factory.PostGenerationMethodCall('set_password', None)
+ is_active = True
+ @factory.post_generation
+ def password(self, create, extracted='default password', **kwargs):
+ self.raw_password = extracted
+ self.set_password(self.raw_password)
+ if create:
+ self.save()
+ | Add raw_password to Users in tests | ## Code Before:
import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
## Instruction:
Add raw_password to Users in tests
## Code After:
import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
is_active = True
@factory.post_generation
def password(self, create, extracted='default password', **kwargs):
self.raw_password = extracted
self.set_password(self.raw_password)
if create:
self.save()
| ...
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
is_active = True
@factory.post_generation
def password(self, create, extracted='default password', **kwargs):
self.raw_password = extracted
self.set_password(self.raw_password)
if create:
self.save()
... |
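What the raw_password attribute buys in practice: set_password stores only a hash, so a test that later wants to authenticate needs the plain text kept somewhere. A hypothetical usage sketch — the /login/ URL and the test client fixture are assumptions, not part of this entry:

def test_login_with_factory_user(client):
    user = UserFactory.create()
    # raw_password still holds the plain text that set_password hashed.
    response = client.post('/login/', {
        'username': user.email,
        'password': user.raw_password,
    })
    assert response.status_code == 200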
11feab5b49bf818e8dde90497d90dafc7ceb5183 | src/locations/models.py | src/locations/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
| from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
| Order locations and districts by name | Order locations and districts by name
| Python | mit | mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign | from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
+ ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
+ ordering = ['name']
def __unicode__(self):
return self.name
| Order locations and districts by name | ## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
def __unicode__(self):
return self.name
## Instruction:
Order locations and districts by name
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
class District(models.Model):
name = models.CharField(_('Name'), max_length=255, unique=True)
class Meta:
verbose_name = _('District')
verbose_name_plural = _('Districts')
ordering = ['name']
def __unicode__(self):
return self.name
class Location(models.Model):
name = models.CharField(_('Name'), max_length=255)
district = models.ForeignKey(District, verbose_name=_('District'))
address = models.CharField(_('Address'), max_length=255)
class Meta:
unique_together = ('district', 'name')
verbose_name = _('Location')
verbose_name_plural = _('Locations')
ordering = ['name']
def __unicode__(self):
return self.name
| // ... existing code ...
verbose_name_plural = _('Districts')
ordering = ['name']
// ... modified code ...
verbose_name_plural = _('Locations')
ordering = ['name']
// ... rest of the code ... |
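Meta.ordering makes the sort part of the model's default queryset, so callers no longer repeat order_by('name') everywhere. A small sketch of the effect, using the model names from the entry:

# Default managers now emit ORDER BY name:
districts = District.objects.all()
locations = Location.objects.filter(district=districts.first())

# An explicit order_by still overrides the default when needed:
newest_first = Location.objects.order_by('-id')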
24e48a82c627996332a73608d139f9ce8713642d | cref/app/web/views.py | cref/app/web/views.py | import flask
from cref.app.web import app
from cref.app.web.tasks import predict_structure
def success(result):
return flask.jsonify({
'status': 'success',
'retval': result
})
def failure(reason='Unknown'):
return flask.jsonify({
'status': 'failure',
'reason': reason
})
@app.route('/predict/', methods=['POST'])
def predict():
sequence = flask.request.get_json(force=True)['sequence']
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/predict/<sequence>', methods=['GET'])
def predict_test(sequence):
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/status/<task_id>')
def status(task_id):
result = predict_structure.AsyncResult(task_id)
return success({'state': result.state})
@app.route('/result/<task_id>')
def result(task_id):
result = predict_structure.AsyncResult(task_id)
if result.ready():
return success({'pdb_file': result.get()})
else:
return failure('Task is pending')
| import flask
from cref.app.web import app
from cref.app.web.tasks import predict_structure
def success(result):
return flask.jsonify({
'status': 'success',
'retval': result
})
def failure(reason='Unknown'):
return flask.jsonify({
'status': 'failure',
'reason': reason
})
@app.route('/predict/', methods=['POST'])
def predict():
params = flask.request.get_json(force=True)
resp = predict_structure.delay(params['sequence'])
return success({'task_id': resp.id})
@app.route('/predict/<sequence>', methods=['GET'])
def predict_test(sequence):
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/status/<task_id>')
def status(task_id):
result = predict_structure.AsyncResult(task_id)
return success({'state': result.state})
@app.route('/result/<task_id>')
def result(task_id):
result = predict_structure.AsyncResult(task_id)
if result.ready():
return success({'pdb_file': result.get()})
else:
return failure('Task is pending')
@app.route('/predictions/<task_id>/<path:filename>')
def download_file(task_id, filename):
return flask.send_from_directory(
'/home/mchelem/dev/cref2/predictions/',
filename, as_attachment=True
)
| Add method to serve prediction result files | Add method to serve prediction result files
| Python | mit | mchelem/cref2,mchelem/cref2,mchelem/cref2 | import flask
from cref.app.web import app
from cref.app.web.tasks import predict_structure
def success(result):
return flask.jsonify({
'status': 'success',
'retval': result
})
def failure(reason='Unknown'):
return flask.jsonify({
'status': 'failure',
'reason': reason
})
@app.route('/predict/', methods=['POST'])
def predict():
- sequence = flask.request.get_json(force=True)['sequence']
+ params = flask.request.get_json(force=True)
- resp = predict_structure.delay(sequence)
+ resp = predict_structure.delay(params['sequence'])
return success({'task_id': resp.id})
@app.route('/predict/<sequence>', methods=['GET'])
def predict_test(sequence):
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/status/<task_id>')
def status(task_id):
result = predict_structure.AsyncResult(task_id)
return success({'state': result.state})
@app.route('/result/<task_id>')
def result(task_id):
result = predict_structure.AsyncResult(task_id)
if result.ready():
return success({'pdb_file': result.get()})
else:
return failure('Task is pending')
+
+ @app.route('/predictions/<task_id>/<path:filename>')
+ def download_file(task_id, filename):
+ return flask.send_from_directory(
+ '/home/mchelem/dev/cref2/predictions/',
+ filename, as_attachment=True
+ )
+ | Add method to serve prediction result files | ## Code Before:
import flask
from cref.app.web import app
from cref.app.web.tasks import predict_structure
def success(result):
return flask.jsonify({
'status': 'success',
'retval': result
})
def failure(reason='Unknown'):
return flask.jsonify({
'status': 'failure',
'reason': reason
})
@app.route('/predict/', methods=['POST'])
def predict():
sequence = flask.request.get_json(force=True)['sequence']
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/predict/<sequence>', methods=['GET'])
def predict_test(sequence):
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/status/<task_id>')
def status(task_id):
result = predict_structure.AsyncResult(task_id)
return success({'state': result.state})
@app.route('/result/<task_id>')
def result(task_id):
result = predict_structure.AsyncResult(task_id)
if result.ready():
return success({'pdb_file': result.get()})
else:
return failure('Task is pending')
## Instruction:
Add method to serve prediction result files
## Code After:
import flask
from cref.app.web import app
from cref.app.web.tasks import predict_structure
def success(result):
return flask.jsonify({
'status': 'success',
'retval': result
})
def failure(reason='Unknown'):
return flask.jsonify({
'status': 'failure',
'reason': reason
})
@app.route('/predict/', methods=['POST'])
def predict():
params = flask.request.get_json(force=True)
resp = predict_structure.delay(params['sequence'])
return success({'task_id': resp.id})
@app.route('/predict/<sequence>', methods=['GET'])
def predict_test(sequence):
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/status/<task_id>')
def status(task_id):
result = predict_structure.AsyncResult(task_id)
return success({'state': result.state})
@app.route('/result/<task_id>')
def result(task_id):
result = predict_structure.AsyncResult(task_id)
if result.ready():
return success({'pdb_file': result.get()})
else:
return failure('Task is pending')
@app.route('/predictions/<task_id>/<path:filename>')
def download_file(task_id, filename):
return flask.send_from_directory(
'/home/mchelem/dev/cref2/predictions/',
filename, as_attachment=True
)
| // ... existing code ...
def predict():
params = flask.request.get_json(force=True)
resp = predict_structure.delay(params['sequence'])
return success({'task_id': resp.id})
// ... modified code ...
return failure('Task is pending')
@app.route('/predictions/<task_id>/<path:filename>')
def download_file(task_id, filename):
return flask.send_from_directory(
'/home/mchelem/dev/cref2/predictions/',
filename, as_attachment=True
)
// ... rest of the code ... |
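The download route captures two URL variables, task_id and filename, so Flask calls the view with both. One way to actually use task_id is to serve each task from its own subdirectory; send_from_directory also refuses paths that escape the base directory, which matters with a <path:> converter. A sketch reusing the path from the entry — the per-task layout is an assumption:

import os
import flask

PREDICTIONS_DIR = '/home/mchelem/dev/cref2/predictions'

@app.route('/predictions/<task_id>/<path:filename>')
def download_file(task_id, filename):
    # One folder per task keeps result files from colliding.
    return flask.send_from_directory(
        os.path.join(PREDICTIONS_DIR, task_id),
        filename, as_attachment=True)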
9021b035cc7bc63603fce3f626ca6c92c0ba3f9b | pygraphc/clustering/ConnectedComponents.py | pygraphc/clustering/ConnectedComponents.py | import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
        This method heavily relies on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
| import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
References
----------
.. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
        This method heavily relies on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
| Fix bug in docstring references Studiawan2016a | Fix bug in docstring references Studiawan2016a
| Python | mit | studiawan/pygraphc | import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
- """This is a class for connected component detection method to cluster event logs [1]_.
+ """This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
References
----------
- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
+ .. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
- clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
+ clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
        This method heavily relies on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
| Fix bug in docstring references Studiawan2016a | ## Code Before:
import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
        This method heavily relies on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
## Instruction:
Fix bug in docstring references Studiawan2016a
## Code After:
import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
References
----------
.. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
        This method heavily relies on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
| # ... existing code ...
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
# ... modified code ...
----------
.. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
# ... rest of the code ... |
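The clustering in this entry reduces to one NetworkX call: every connected component becomes a cluster. A toy illustration of the get_clusters logic, detached from the log-specific utilities:

import networkx as nx

# Edges stand in for pairs whose cosine similarity passed the threshold.
g = nx.Graph()
g.add_edges_from([(0, 1), (1, 2), (3, 4)])

clusters = {cid: comp for cid, comp in enumerate(nx.connected_components(g))}
# -> {0: {0, 1, 2}, 1: {3, 4}}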
593e826b24d83997a5be450be1401e16ec17c07c | application.py | application.py |
from __future__ import print_function
import os
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
@manager.command
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule))
if __name__ == '__main__':
manager.run()
|
from __future__ import print_function
import os
from dmutils import init_manager
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = init_manager(application, 5000, ['./json_schemas'])
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| Use new dmutils init_manager to set up reload on schema changes | Use new dmutils init_manager to set up reload on schema changes
| Python | mit | alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api |
from __future__ import print_function
import os
- from flask.ext.script import Manager, Server
+ from dmutils import init_manager
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
- manager = Manager(application)
- manager.add_command("runserver", Server(port=5000))
+ manager = init_manager(application, 5000, ['./json_schemas'])
+
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
- @manager.command
- def list_routes():
- """List URLs of all application routes."""
- for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule):
- print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule))
-
if __name__ == '__main__':
manager.run()
| Use new dmutils init_manager to set up reload on schema changes | ## Code Before:
from __future__ import print_function
import os
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
@manager.command
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule))
if __name__ == '__main__':
manager.run()
## Instruction:
Use new dmutils init_manager to set up reload on schema changes
## Code After:
from __future__ import print_function
import os
from dmutils import init_manager
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = init_manager(application, 5000, ['./json_schemas'])
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| ...
from dmutils import init_manager
from flask.ext.migrate import Migrate, MigrateCommand
...
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = init_manager(application, 5000, ['./json_schemas'])
migrate = Migrate(application, db)
...
if __name__ == '__main__':
... |
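The commit trades the hand-rolled Manager/Server/list_routes boilerplate for one dmutils call whose third argument lists extra paths to watch, which is what makes the dev server reload on JSON schema edits. A rough sketch of the shape such a helper could take — this is a guess for orientation, not the real dmutils implementation:

from flask.ext.script import Manager, Server

def init_manager_sketch(application, port, watch_paths):
    manager = Manager(application)
    # extra_files reaches Werkzeug's reloader, so edits under
    # watch_paths restart the server just like code changes do.
    manager.add_command('runserver', Server(port=port, extra_files=watch_paths))
    return manager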
4f27be336a58d0bba66a4f7ab57126d9dd734ab9 | talks/views.py | talks/views.py | from django.shortcuts import render, get_object_or_404
from config.utils import get_active_event
from .models import Talk
def list_talks(request):
event = get_active_event()
talks = event.talks.prefetch_related(
'applicants',
'applicants__user',
'skill_level',
'sponsor',
).order_by('-keynote', 'title')
# Temporary hack to let only admins & committee members see the talks
user = request.user
permitted = user.is_authenticated and (user.is_superuser or user.is_talk_committee_member())
if not permitted:
talks = event.talks.none()
return render(request, 'talks/list_talks.html', {
"talks": talks,
})
def view_talk(request, slug):
event = get_active_event()
talk = get_object_or_404(Talk, slug=slug, event=event)
return render(request, 'talks/view_talk.html', {
'talk': talk})
| from django.shortcuts import render, get_object_or_404
from config.utils import get_active_event
from .models import Talk
def list_talks(request):
event = get_active_event()
talks = event.talks.prefetch_related(
'applicants',
'applicants__user',
'skill_level',
'sponsor',
).order_by('-keynote', 'title')
return render(request, 'talks/list_talks.html', {
"talks": talks,
})
def view_talk(request, slug):
event = get_active_event()
talk = get_object_or_404(Talk, slug=slug, event=event)
return render(request, 'talks/view_talk.html', {
'talk': talk})
| Revert "Temporarily make talks visible only to committee" | Revert "Temporarily make talks visible only to committee"
This reverts commit 57050b7025acb3de66024fe01255849a5ba5f1fc.
| Python | bsd-3-clause | WebCampZg/conference-web,WebCampZg/conference-web,WebCampZg/conference-web | from django.shortcuts import render, get_object_or_404
from config.utils import get_active_event
from .models import Talk
def list_talks(request):
event = get_active_event()
talks = event.talks.prefetch_related(
'applicants',
'applicants__user',
'skill_level',
'sponsor',
).order_by('-keynote', 'title')
- # Temporary hack to let only admins & committee members see the talks
- user = request.user
- permitted = user.is_authenticated and (user.is_superuser or user.is_talk_committee_member())
- if not permitted:
- talks = event.talks.none()
-
return render(request, 'talks/list_talks.html', {
"talks": talks,
})
def view_talk(request, slug):
event = get_active_event()
talk = get_object_or_404(Talk, slug=slug, event=event)
return render(request, 'talks/view_talk.html', {
'talk': talk})
| Revert "Temporarily make talks visible only to committee" | ## Code Before:
from django.shortcuts import render, get_object_or_404
from config.utils import get_active_event
from .models import Talk
def list_talks(request):
event = get_active_event()
talks = event.talks.prefetch_related(
'applicants',
'applicants__user',
'skill_level',
'sponsor',
).order_by('-keynote', 'title')
# Temporary hack to let only admins & committee members see the talks
user = request.user
permitted = user.is_authenticated and (user.is_superuser or user.is_talk_committee_member())
if not permitted:
talks = event.talks.none()
return render(request, 'talks/list_talks.html', {
"talks": talks,
})
def view_talk(request, slug):
event = get_active_event()
talk = get_object_or_404(Talk, slug=slug, event=event)
return render(request, 'talks/view_talk.html', {
'talk': talk})
## Instruction:
Revert "Temporarily make talks visible only to committee"
## Code After:
from django.shortcuts import render, get_object_or_404
from config.utils import get_active_event
from .models import Talk
def list_talks(request):
event = get_active_event()
talks = event.talks.prefetch_related(
'applicants',
'applicants__user',
'skill_level',
'sponsor',
).order_by('-keynote', 'title')
return render(request, 'talks/list_talks.html', {
"talks": talks,
})
def view_talk(request, slug):
event = get_active_event()
talk = get_object_or_404(Talk, slug=slug, event=event)
return render(request, 'talks/view_talk.html', {
'talk': talk})
| # ... existing code ...
return render(request, 'talks/list_talks.html', {
# ... rest of the code ... |
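The revert deletes the temporary gate, making the talk list public again. The gated pattern itself — returning QuerySet.none() so callers still get a queryset-shaped object — is worth noting; an illustrative helper, not code from the repo:

def visible_talks(event, user):
    # .none() keeps the same API as a populated queryset, so
    # templates and pagination need no special-casing.
    if user.is_authenticated and (
            user.is_superuser or user.is_talk_committee_member()):
        return event.talks.all()
    return event.talks.none()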
eacc66e5a9ab3310c75924dcb340e4944e9424d4 | tests/specifications/external_spec_test.py | tests/specifications/external_spec_test.py | from fontbakery.checkrunner import Section
from fontbakery.fonts_spec import spec_factory
def check_filter(checkid, font=None, **iterargs):
if checkid in (
"com.google.fonts/check/035", # ftxvalidator
"com.google.fonts/check/036", # ots-sanitize
"com.google.fonts/check/037", # Font Validator
"com.google.fonts/check/038", # Fontforge
"com.google.fonts/check/039", # Fontforge
):
return False, "Skipping external tools."
return True, None
def test_external_specification():
"""Test the creation of external specifications."""
specification = spec_factory(default_section=Section("Dalton Maag OpenType"))
specification.set_check_filter(check_filter)
specification.auto_register(
globals(), spec_imports=['fontbakery.specifications.opentype'])
assert len(specification.sections) > 1
| from fontbakery.checkrunner import Section
from fontbakery.fonts_spec import spec_factory
def check_filter(checkid, font=None, **iterargs):
if checkid in (
"com.google.fonts/check/035", # ftxvalidator
"com.google.fonts/check/036", # ots-sanitize
"com.google.fonts/check/037", # Font Validator
"com.google.fonts/check/038", # Fontforge
"com.google.fonts/check/039", # Fontforge
):
return False, "Skipping external tools."
return True, None
def test_external_specification():
"""Test the creation of external specifications."""
specification = spec_factory(default_section=Section("Dalton Maag OpenType"))
specification.set_check_filter(check_filter)
specification.auto_register(
globals(), spec_imports=["fontbakery.specifications.opentype"])
# Probe some tests
expected_tests = ["com.google.fonts/check/002", "com.google.fonts/check/180"]
specification.test_expected_checks(expected_tests)
# Probe tests we don't want
assert "com.google.fonts/check/035" not in specification._check_registry.keys()
assert len(specification.sections) > 1
| Test for expected and unexpected checks | Test for expected and unexpected checks
| Python | apache-2.0 | googlefonts/fontbakery,graphicore/fontbakery,graphicore/fontbakery,googlefonts/fontbakery,googlefonts/fontbakery,moyogo/fontbakery,moyogo/fontbakery,moyogo/fontbakery,graphicore/fontbakery | from fontbakery.checkrunner import Section
from fontbakery.fonts_spec import spec_factory
def check_filter(checkid, font=None, **iterargs):
if checkid in (
"com.google.fonts/check/035", # ftxvalidator
"com.google.fonts/check/036", # ots-sanitize
"com.google.fonts/check/037", # Font Validator
"com.google.fonts/check/038", # Fontforge
"com.google.fonts/check/039", # Fontforge
):
return False, "Skipping external tools."
return True, None
def test_external_specification():
"""Test the creation of external specifications."""
specification = spec_factory(default_section=Section("Dalton Maag OpenType"))
specification.set_check_filter(check_filter)
specification.auto_register(
- globals(), spec_imports=['fontbakery.specifications.opentype'])
+ globals(), spec_imports=["fontbakery.specifications.opentype"])
+
+ # Probe some tests
+ expected_tests = ["com.google.fonts/check/002", "com.google.fonts/check/180"]
+ specification.test_expected_checks(expected_tests)
+
+ # Probe tests we don't want
+ assert "com.google.fonts/check/035" not in specification._check_registry.keys()
assert len(specification.sections) > 1
| Test for expected and unexpected checks | ## Code Before:
from fontbakery.checkrunner import Section
from fontbakery.fonts_spec import spec_factory
def check_filter(checkid, font=None, **iterargs):
if checkid in (
"com.google.fonts/check/035", # ftxvalidator
"com.google.fonts/check/036", # ots-sanitize
"com.google.fonts/check/037", # Font Validator
"com.google.fonts/check/038", # Fontforge
"com.google.fonts/check/039", # Fontforge
):
return False, "Skipping external tools."
return True, None
def test_external_specification():
"""Test the creation of external specifications."""
specification = spec_factory(default_section=Section("Dalton Maag OpenType"))
specification.set_check_filter(check_filter)
specification.auto_register(
globals(), spec_imports=['fontbakery.specifications.opentype'])
assert len(specification.sections) > 1
## Instruction:
Test for expected and unexpected checks
## Code After:
from fontbakery.checkrunner import Section
from fontbakery.fonts_spec import spec_factory
def check_filter(checkid, font=None, **iterargs):
if checkid in (
"com.google.fonts/check/035", # ftxvalidator
"com.google.fonts/check/036", # ots-sanitize
"com.google.fonts/check/037", # Font Validator
"com.google.fonts/check/038", # Fontforge
"com.google.fonts/check/039", # Fontforge
):
return False, "Skipping external tools."
return True, None
def test_external_specification():
"""Test the creation of external specifications."""
specification = spec_factory(default_section=Section("Dalton Maag OpenType"))
specification.set_check_filter(check_filter)
specification.auto_register(
globals(), spec_imports=["fontbakery.specifications.opentype"])
# Probe some tests
expected_tests = ["com.google.fonts/check/002", "com.google.fonts/check/180"]
specification.test_expected_checks(expected_tests)
# Probe tests we don't want
assert "com.google.fonts/check/035" not in specification._check_registry.keys()
assert len(specification.sections) > 1
| # ... existing code ...
specification.auto_register(
globals(), spec_imports=["fontbakery.specifications.opentype"])
# Probe some tests
expected_tests = ["com.google.fonts/check/002", "com.google.fonts/check/180"]
specification.test_expected_checks(expected_tests)
# Probe tests we don't want
assert "com.google.fonts/check/035" not in specification._check_registry.keys()
# ... rest of the code ... |
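The updated test probes registration in both directions: expected IDs via test_expected_checks and unwanted ones via the registry. The filter itself is just a closure over a skip set; a generic sketch using the IDs the entry skips:

SKIPPED = {
    'com.google.fonts/check/035',  # ftxvalidator
    'com.google.fonts/check/036',  # ots-sanitize
    'com.google.fonts/check/037',  # Font Validator
}

def make_check_filter(skipped):
    def check_filter(checkid, font=None, **iterargs):
        if checkid in skipped:
            return False, 'Skipping external tools.'
        return True, None
    return check_filter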
39404dfa8ab921977347d4405c525935a0ce234d | cc/license/tests/test_licenses.py | cc/license/tests/test_licenses.py | from nose.tools import assert_true
def test_find_sampling_selector():
from zope.interface import implementedBy
import cc.license
sampling_selector = cc.license.get_selector('recombo')()
return sampling_selector
def test_find_sampling_licenses():
selector = test_find_sampling_selector()
lic = selector.by_code('sampling')
assert_true(not lic.libre)
assert_true(lic.deprecated)
lic = selector.by_code('sampling+')
assert_true(not lic.deprecated)
def test_find_pd():
from zope.interface import implementedBy
import cc.license
pd_selector = cc.license.get_selector('publicdomain')()
pd = pd_selector.by_code('publicdomain')
return pd
def test_pd():
pd = test_find_pd()
assert_true(pd.libre)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(not pd.deprecated)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(pd.license_code == 'publicdomain')
assert_true(pd.name() == 'Public Domain' == pd.name('en'))
assert_true(pd.name('hr') == u'Javna domena')
| from nose.tools import assert_true
def test_find_sampling_selector():
from zope.interface import implementedBy
import cc.license
sampling_selector = cc.license.get_selector('recombo')()
return sampling_selector
def test_find_standard_selector():
from zope.interface import implementedBy
import cc.license
standard_selector = cc.license.get_selector('standard')()
return standard_selector
def test_find_sampling_licenses():
selector = test_find_sampling_selector()
lic = selector.by_code('sampling')
assert_true(not lic.libre)
assert_true(lic.deprecated)
lic = selector.by_code('sampling+')
assert_true(not lic.deprecated)
def test_find_pd():
from zope.interface import implementedBy
import cc.license
pd_selector = cc.license.get_selector('publicdomain')()
pd = pd_selector.by_code('publicdomain')
return pd
def test_pd():
pd = test_find_pd()
assert_true(pd.libre)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(not pd.deprecated)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(pd.license_code == 'publicdomain')
assert_true(pd.name() == 'Public Domain' == pd.name('en'))
assert_true(pd.name('hr') == u'Javna domena')
| Add a test for grabbing the standard selector | Add a test for grabbing the standard selector
| Python | mit | creativecommons/cc.license,creativecommons/cc.license | from nose.tools import assert_true
def test_find_sampling_selector():
from zope.interface import implementedBy
import cc.license
sampling_selector = cc.license.get_selector('recombo')()
return sampling_selector
+
+ def test_find_standard_selector():
+ from zope.interface import implementedBy
+ import cc.license
+
+ standard_selector = cc.license.get_selector('standard')()
+ return standard_selector
def test_find_sampling_licenses():
selector = test_find_sampling_selector()
lic = selector.by_code('sampling')
assert_true(not lic.libre)
assert_true(lic.deprecated)
lic = selector.by_code('sampling+')
assert_true(not lic.deprecated)
def test_find_pd():
from zope.interface import implementedBy
import cc.license
pd_selector = cc.license.get_selector('publicdomain')()
pd = pd_selector.by_code('publicdomain')
return pd
def test_pd():
pd = test_find_pd()
assert_true(pd.libre)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(not pd.deprecated)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(pd.license_code == 'publicdomain')
assert_true(pd.name() == 'Public Domain' == pd.name('en'))
assert_true(pd.name('hr') == u'Javna domena')
| Add a test for grabbing the standard selector | ## Code Before:
from nose.tools import assert_true
def test_find_sampling_selector():
from zope.interface import implementedBy
import cc.license
sampling_selector = cc.license.get_selector('recombo')()
return sampling_selector
def test_find_sampling_licenses():
selector = test_find_sampling_selector()
lic = selector.by_code('sampling')
assert_true(not lic.libre)
assert_true(lic.deprecated)
lic = selector.by_code('sampling+')
assert_true(not lic.deprecated)
def test_find_pd():
from zope.interface import implementedBy
import cc.license
pd_selector = cc.license.get_selector('publicdomain')()
pd = pd_selector.by_code('publicdomain')
return pd
def test_pd():
pd = test_find_pd()
assert_true(pd.libre)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(not pd.deprecated)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(pd.license_code == 'publicdomain')
assert_true(pd.name() == 'Public Domain' == pd.name('en'))
assert_true(pd.name('hr') == u'Javna domena')
## Instruction:
Add a test for grabbing the standard selector
## Code After:
from nose.tools import assert_true
def test_find_sampling_selector():
from zope.interface import implementedBy
import cc.license
sampling_selector = cc.license.get_selector('recombo')()
return sampling_selector
def test_find_standard_selector():
from zope.interface import implementedBy
import cc.license
standard_selector = cc.license.get_selector('standard')()
return standard_selector
def test_find_sampling_licenses():
selector = test_find_sampling_selector()
lic = selector.by_code('sampling')
assert_true(not lic.libre)
assert_true(lic.deprecated)
lic = selector.by_code('sampling+')
assert_true(not lic.deprecated)
def test_find_pd():
from zope.interface import implementedBy
import cc.license
pd_selector = cc.license.get_selector('publicdomain')()
pd = pd_selector.by_code('publicdomain')
return pd
def test_pd():
pd = test_find_pd()
assert_true(pd.libre)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(not pd.deprecated)
assert_true(pd.jurisdiction == 'Your mom')
assert_true(pd.license_code == 'publicdomain')
assert_true(pd.name() == 'Public Domain' == pd.name('en'))
assert_true(pd.name('hr') == u'Javna domena')
| // ... existing code ...
return sampling_selector
def test_find_standard_selector():
from zope.interface import implementedBy
import cc.license
standard_selector = cc.license.get_selector('standard')()
return standard_selector
// ... rest of the code ... |
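test_find_standard_selector only proves the selector constructs; a slightly stronger variant would also resolve a license code through it, mirroring the sampling tests above. A sketch in the same style — 'by-sa' is assumed to be a valid code for the standard selector, which the entry does not confirm:

def test_standard_selector_by_code():
    selector = test_find_standard_selector()
    lic = selector.by_code('by-sa')
    assert_true(lic.license_code == 'by-sa')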
12d525b79e78d8e183d75a2b81221f7d18519897 | tests/kernel_test.py | tests/kernel_test.py | from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
| from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
| Fix tests related to result collection | Fix tests related to result collection
| Python | mit | vdjagilev/desefu | from kernel.kernel import Kernel
from modules import AbstractModule
+ from kernel.config import Config
+ from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
- remove_files = glob.glob('abcetc_*.json')
+ config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
+ result = Result(config)
+ Kernel.result = result
- for rf in remove_files:
- os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
+ data = Kernel.result.result[0]
- files = glob.glob('abcetc_*.json')
-
- assert len(files) == 1
-
- with open(files[0], 'r') as file_data:
- data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
| Fix tests related to result collection | ## Code Before:
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
## Instruction:
Fix tests related to result collection
## Code After:
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
    mod = Kernel.get_module('modules', 'file.Extension')
    assert isinstance(mod, AbstractModule)
    try:
        mod = Kernel.get_module('modules', 'not.Exists')
    except SystemExit:
        assert True
    try:
        mod = Kernel.get_module('tests.modules', 'file.WrongModule')
    except KeyError:
        assert True
def test_main_exec_search():
    config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
    result = Result(config)
    Kernel.result = result
    mc = ModuleChain()
    mc.id = "abcetc"
    mc.files = [
        './tests/modules/file/extension_mocks/database.sqlite',
        './tests/modules/file/extension_mocks/database2.db'
    ]
    module = Kernel.get_module('modules', 'file.Extension')
    module.files = mc.files
    module.args = ['db']
    mc.modules.append(module)
    Kernel.exec_search([mc])
    data = Kernel.result.result[0]
    assert data['module_chain_id'] == 'abcetc'
    assert len(data['modules']) == 1
    assert data['modules'][0]['mod'] == 'file.Extension'
    assert data['modules'][0]['files_count'] == 1
    assert len(data['modules'][0]['files']) == 1
// ... existing code ...
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
// ... modified code ...
def test_main_exec_search():
    config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
    result = Result(config)
    Kernel.result = result
...
    data = Kernel.result.result[0]
// ... rest of the code ...
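The pattern behind this row is worth spelling out: the old test wrote `<id>_*.json` files to disk, globbed them back, and parsed them, which forced cleanup code and made runs order-dependent; the fix hangs an in-memory collector off the kernel so the test reduces to one `Kernel.result.result[0]` lookup. Below is a hedged sketch of that collector pattern. Only the `Result(config)` constructor and the `result` list are visible in the row itself, so the `add` helper and its arguments are illustrative assumptions, not the repository's actual API.

# Hypothetical stand-in for kernel.result.Result; only construction from a
# config and the in-memory `result` list are taken from the diff above.
class Result:
    def __init__(self, config):
        self.config = config
        self.result = []  # one dict appended per executed module chain

    def add(self, module_chain_id, modules):
        # Collect in memory instead of dumping '<id>_*.json' files to disk,
        # which is what forced the old test to glob and clean up files.
        self.result.append({'module_chain_id': module_chain_id,
                            'modules': modules})

# Usage mirroring the rewritten test: nothing touches the filesystem.
collector = Result(config=None)
collector.add('abcetc', [{'mod': 'file.Extension', 'files_count': 1,
                          'files': ['database2.db']}])
assert collector.result[0]['module_chain_id'] == 'abcetc'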
7eac938f0a4726beb1eb01d32486dfeb0e57ff3a | h2o-hadoop/tests/python/pyunit_s3_import_export.py | h2o-hadoop/tests/python/pyunit_s3_import_export.py |
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
from pandas.util.testing import assert_frame_equal
def s3_import_export():
    local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
    for scheme in ["s3n", "s3a"]:
        timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
        s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
        h2o.export_file(local_frame, s3_path)
        s3_frame = h2o.import_file(s3_path)
        assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
    pyunit_utils.standalone_test(s3_import_export)
else:
    s3_import_export()
|
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
def s3_import_export():
    local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
    for scheme in ["s3n", "s3a"]:
        timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
        unique_suffix = str(uuid.uuid4())
        s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
            timestamp + "." + unique_suffix + ".csv.zip"
        h2o.export_file(local_frame, s3_path)
        s3_frame = h2o.import_file(s3_path)
        assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
    pyunit_utils.standalone_test(s3_import_export)
else:
    s3_import_export()
| Fix flaky Hadoop smoke tests - make sure the exported files are unique | Fix flaky Hadoop smoke tests - make sure the exported files are unique
| Python | apache-2.0 | h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,michalkurka/h2o-3 |
  import sys, os
  sys.path.insert(1, os.path.join("..","..",".."))
  from tests import pyunit_utils
  from datetime import datetime
  import h2o
+ import uuid
  from pandas.util.testing import assert_frame_equal
  def s3_import_export():
      local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
      for scheme in ["s3n", "s3a"]:
          timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
+         unique_suffix = str(uuid.uuid4())
-         s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
+         s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
+             timestamp + "." + unique_suffix + ".csv.zip"
          h2o.export_file(local_frame, s3_path)
          s3_frame = h2o.import_file(s3_path)
          assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
  if __name__ == "__main__":
      pyunit_utils.standalone_test(s3_import_export)
  else:
      s3_import_export()
| Fix flaky Hadoop smoke tests - make sure the exported files are unique | ## Code Before:
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
from pandas.util.testing import assert_frame_equal
def s3_import_export():
    local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
    for scheme in ["s3n", "s3a"]:
        timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
        s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
        h2o.export_file(local_frame, s3_path)
        s3_frame = h2o.import_file(s3_path)
        assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
    pyunit_utils.standalone_test(s3_import_export)
else:
    s3_import_export()
## Instruction:
Fix flaky Hadoop smoke tests - make sure the exported files are unique
## Code After:
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
def s3_import_export():
    local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
    for scheme in ["s3n", "s3a"]:
        timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
        unique_suffix = str(uuid.uuid4())
        s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
            timestamp + "." + unique_suffix + ".csv.zip"
        h2o.export_file(local_frame, s3_path)
        s3_frame = h2o.import_file(s3_path)
        assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
    pyunit_utils.standalone_test(s3_import_export)
else:
    s3_import_export()
| ...
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
...
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
unique_suffix = str(uuid.uuid4())
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
timestamp + "." + unique_suffix + ".csv.zip"
h2o.export_file(local_frame, s3_path)
... |