commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 52
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| fuzzy_diff
stringlengths 16
3.18k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b96ac3debb472dcf3aaac84f43309a4d01a27159 | exam/tests/test_dynamic_import.py | exam/tests/test_dynamic_import.py |
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
specific_exam = create_specific_exam('Biopsy')
specific_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
specific_exam = create_specific_exam('Necropsy')
specific_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
|
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
| Update variables names in exam tests | Update variables names in exam tests
| Python | mit | msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub |
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
- specific_exam = create_specific_exam('Biopsy')
+ biopsy_exam = create_specific_exam('Biopsy')
- specific_exam | should | be_kind_of(Biopsy)
+ biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
- specific_exam = create_specific_exam('Necropsy')
+ necropsy_exam = create_specific_exam('Necropsy')
- specific_exam | should | be_kind_of(Necropsy)
+ necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
| Update variables names in exam tests | ## Code Before:
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
specific_exam = create_specific_exam('Biopsy')
specific_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
specific_exam = create_specific_exam('Necropsy')
specific_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
## Instruction:
Update variables names in exam tests
## Code After:
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
| ...
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
...
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
... |
bde0363b51bfa7bb6facac1185c9a687ff952e36 | artifacts/exceptions.py | artifacts/exceptions.py |
from __future__ import print_function, division
class ArtifactsError(RuntimeError):
"""Base for all exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Root for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
|
from __future__ import print_function, division
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
| Add __all__ variable to enforce ordering in docs | Add __all__ variable to enforce ordering in docs
| Python | mit | smarter-travel-media/stac |
from __future__ import print_function, division
+ __all__ = [
+ 'ArtifactsError',
+ 'ArtifactoryApiError',
+ 'NoReleaseArtifactsError',
+ 'NoArtifactVersionsError'
+ ]
+
class ArtifactsError(RuntimeError):
- """Base for all exceptions raised by the Artifacts library"""
+ """Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
- """Root for errors interacting with the Artifactory REST API"""
+ """Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
| Add __all__ variable to enforce ordering in docs | ## Code Before:
from __future__ import print_function, division
class ArtifactsError(RuntimeError):
"""Base for all exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Root for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
## Instruction:
Add __all__ variable to enforce ordering in docs
## Code After:
from __future__ import print_function, division
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
| ...
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
...
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
...
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
... |
9b53673771b8b185232cffad129036bbe084a169 | api.py | api.py | from tastypie.authorization import Authorization
from tastypie.authentication import BasicAuthentication
from tastypie.fields import ForeignKey
from tastypie.resources import ModelResource
from .models import APNSDevice, GCMDevice
class APNSDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = APNSDevice.objects.all()
resource_name = "device/apns"
class GCMDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = GCMDevice.objects.all()
resource_name = "device/gcm"
class APNSDeviceAuthenticatedResource(APNSDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(APNSDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
bundle.data["user_id"] = bundle.request.user.id
return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, **kwargs)
class GCMDeviceAuthenticatedResource(GCMDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(GCMDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
bundle.data["user_id"] = bundle.request.user.id
return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, **kwargs)
| from tastypie.authorization import Authorization
from tastypie.authentication import BasicAuthentication
from tastypie.fields import ForeignKey
from tastypie.resources import ModelResource
from .models import APNSDevice, GCMDevice
class APNSDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = APNSDevice.objects.all()
resource_name = "device/apns"
class GCMDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = GCMDevice.objects.all()
resource_name = "device/gcm"
class APNSDeviceAuthenticatedResource(APNSDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(APNSDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
# See https://github.com/toastdriven/django-tastypie/issues/854
return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
class GCMDeviceAuthenticatedResource(GCMDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(GCMDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
# See https://github.com/toastdriven/django-tastypie/issues/854
return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
| Fix device-user linking in authenticated resources | Fix device-user linking in authenticated resources
| Python | mit | Ian-Foote/django-push-notifications,matthewh/django-push-notifications,Ubiwhere/django-push-notifications,freakboy3742/django-push-notifications,gkirkpatrick/django-push-notifications,hylje/django-push-notifications,AndreasBackx/django-push-notifications,cristiano2lopes/django-push-notifications,jleclanche/django-push-notifications,1vank1n/django-push-notifications,leonmu/django-push-notifications,avichalp/django-push-notifications,Dubrzr/django-push-notifications,Tictrac/django-push-notifications,rmoorman/django-push-notifications,leonsas/django-push-notifications,omritoptix/django-ltg-skeleton,lneoe/django-push-notifications,lukeburden/django-push-notifications,rsalmaso/django-push-notifications,ajatamayo/django-push-notifications,vuchau/django-push-notifications,fsto/django-push-notifications,nnseva/django-push-notifications,IvoPintodaSilva/django-push-notifications,CustomerSupport/django-push-notifications,dilvane/django-push-notifications,Adys/django-push-notifications,gio82/django-push-notifications,omritoptix/django-ltg-skeleton,giserh/django-push-notifications,shigmas/django-push-notifications,vikcena01/django-push-notification,jamaalscarlett/django-push-notifications,azevakin/django-push-notifications,apokinsocha/django-push-notifications,GaleDragon/django-push-notifications | from tastypie.authorization import Authorization
from tastypie.authentication import BasicAuthentication
from tastypie.fields import ForeignKey
from tastypie.resources import ModelResource
from .models import APNSDevice, GCMDevice
class APNSDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = APNSDevice.objects.all()
resource_name = "device/apns"
class GCMDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = GCMDevice.objects.all()
resource_name = "device/gcm"
class APNSDeviceAuthenticatedResource(APNSDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(APNSDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
- bundle.data["user_id"] = bundle.request.user.id
+ # See https://github.com/toastdriven/django-tastypie/issues/854
- return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, **kwargs)
+ return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
class GCMDeviceAuthenticatedResource(GCMDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(GCMDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
- bundle.data["user_id"] = bundle.request.user.id
+ # See https://github.com/toastdriven/django-tastypie/issues/854
- return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, **kwargs)
+ return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
| Fix device-user linking in authenticated resources | ## Code Before:
from tastypie.authorization import Authorization
from tastypie.authentication import BasicAuthentication
from tastypie.fields import ForeignKey
from tastypie.resources import ModelResource
from .models import APNSDevice, GCMDevice
class APNSDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = APNSDevice.objects.all()
resource_name = "device/apns"
class GCMDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = GCMDevice.objects.all()
resource_name = "device/gcm"
class APNSDeviceAuthenticatedResource(APNSDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(APNSDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
bundle.data["user_id"] = bundle.request.user.id
return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, **kwargs)
class GCMDeviceAuthenticatedResource(GCMDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(GCMDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
bundle.data["user_id"] = bundle.request.user.id
return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, **kwargs)
## Instruction:
Fix device-user linking in authenticated resources
## Code After:
from tastypie.authorization import Authorization
from tastypie.authentication import BasicAuthentication
from tastypie.fields import ForeignKey
from tastypie.resources import ModelResource
from .models import APNSDevice, GCMDevice
class APNSDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = APNSDevice.objects.all()
resource_name = "device/apns"
class GCMDeviceResource(ModelResource):
class Meta:
authorization = Authorization()
queryset = GCMDevice.objects.all()
resource_name = "device/gcm"
class APNSDeviceAuthenticatedResource(APNSDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(APNSDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
# See https://github.com/toastdriven/django-tastypie/issues/854
return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
class GCMDeviceAuthenticatedResource(GCMDeviceResource):
# user = ForeignKey(UserResource, "user")
class Meta(GCMDeviceResource.Meta):
authentication = BasicAuthentication()
# authorization = SameUserAuthorization()
def obj_create(self, bundle, **kwargs):
# See https://github.com/toastdriven/django-tastypie/issues/854
return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
| ...
def obj_create(self, bundle, **kwargs):
# See https://github.com/toastdriven/django-tastypie/issues/854
return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
...
def obj_create(self, bundle, **kwargs):
# See https://github.com/toastdriven/django-tastypie/issues/854
return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
... |
3f178359b8649b6b92900ae790e894971405b720 | main.py | main.py | from src import create
from src import count
from src import thefile
from src import execute
def main(x, y, file):
#Create it
seats = create.new_2d(x, y)
#Count it
counted_start = count.count_array(x, y, seats)
print(counted_start)
#Get the commands
commands = thefile.get_cmmds(file)
#The execution
for line in commands:
seats = execute.execute_cmmds(seats, line)
counted_after = count.count_array(x, y, seats)
counter_occupied = 1000000 - counted_after
return counter_occupied
results = main(1000, 1000, 'inputfile.txt')
print("Cool") | from src import create
from src import count
from src import thefile
from src import execute
def main(x, y, file):
#Create it
seats = create.new_2d(x, y)
#Count it
counted_start = count.count_array(x, y, seats)
print(counted_start)
#Get the commands
commands = thefile.get_cmmds(file)
#The execution
for line in commands:
seats = execute.execute_cmmds(seats, line)
counted_after = count.count_array(x, y, seats)
counter_occupied = 1000000 - counted_after
return counter_occupied
results = main(1000, 1000, 'inputfile.txt')
print("Cool")
print("Other") | CLEAN TEMPLATE Clean up the project template further still | CLEAN TEMPLATE Clean up the project template further still
| Python | bsd-2-clause | kevindiltinero/seass3 | from src import create
from src import count
from src import thefile
from src import execute
def main(x, y, file):
#Create it
seats = create.new_2d(x, y)
#Count it
counted_start = count.count_array(x, y, seats)
print(counted_start)
#Get the commands
commands = thefile.get_cmmds(file)
#The execution
for line in commands:
seats = execute.execute_cmmds(seats, line)
counted_after = count.count_array(x, y, seats)
counter_occupied = 1000000 - counted_after
return counter_occupied
results = main(1000, 1000, 'inputfile.txt')
print("Cool")
+ print("Other") | CLEAN TEMPLATE Clean up the project template further still | ## Code Before:
from src import create
from src import count
from src import thefile
from src import execute
def main(x, y, file):
#Create it
seats = create.new_2d(x, y)
#Count it
counted_start = count.count_array(x, y, seats)
print(counted_start)
#Get the commands
commands = thefile.get_cmmds(file)
#The execution
for line in commands:
seats = execute.execute_cmmds(seats, line)
counted_after = count.count_array(x, y, seats)
counter_occupied = 1000000 - counted_after
return counter_occupied
results = main(1000, 1000, 'inputfile.txt')
print("Cool")
## Instruction:
CLEAN TEMPLATE Clean up the project template further still
## Code After:
from src import create
from src import count
from src import thefile
from src import execute
def main(x, y, file):
#Create it
seats = create.new_2d(x, y)
#Count it
counted_start = count.count_array(x, y, seats)
print(counted_start)
#Get the commands
commands = thefile.get_cmmds(file)
#The execution
for line in commands:
seats = execute.execute_cmmds(seats, line)
counted_after = count.count_array(x, y, seats)
counter_occupied = 1000000 - counted_after
return counter_occupied
results = main(1000, 1000, 'inputfile.txt')
print("Cool")
print("Other") | ...
print("Cool")
print("Other")
... |
a326f2daad6817f426099518da77bc241fd9b51e | bibpy/doi/__init__.py | bibpy/doi/__init__.py |
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def download(doi, source='http://dx.doi.org/{0}', raw=False):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
return contents if raw else bibpy.read_string(contents).entries[0]
finally:
handle.close()
|
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
| Rename doi function, add keyword options | Rename doi function, add keyword options
| Python | mit | MisanthropicBit/bibpy,MisanthropicBit/bibpy |
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
- def download(doi, source='http://dx.doi.org/{0}', raw=False):
+ def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
- return contents if raw else bibpy.read_string(contents).entries[0]
+ if raw:
+ return contents
+ else:
+ return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
| Rename doi function, add keyword options | ## Code Before:
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def download(doi, source='http://dx.doi.org/{0}', raw=False):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
return contents if raw else bibpy.read_string(contents).entries[0]
finally:
handle.close()
## Instruction:
Rename doi function, add keyword options
## Code After:
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
| // ... existing code ...
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
// ... modified code ...
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
// ... rest of the code ... |
6136fc2bd2d9d191df7a9e6afd3aa9e4f110d61e | numpy/core/tests/test_print.py | numpy/core/tests/test_print.py | import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
| import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
| Use parametric tests for format tests so that it is clearer which type is failing. | Use parametric tests for format tests so that it is clearer which type is failing.
| Python | bsd-3-clause | solarjoe/numpy,NextThought/pypy-numpy,musically-ut/numpy,trankmichael/numpy,ViralLeadership/numpy,b-carter/numpy,argriffing/numpy,ogrisel/numpy,mhvk/numpy,mingwpy/numpy,b-carter/numpy,ewmoore/numpy,jakirkham/numpy,ahaldane/numpy,KaelChen/numpy,mhvk/numpy,utke1/numpy,ogrisel/numpy,skymanaditya1/numpy,rmcgibbo/numpy,embray/numpy,immerrr/numpy,ekalosak/numpy,skymanaditya1/numpy,ssanderson/numpy,pelson/numpy,abalkin/numpy,GaZ3ll3/numpy,rhythmsosad/numpy,kiwifb/numpy,numpy/numpy-refactor,nguyentu1602/numpy,maniteja123/numpy,BMJHayward/numpy,empeeu/numpy,has2k1/numpy,dwillmer/numpy,githubmlai/numpy,pyparallel/numpy,BMJHayward/numpy,seberg/numpy,Srisai85/numpy,AustereCuriosity/numpy,dwf/numpy,GrimDerp/numpy,numpy/numpy,charris/numpy,abalkin/numpy,Anwesh43/numpy,ViralLeadership/numpy,dato-code/numpy,rgommers/numpy,pizzathief/numpy,ChanderG/numpy,madphysicist/numpy,dwf/numpy,brandon-rhodes/numpy,gfyoung/numpy,groutr/numpy,numpy/numpy-refactor,CMartelLML/numpy,jonathanunderwood/numpy,rajathkumarmp/numpy,cowlicks/numpy,rhythmsosad/numpy,joferkington/numpy,joferkington/numpy,BabeNovelty/numpy,utke1/numpy,Yusa95/numpy,GrimDerp/numpy,astrofrog/numpy,charris/numpy,CMartelLML/numpy,GaZ3ll3/numpy,cjermain/numpy,ajdawson/numpy,MSeifert04/numpy,MaPePeR/numpy,has2k1/numpy,andsor/numpy,githubmlai/numpy,larsmans/numpy,matthew-brett/numpy,mindw/numpy,Dapid/numpy,nguyentu1602/numpy,gmcastil/numpy,abalkin/numpy,cjermain/numpy,rajathkumarmp/numpy,Anwesh43/numpy,mortada/numpy,sigma-random/numpy,numpy/numpy,chiffa/numpy,Dapid/numpy,mathdd/numpy,hainm/numpy,jschueller/numpy,ogrisel/numpy,brandon-rhodes/numpy,ewmoore/numpy,dwillmer/numpy,ekalosak/numpy,tacaswell/numpy,numpy/numpy-refactor,mwiebe/numpy,bertrand-l/numpy,jakirkham/numpy,endolith/numpy,embray/numpy,solarjoe/numpy,mattip/numpy,MaPePeR/numpy,mwiebe/numpy,AustereCuriosity/numpy,ESSS/numpy,matthew-brett/numpy,argriffing/numpy,nbeaver/numpy,dimasad/numpy,drasmuss/numpy,rudimeier/numpy,Eric89GXL/numpy,naritta/num
py,chatcannon/numpy,Dapid/numpy,jakirkham/numpy,AustereCuriosity/numpy,ESSS/numpy,tdsmith/numpy,rmcgibbo/numpy,pizzathief/numpy,rherault-insa/numpy,embray/numpy,seberg/numpy,rudimeier/numpy,mathdd/numpy,SunghanKim/numpy,MaPePeR/numpy,ddasilva/numpy,naritta/numpy,GrimDerp/numpy,nbeaver/numpy,numpy/numpy,drasmuss/numpy,kiwifb/numpy,mwiebe/numpy,bertrand-l/numpy,ogrisel/numpy,ahaldane/numpy,mortada/numpy,numpy/numpy-refactor,andsor/numpy,astrofrog/numpy,skwbc/numpy,MaPePeR/numpy,githubmlai/numpy,andsor/numpy,empeeu/numpy,dwillmer/numpy,CMartelLML/numpy,Srisai85/numpy,dimasad/numpy,madphysicist/numpy,cjermain/numpy,shoyer/numpy,ewmoore/numpy,drasmuss/numpy,bringingheavendown/numpy,dato-code/numpy,NextThought/pypy-numpy,Anwesh43/numpy,bmorris3/numpy,andsor/numpy,felipebetancur/numpy,mindw/numpy,mhvk/numpy,GaZ3ll3/numpy,skwbc/numpy,BMJHayward/numpy,jschueller/numpy,musically-ut/numpy,charris/numpy,matthew-brett/numpy,pelson/numpy,joferkington/numpy,seberg/numpy,jankoslavic/numpy,WillieMaddox/numpy,grlee77/numpy,Linkid/numpy,KaelChen/numpy,yiakwy/numpy,ChristopherHogan/numpy,MichaelAquilina/numpy,sigma-random/numpy,chatcannon/numpy,sinhrks/numpy,grlee77/numpy,dwillmer/numpy,SiccarPoint/numpy,gfyoung/numpy,pbrod/numpy,simongibbons/numpy,jorisvandenbossche/numpy,MSeifert04/numpy,ViralLeadership/numpy,yiakwy/numpy,njase/numpy,yiakwy/numpy,jorisvandenbossche/numpy,leifdenby/numpy,rhythmsosad/numpy,simongibbons/numpy,trankmichael/numpy,charris/numpy,brandon-rhodes/numpy,tdsmith/numpy,seberg/numpy,pdebuyl/numpy,numpy/numpy-refactor,rudimeier/numpy,anntzer/numpy,jorisvandenbossche/numpy,rajathkumarmp/numpy,musically-ut/numpy,grlee77/numpy,MSeifert04/numpy,bertrand-l/numpy,pyparallel/numpy,ajdawson/numpy,SunghanKim/numpy,KaelChen/numpy,jakirkham/numpy,chatcannon/numpy,kiwifb/numpy,pelson/numpy,Linkid/numpy,simongibbons/numpy,NextThought/pypy-numpy,madphysicist/numpy,MSeifert04/numpy,behzadnouri/numpy,shoyer/numpy,rherault-insa/numpy,WarrenWeckesser/numpy,ContinuumIO/numpy,embray/n
umpy,pdebuyl/numpy,kirillzhuravlev/numpy,utke1/numpy,larsmans/numpy,GrimDerp/numpy,immerrr/numpy,stefanv/numpy,dwf/numpy,sonnyhu/numpy,ewmoore/numpy,mhvk/numpy,ChanderG/numpy,hainm/numpy,SiccarPoint/numpy,jankoslavic/numpy,behzadnouri/numpy,musically-ut/numpy,mortada/numpy,pbrod/numpy,hainm/numpy,Yusa95/numpy,pizzathief/numpy,embray/numpy,leifdenby/numpy,stuarteberg/numpy,groutr/numpy,sigma-random/numpy,jankoslavic/numpy,solarjoe/numpy,moreati/numpy,pelson/numpy,immerrr/numpy,dimasad/numpy,endolith/numpy,hainm/numpy,larsmans/numpy,tynn/numpy,SunghanKim/numpy,trankmichael/numpy,nbeaver/numpy,pbrod/numpy,ChristopherHogan/numpy,ajdawson/numpy,sigma-random/numpy,dch312/numpy,tdsmith/numpy,stuarteberg/numpy,brandon-rhodes/numpy,has2k1/numpy,MichaelAquilina/numpy,mhvk/numpy,mingwpy/numpy,pelson/numpy,tynn/numpy,rgommers/numpy,felipebetancur/numpy,dato-code/numpy,rmcgibbo/numpy,WarrenWeckesser/numpy,WarrenWeckesser/numpy,rherault-insa/numpy,bringingheavendown/numpy,simongibbons/numpy,jakirkham/numpy,jorisvandenbossche/numpy,dwf/numpy,anntzer/numpy,jankoslavic/numpy,Yusa95/numpy,ajdawson/numpy,sinhrks/numpy,ogrisel/numpy,ssanderson/numpy,naritta/numpy,madphysicist/numpy,moreati/numpy,WarrenWeckesser/numpy,SunghanKim/numpy,numpy/numpy,jschueller/numpy,kirillzhuravlev/numpy,WillieMaddox/numpy,ssanderson/numpy,Eric89GXL/numpy,sinhrks/numpy,stuarteberg/numpy,SiccarPoint/numpy,endolith/numpy,tynn/numpy,maniteja123/numpy,BabeNovelty/numpy,skwbc/numpy,BabeNovelty/numpy,SiccarPoint/numpy,rudimeier/numpy,WarrenWeckesser/numpy,NextThought/pypy-numpy,trankmichael/numpy,dwf/numpy,sonnyhu/numpy,dch312/numpy,tacaswell/numpy,pbrod/numpy,ESSS/numpy,empeeu/numpy,larsmans/numpy,MSeifert04/numpy,maniteja123/numpy,bmorris3/numpy,gfyoung/numpy,mattip/numpy,jorisvandenbossche/numpy,ekalosak/numpy,moreati/numpy,pdebuyl/numpy,MichaelAquilina/numpy,kirillzhuravlev/numpy,ddasilva/numpy,Eric89GXL/numpy,matthew-brett/numpy,mortada/numpy,mathdd/numpy,empeeu/numpy,felipebetancur/numpy,Anwesh43/numpy,pde
buyl/numpy,ewmoore/numpy,bringingheavendown/numpy,behzadnouri/numpy,dato-code/numpy,astrofrog/numpy,Linkid/numpy,gmcastil/numpy,rhythmsosad/numpy,chiffa/numpy,endolith/numpy,stuarteberg/numpy,groutr/numpy,Linkid/numpy,GaZ3ll3/numpy,tacaswell/numpy,rajathkumarmp/numpy,jonathanunderwood/numpy,mindw/numpy,simongibbons/numpy,kirillzhuravlev/numpy,cowlicks/numpy,nguyentu1602/numpy,anntzer/numpy,njase/numpy,chiffa/numpy,pizzathief/numpy,shoyer/numpy,Srisai85/numpy,CMartelLML/numpy,githubmlai/numpy,WillieMaddox/numpy,ContinuumIO/numpy,ahaldane/numpy,mingwpy/numpy,madphysicist/numpy,naritta/numpy,sinhrks/numpy,KaelChen/numpy,stefanv/numpy,felipebetancur/numpy,yiakwy/numpy,cowlicks/numpy,MichaelAquilina/numpy,mindw/numpy,gmcastil/numpy,bmorris3/numpy,dch312/numpy,ahaldane/numpy,Yusa95/numpy,jonathanunderwood/numpy,grlee77/numpy,mathdd/numpy,sonnyhu/numpy,rgommers/numpy,rmcgibbo/numpy,tdsmith/numpy,astrofrog/numpy,sonnyhu/numpy,Srisai85/numpy,ahaldane/numpy,mattip/numpy,stefanv/numpy,shoyer/numpy,ChristopherHogan/numpy,ChanderG/numpy,rgommers/numpy,bmorris3/numpy,argriffing/numpy,mingwpy/numpy,pbrod/numpy,cjermain/numpy,pyparallel/numpy,anntzer/numpy,has2k1/numpy,b-carter/numpy,BMJHayward/numpy,stefanv/numpy,cowlicks/numpy,ChanderG/numpy,joferkington/numpy,skymanaditya1/numpy,dch312/numpy,BabeNovelty/numpy,matthew-brett/numpy,ddasilva/numpy,astrofrog/numpy,immerrr/numpy,jschueller/numpy,shoyer/numpy,grlee77/numpy,leifdenby/numpy,Eric89GXL/numpy,ChristopherHogan/numpy,ekalosak/numpy,stefanv/numpy,skymanaditya1/numpy,mattip/numpy,dimasad/numpy,ContinuumIO/numpy,njase/numpy,pizzathief/numpy,nguyentu1602/numpy | import numpy as np
from numpy.testing import *
- class TestPrint(TestCase):
+ def check_float_type(tp):
+ for x in [0, 1,-1, 1e10, 1e20] :
+ assert_equal(str(tp(x)), str(float(x)))
- def test_float_types(self) :
+ def test_float_types():
- """ Check formatting.
+ """ Check formatting.
- This is only for the str function, and only for simple types.
+ This is only for the str function, and only for simple types.
- The precision of np.float and np.longdouble aren't the same as the
+ The precision of np.float and np.longdouble aren't the same as the
- python float precision.
+ python float precision.
- """
+ """
- for t in [np.float, np.double, np.longdouble] :
+ for t in [np.float, np.double, np.longdouble] :
+ yield check_float_type, t
- for x in [0, 1,-1, 1e10, 1e20] :
- assert_equal(str(t(x)), str(float(x)))
- def test_complex_types(self) :
- """Check formatting.
+ def check_complex_type(tp):
+ for x in [0, 1,-1, 1e10, 1e20] :
+ assert_equal(str(tp(x)), str(complex(x)))
+ assert_equal(str(tp(x*1j)), str(complex(x*1j)))
+ assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
+ def test_complex_types():
+ """Check formatting.
- This is only for the str function, and only for simple types.
- The precision of np.float and np.longdouble aren't the same as the
- python float precision.
+ This is only for the str function, and only for simple types.
+ The precision of np.float and np.longdouble aren't the same as the
+ python float precision.
- """
- for t in [np.cfloat, np.cdouble, np.clongdouble] :
- for x in [0, 1,-1, 1e10, 1e20] :
- assert_equal(str(t(x)), str(complex(x)))
- assert_equal(str(t(x*1j)), str(complex(x*1j)))
- assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
+ """
+ for t in [np.cfloat, np.cdouble, np.clongdouble] :
+ yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
| Use parametric tests for format tests so that it is clearer which type is failing. | ## Code Before:
import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
## Instruction:
Use parametric tests for format tests so that it is clearer which type is failing.
## Code After:
import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
| // ... existing code ...
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
// ... rest of the code ... |
cc80f90a4f003c0967c31d5177971061350ee683 | pycall/call.py | pycall/call.py | """A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = int(wait_time)
self.max_retries = int(max_retries)
| """A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = wait_time
self.max_retries = max_retries
| Revert "Forcing type coersion for int params." | Revert "Forcing type coersion for int params."
This is a pointless bit of code. Since we lazy-evaluate them anyhow, it's a
duplicate effort.
This reverts commit 1ca6b96d492f8f33ac3b3a520937378effb66744.
| Python | unlicense | rdegges/pycall | """A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
- self.wait_time = int(wait_time)
+ self.wait_time = wait_time
- self.max_retries = int(max_retries)
+ self.max_retries = max_retries
| Revert "Forcing type coersion for int params." | ## Code Before:
"""A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = int(wait_time)
self.max_retries = int(max_retries)
## Instruction:
Revert "Forcing type coersion for int params."
## Code After:
"""A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = wait_time
self.max_retries = max_retries
| ...
self.account = account
self.wait_time = wait_time
self.max_retries = max_retries
... |
5748b1a7dc4a5be3b2b9da9959eabe586347078a | tensorflow_federated/python/program/value_reference.py | tensorflow_federated/python/program/value_reference.py | """Defines the abstract interface for classes that reference values."""
import abc
from typing import Any
from tensorflow_federated.python.core.impl.types import typed_object
class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface for classes that reference values.
This interfaces provides the capability to maniplutate values without
requiring them to be materialized as Python objects.
"""
@abc.abstractmethod
def get_value(self) -> Any:
pass
|
import abc
from typing import Union
import numpy as np
from tensorflow_federated.python.core.impl.types import typed_object
class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface representing references to server placed values."""
@abc.abstractmethod
def get_value(self) -> Union[np.generic, np.ndarray]:
"""Returns the referenced value as a numpy scalar or array."""
raise NotImplementedError
| Update the Value Reference API to be more precise about the types of values being referenced. | Update the Value Reference API to be more precise about the types of values being referenced.
PiperOrigin-RevId: 404647934
| Python | apache-2.0 | tensorflow/federated,tensorflow/federated,tensorflow/federated | - """Defines the abstract interface for classes that reference values."""
import abc
- from typing import Any
+ from typing import Union
+
+ import numpy as np
from tensorflow_federated.python.core.impl.types import typed_object
- class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
+ class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
+ """An abstract interface representing references to server placed values."""
- """An abstract interface for classes that reference values.
-
- This interfaces provides the capability to maniplutate values without
- requiring them to be materialized as Python objects.
- """
@abc.abstractmethod
- def get_value(self) -> Any:
- pass
+ def get_value(self) -> Union[np.generic, np.ndarray]:
+ """Returns the referenced value as a numpy scalar or array."""
+ raise NotImplementedError
| Update the Value Reference API to be more precise about the types of values being referenced. | ## Code Before:
"""Defines the abstract interface for classes that reference values."""
import abc
from typing import Any
from tensorflow_federated.python.core.impl.types import typed_object
class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface for classes that reference values.
This interfaces provides the capability to maniplutate values without
requiring them to be materialized as Python objects.
"""
@abc.abstractmethod
def get_value(self) -> Any:
pass
## Instruction:
Update the Value Reference API to be more precise about the types of values being referenced.
## Code After:
import abc
from typing import Union
import numpy as np
from tensorflow_federated.python.core.impl.types import typed_object
class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface representing references to server placed values."""
@abc.abstractmethod
def get_value(self) -> Union[np.generic, np.ndarray]:
"""Returns the referenced value as a numpy scalar or array."""
raise NotImplementedError
| ...
...
import abc
from typing import Union
import numpy as np
...
class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface representing references to server placed values."""
...
@abc.abstractmethod
def get_value(self) -> Union[np.generic, np.ndarray]:
"""Returns the referenced value as a numpy scalar or array."""
raise NotImplementedError
... |
59761e83b240fe7573370f542ea6e877c5850907 | setup.py | setup.py |
from distutils.core import setup, Extension
uinput = Extension('libuinput',
sources = ['src/uinput.c'])
setup(name='python-steamcontroller',
version='1.0',
description='Steam Controller userland driver',
author='Stany MARCEL',
author_email='[email protected]',
url='https://github.com/ynsta/steamcontroller',
package_dir={'steamcontroller': 'src'},
packages=['steamcontroller'],
scripts=['scripts/sc-dump.py',
'scripts/sc-xbox.py'],
license='MIT',
platforms=['Linux'],
ext_modules=[uinput, ])
|
from distutils.core import setup, Extension
uinput = Extension('libuinput',
sources = ['src/uinput.c'])
setup(name='python-steamcontroller',
version='1.0',
description='Steam Controller userland driver',
author='Stany MARCEL',
author_email='[email protected]',
url='https://github.com/ynsta/steamcontroller',
package_dir={'steamcontroller': 'src'},
packages=['steamcontroller'],
scripts=['scripts/sc-dump.py',
'scripts/sc-xbox.py',
'scripts/vdf2json.py',
'scripts/json2vdf.py'],
license='MIT',
platforms=['Linux'],
ext_modules=[uinput, ])
| Add json to from vdf scripts | Add json to from vdf scripts
Signed-off-by: Stany MARCEL <[email protected]>
| Python | mit | ynsta/steamcontroller,oneru/steamcontroller,oneru/steamcontroller,ynsta/steamcontroller |
from distutils.core import setup, Extension
uinput = Extension('libuinput',
sources = ['src/uinput.c'])
setup(name='python-steamcontroller',
version='1.0',
description='Steam Controller userland driver',
author='Stany MARCEL',
author_email='[email protected]',
url='https://github.com/ynsta/steamcontroller',
package_dir={'steamcontroller': 'src'},
packages=['steamcontroller'],
scripts=['scripts/sc-dump.py',
- 'scripts/sc-xbox.py'],
+ 'scripts/sc-xbox.py',
+ 'scripts/vdf2json.py',
+ 'scripts/json2vdf.py'],
license='MIT',
platforms=['Linux'],
ext_modules=[uinput, ])
| Add json to from vdf scripts | ## Code Before:
from distutils.core import setup, Extension
uinput = Extension('libuinput',
sources = ['src/uinput.c'])
setup(name='python-steamcontroller',
version='1.0',
description='Steam Controller userland driver',
author='Stany MARCEL',
author_email='[email protected]',
url='https://github.com/ynsta/steamcontroller',
package_dir={'steamcontroller': 'src'},
packages=['steamcontroller'],
scripts=['scripts/sc-dump.py',
'scripts/sc-xbox.py'],
license='MIT',
platforms=['Linux'],
ext_modules=[uinput, ])
## Instruction:
Add json to from vdf scripts
## Code After:
from distutils.core import setup, Extension
uinput = Extension('libuinput',
sources = ['src/uinput.c'])
setup(name='python-steamcontroller',
version='1.0',
description='Steam Controller userland driver',
author='Stany MARCEL',
author_email='[email protected]',
url='https://github.com/ynsta/steamcontroller',
package_dir={'steamcontroller': 'src'},
packages=['steamcontroller'],
scripts=['scripts/sc-dump.py',
'scripts/sc-xbox.py',
'scripts/vdf2json.py',
'scripts/json2vdf.py'],
license='MIT',
platforms=['Linux'],
ext_modules=[uinput, ])
| // ... existing code ...
scripts=['scripts/sc-dump.py',
'scripts/sc-xbox.py',
'scripts/vdf2json.py',
'scripts/json2vdf.py'],
license='MIT',
// ... rest of the code ... |
ec96669641c9b753c3ce74ce432213a17b0403fe | tests/aggregate_tests.py | tests/aggregate_tests.py |
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
import unittest
if __name__ == '__main__':
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
if not all_tests_passed:
sys.exit(1)
|
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unittest
import subprocess
def check_usable_gpg():
"""Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
available. """
os.environ["TEST_SKIP_GPG"] = "1"
for gpg in ["gpg2", "gpg"]:
try:
subprocess.check_call([gpg, "--version"])
except OSError:
pass
else:
# If one of the two exists, we can unset the skip envvar and ...
os.environ.pop("TEST_SKIP_GPG", None)
# ... abort the availability check.:
break
if __name__ == '__main__':
check_usable_gpg()
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
if not all_tests_passed:
sys.exit(1)
| Copy and call in-toto's check_usable_gpg function | Copy and call in-toto's check_usable_gpg function
Set environment variable in test aggregate script that may be
used to skip tests if gpg is not available on the test system.
| Python | mit | secure-systems-lab/securesystemslib,secure-systems-lab/securesystemslib |
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
+ import os
import sys
import unittest
+ import subprocess
+
+ def check_usable_gpg():
+ """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
+ available. """
+ os.environ["TEST_SKIP_GPG"] = "1"
+ for gpg in ["gpg2", "gpg"]:
+ try:
+ subprocess.check_call([gpg, "--version"])
+
+ except OSError:
+ pass
+
+ else:
+ # If one of the two exists, we can unset the skip envvar and ...
+ os.environ.pop("TEST_SKIP_GPG", None)
+ # ... abort the availability check.:
+ break
if __name__ == '__main__':
+ check_usable_gpg()
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
if not all_tests_passed:
sys.exit(1)
| Copy and call in-toto's check_usable_gpg function | ## Code Before:
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
import unittest
if __name__ == '__main__':
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
if not all_tests_passed:
sys.exit(1)
## Instruction:
Copy and call in-toto's check_usable_gpg function
## Code After:
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unittest
import subprocess
def check_usable_gpg():
"""Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
available. """
os.environ["TEST_SKIP_GPG"] = "1"
for gpg in ["gpg2", "gpg"]:
try:
subprocess.check_call([gpg, "--version"])
except OSError:
pass
else:
# If one of the two exists, we can unset the skip envvar and ...
os.environ.pop("TEST_SKIP_GPG", None)
# ... abort the availability check.:
break
if __name__ == '__main__':
check_usable_gpg()
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
if not all_tests_passed:
sys.exit(1)
| ...
import os
import sys
...
import unittest
import subprocess
def check_usable_gpg():
"""Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
available. """
os.environ["TEST_SKIP_GPG"] = "1"
for gpg in ["gpg2", "gpg"]:
try:
subprocess.check_call([gpg, "--version"])
except OSError:
pass
else:
# If one of the two exists, we can unset the skip envvar and ...
os.environ.pop("TEST_SKIP_GPG", None)
# ... abort the availability check.:
break
...
if __name__ == '__main__':
check_usable_gpg()
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
... |
d301cbeb4e6f248ed137a9d1a6b6f39558231cc3 | tests/functional/test_vcs_mercurial.py | tests/functional/test_vcs_mercurial.py | from pip._internal.vcs.mercurial import Mercurial
from tests.lib import _create_test_package
def test_get_repository_root(script):
version_pkg_path = _create_test_package(script, vcs="hg")
tests_path = version_pkg_path.joinpath("tests")
tests_path.mkdir()
root1 = Mercurial.get_repository_root(version_pkg_path)
assert root1 == version_pkg_path
root2 = Mercurial.get_repository_root(version_pkg_path.joinpath("tests"))
assert root2 == version_pkg_path
| from pip._internal.vcs.mercurial import Mercurial
from tests.lib import _create_test_package, need_mercurial
@need_mercurial
def test_get_repository_root(script):
version_pkg_path = _create_test_package(script, vcs="hg")
tests_path = version_pkg_path.joinpath("tests")
tests_path.mkdir()
root1 = Mercurial.get_repository_root(version_pkg_path)
assert root1 == version_pkg_path
root2 = Mercurial.get_repository_root(version_pkg_path.joinpath("tests"))
assert root2 == version_pkg_path
| Add marker to Mercurial test | Add marker to Mercurial test
| Python | mit | pradyunsg/pip,pfmoore/pip,pradyunsg/pip,pypa/pip,pfmoore/pip,pypa/pip,sbidoul/pip,sbidoul/pip | from pip._internal.vcs.mercurial import Mercurial
- from tests.lib import _create_test_package
+ from tests.lib import _create_test_package, need_mercurial
+ @need_mercurial
def test_get_repository_root(script):
version_pkg_path = _create_test_package(script, vcs="hg")
tests_path = version_pkg_path.joinpath("tests")
tests_path.mkdir()
root1 = Mercurial.get_repository_root(version_pkg_path)
assert root1 == version_pkg_path
root2 = Mercurial.get_repository_root(version_pkg_path.joinpath("tests"))
assert root2 == version_pkg_path
| Add marker to Mercurial test | ## Code Before:
from pip._internal.vcs.mercurial import Mercurial
from tests.lib import _create_test_package
def test_get_repository_root(script):
version_pkg_path = _create_test_package(script, vcs="hg")
tests_path = version_pkg_path.joinpath("tests")
tests_path.mkdir()
root1 = Mercurial.get_repository_root(version_pkg_path)
assert root1 == version_pkg_path
root2 = Mercurial.get_repository_root(version_pkg_path.joinpath("tests"))
assert root2 == version_pkg_path
## Instruction:
Add marker to Mercurial test
## Code After:
from pip._internal.vcs.mercurial import Mercurial
from tests.lib import _create_test_package, need_mercurial
@need_mercurial
def test_get_repository_root(script):
version_pkg_path = _create_test_package(script, vcs="hg")
tests_path = version_pkg_path.joinpath("tests")
tests_path.mkdir()
root1 = Mercurial.get_repository_root(version_pkg_path)
assert root1 == version_pkg_path
root2 = Mercurial.get_repository_root(version_pkg_path.joinpath("tests"))
assert root2 == version_pkg_path
| // ... existing code ...
from pip._internal.vcs.mercurial import Mercurial
from tests.lib import _create_test_package, need_mercurial
// ... modified code ...
@need_mercurial
def test_get_repository_root(script):
// ... rest of the code ... |
3dd205a9dad39abb12e7a05c178117545402c2e1 | reinforcement-learning/train.py | reinforcement-learning/train.py | """This is the agent which currently takes the action with proper q learning."""
import time
start = time.time()
from tqdm import tqdm
import env
import os
import rl
env.make("text")
episodes = 10000
import argparse
parser = argparse.ArgumentParser(description="Train agent on the falling game.")
parser.add_argument("--remove-file", help="Remove existing q table.", default=True)
parser.add_argument("--episodes", type=str, help="Number of episodes to train for.", default=10000)
args = parser.parse_args()
if args.remove_file == True:
os.remove("q-table.npy")
rl.load_q()
elif args.remove_file == "False":
rl.load_q()
else:
print("Invalid argument.")
quit()
episodes = int(args.episodes)
with tqdm(total=episodes) as pbar:
for episode in range(episodes):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
pbar.update(1)
break
action = rl.choose_action(rl.table[env.object[0]])
rl.q(env.player, action)
episode_reward += env.reward(action)
env.action(action)
env.update()
rl.save_q()
print("Q table:")
print(rl.table[env.object[0]])
| """This is the agent which currently takes the action with proper q learning."""
import time
start = time.time()  # wall-clock start of the whole training run
from tqdm import tqdm
import env
import os
import rl
env.make("text")  # build the falling game with the text renderer
episodes = 10000
import argparse
parser = argparse.ArgumentParser(description="Train agent on the falling game.")
parser.add_argument("--remove-file", help="Remove existing q table.", default=True)
parser.add_argument("--episodes", type=str, help="Number of episodes to train for.", default=10000)
args = parser.parse_args()
# argparse delivers command-line values as strings, so accept both the
# boolean default (True) and the explicit string form ("True"/"False").
if args.remove_file in (True, "True"):
    # Start from scratch: drop any previously saved q table.  Guard the
    # remove so a missing file (e.g. the very first run) is not fatal.
    if os.path.exists("q-table.npy"):
        os.remove("q-table.npy")
    rl.load_q()
elif args.remove_file == "False":
    rl.load_q()  # keep the existing q table and continue training
else:
    print("Invalid argument.")
    quit()
episodes = int(args.episodes)
with tqdm(total=episodes) as pbar:  # progress bar over training episodes
    for episode in range(episodes):
        env.reset()
        episode_reward = 0
        for t in range(100):  # cap each episode at 100 steps
            episode_reward += env.actual_reward
            if env.done:
                pbar.update(1)
                break
            # Pick an action for the current player state in training mode,
            # then apply the q-learning update before stepping the env.
            action = rl.choose_action(env.player, "train")
            rl.q(env.player, action)
            episode_reward += env.reward(action)
            env.action(action)
            env.update()
rl.save_q()  # persist the learned q table to disk
print("Q table:")
print(rl.table[env.object[0]])
| Update to newest version of rl.py. | Update to newest version of rl.py.
| Python | mit | danieloconell/Louis | """This is the agent which currently takes the action with proper q learning."""
import time
start = time.time()
from tqdm import tqdm
import env
import os
import rl
env.make("text")
episodes = 10000
import argparse
parser = argparse.ArgumentParser(description="Train agent on the falling game.")
parser.add_argument("--remove-file", help="Remove existing q table.", default=True)
parser.add_argument("--episodes", type=str, help="Number of episodes to train for.", default=10000)
args = parser.parse_args()
if args.remove_file == True:
os.remove("q-table.npy")
rl.load_q()
elif args.remove_file == "False":
rl.load_q()
else:
print("Invalid argument.")
quit()
episodes = int(args.episodes)
with tqdm(total=episodes) as pbar:
for episode in range(episodes):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
pbar.update(1)
break
- action = rl.choose_action(rl.table[env.object[0]])
+ action = rl.choose_action(env.player, "train")
rl.q(env.player, action)
episode_reward += env.reward(action)
env.action(action)
env.update()
rl.save_q()
print("Q table:")
print(rl.table[env.object[0]])
| Update to newest version of rl.py. | ## Code Before:
"""This is the agent which currently takes the action with proper q learning."""
import time
start = time.time()
from tqdm import tqdm
import env
import os
import rl
env.make("text")
episodes = 10000
import argparse
parser = argparse.ArgumentParser(description="Train agent on the falling game.")
parser.add_argument("--remove-file", help="Remove existing q table.", default=True)
parser.add_argument("--episodes", type=str, help="Number of episodes to train for.", default=10000)
args = parser.parse_args()
if args.remove_file == True:
os.remove("q-table.npy")
rl.load_q()
elif args.remove_file == "False":
rl.load_q()
else:
print("Invalid argument.")
quit()
episodes = int(args.episodes)
with tqdm(total=episodes) as pbar:
for episode in range(episodes):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
pbar.update(1)
break
action = rl.choose_action(rl.table[env.object[0]])
rl.q(env.player, action)
episode_reward += env.reward(action)
env.action(action)
env.update()
rl.save_q()
print("Q table:")
print(rl.table[env.object[0]])
## Instruction:
Update to newest version of rl.py.
## Code After:
"""This is the agent which currently takes the action with proper q learning."""
import time
start = time.time()
from tqdm import tqdm
import env
import os
import rl
env.make("text")
episodes = 10000
import argparse
parser = argparse.ArgumentParser(description="Train agent on the falling game.")
parser.add_argument("--remove-file", help="Remove existing q table.", default=True)
parser.add_argument("--episodes", type=str, help="Number of episodes to train for.", default=10000)
args = parser.parse_args()
if args.remove_file == True:
os.remove("q-table.npy")
rl.load_q()
elif args.remove_file == "False":
rl.load_q()
else:
print("Invalid argument.")
quit()
episodes = int(args.episodes)
with tqdm(total=episodes) as pbar:
for episode in range(episodes):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
pbar.update(1)
break
action = rl.choose_action(env.player, "train")
rl.q(env.player, action)
episode_reward += env.reward(action)
env.action(action)
env.update()
rl.save_q()
print("Q table:")
print(rl.table[env.object[0]])
| ...
break
action = rl.choose_action(env.player, "train")
rl.q(env.player, action)
... |
438d78058951179f947480b0340752fa9b372a9d | sqs.py | sqs.py | from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
    """SQS AWS Adapter for Tornado HTTP request"""
    def __init__(self, *args, **kwargs):
        # Plain pass-through for now; AWS-specific request preparation
        # (query canonicalization, SigV4 signing) is not implemented yet.
        super(SQSRequest, self).__init__(*args, **kwargs)
| from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
    """SQS AWS Adapter for Tornado HTTP request.

    Canonicalizes the query string (sorted parameters) as required by the
    AWS Signature Version 4 signing process before delegating to
    ``HTTPRequest``.
    """
    def __init__(self, *args, **kwargs):
        # Request timestamp for SigV4 signing (UTC, per AWS requirements).
        t = datetime.datetime.utcnow()
        method = kwargs.get('method', 'GET')
        # Accept the URL either as a keyword or as the first positional arg.
        url = kwargs.get('url') or args[0]
        # SigV4 requires query parameters sorted by name; assumes the URL
        # always carries a query string (true for SQS API calls).
        params = sorted(url.split('?')[1].split('&'))
        canonical_querystring = '&'.join(params)
        kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
        args = tuple()  # the url now travels via kwargs only
        host = url.split('://')[1].split('/')[0]
        canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
        service = 'sqs'
        # pop(), not get(): 'region' is our own extension and must not reach
        # HTTPRequest.__init__, which rejects unknown keyword arguments.
        region = kwargs.pop('region', 'eu-west-1')
        # NOTE(review): t/method/host/canonical_uri/service/region are staged
        # for the SigV4 signature computation, which is not implemented yet.
        super(SQSRequest, self).__init__(*args, **kwargs)
| Add init code to deal with AWS HTTP API | Add init code to deal with AWS HTTP API
| Python | mit | MA3STR0/AsyncAWS | from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
+ t = datetime.datetime.utcnow()
+ method = kwargs.get('method', 'GET')
+ url = kwargs.get('url') or args[0]
+ params = sorted(url.split('?')[1].split('&'))
+ canonical_querystring = '&'.join(params)
+ kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
+ args = tuple()
+ host = url.split('://')[1].split('/')[0]
+ canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
+ service = 'sqs'
+ region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
| Add init code to deal with AWS HTTP API | ## Code Before:
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
super(SQSRequest, self).__init__(*args, **kwargs)
## Instruction:
Add init code to deal with AWS HTTP API
## Code After:
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
t = datetime.datetime.utcnow()
method = kwargs.get('method', 'GET')
url = kwargs.get('url') or args[0]
params = sorted(url.split('?')[1].split('&'))
canonical_querystring = '&'.join(params)
kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
args = tuple()
host = url.split('://')[1].split('/')[0]
canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
service = 'sqs'
region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
| // ... existing code ...
def __init__(self, *args, **kwargs):
t = datetime.datetime.utcnow()
method = kwargs.get('method', 'GET')
url = kwargs.get('url') or args[0]
params = sorted(url.split('?')[1].split('&'))
canonical_querystring = '&'.join(params)
kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
args = tuple()
host = url.split('://')[1].split('/')[0]
canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
service = 'sqs'
region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
// ... rest of the code ... |
a978a7ed7f40ac7a77aa31ec89a3bb8ae58abb61 | ecommerce/courses/migrations/0006_auto_20171204_1036.py | ecommerce/courses/migrations/0006_auto_20171204_1036.py | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):

    # Data migration: backfill the new created/modified fields from the
    # simple-history audit trail of each course.
    def add_created_modified_date(apps, schema_editor):
        Course = apps.get_model('courses', 'Course')
        courses = Course.objects.all()
        for course in courses:
            # NOTE(review): models obtained through apps.get_model() do not
            # carry the `history` manager, so `course.history` raises
            # AttributeError here — the historicalcourse model should be
            # fetched via apps.get_model() and queried directly instead.
            course.created = course.history.earliest().history_date
            course.modified = course.history.latest().history_date
            course.save()

    dependencies = [
        ('courses', '0005_auto_20170525_0131'),
    ]

    operations = [
        # New audit timestamps on the live model...
        migrations.AddField(
            model_name='course',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='course',
            name='modified',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        # ...and on its historical shadow table (populated by the data
        # migration, hence plain editable=False fields, not auto_now*).
        migrations.AddField(
            model_name='historicalcourse',
            name='created',
            field=models.DateTimeField(blank=True, editable=False, null=True),
        ),
        migrations.AddField(
            model_name='historicalcourse',
            name='modified',
            field=models.DateTimeField(blank=True, editable=False, null=True),
        ),
        # Populate the new fields; the reverse migration is a no-op.
        migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
    ]
| from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
| Fix migration issue Fixed 'Course' object has no attribute 'history' issue in the migration | Fix migration issue
Fixed 'Course' object has no attribute 'history' issue in the migration
| Python | agpl-3.0 | edx/ecommerce,edx/ecommerce,eduNEXT/edunext-ecommerce,eduNEXT/edunext-ecommerce,eduNEXT/edunext-ecommerce,edx/ecommerce,edx/ecommerce,eduNEXT/edunext-ecommerce | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
+ HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
+ history = HistoricalCourse.objects.filter(id=course.id)
- course.created = course.history.earliest().history_date
+ course.created = history.earliest().history_date
- course.modified = course.history.latest().history_date
+ course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
| Fix migration issue Fixed 'Course' object has no attribute 'history' issue in the migration | ## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
courses = Course.objects.all()
for course in courses:
course.created = course.history.earliest().history_date
course.modified = course.history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
## Instruction:
Fix migration issue Fixed 'Course' object has no attribute 'history' issue in the migration
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
| # ... existing code ...
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
# ... modified code ...
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
# ... rest of the code ... |
125dfa47e5656c3f9b1e8846be03010ed02c6f91 | tests/rules_tests/isValid_tests/InvalidSyntaxTest.py | tests/rules_tests/isValid_tests/InvalidSyntaxTest.py |
from unittest import main, TestCase
from grammpy import Rule
class InvalidSyntaxTest(TestCase):
pass
if __name__ == '__main__':
main() |
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions import RuleSyntaxException
from .grammar import *
class InvalidSyntaxTest(TestCase):
    """Rules with a malformed ``rules`` declaration must fail validation."""

    def _assert_invalid(self, rules_value):
        # Build a throwaway Rule subclass around the malformed declaration
        # and check that grammar validation rejects it.
        class tmp(Rule):
            rules = rules_value
        with self.assertRaises(RuleSyntaxException):
            tmp.validate(grammar)

    def test_rulesMissingEncloseList(self):
        self._assert_invalid(([0], [1]))

    def test_rulesMissingTuple(self):
        self._assert_invalid([[0], [1]])

    def test_rulesMissingInnerLeftList(self):
        self._assert_invalid([(0, [1])])

    def test_rulesMissingInnerRightList(self):
        self._assert_invalid([([0], 1)])

    def test_multipleRulesMissingInnerLeftList(self):
        self._assert_invalid([(NFirst, TSecond), (0, [1])])

    def test_multipleRulesMissingInnerRightList(self):
        self._assert_invalid([(NFifth, TFirst), ([0], 1)])

    def test_emptyRule(self):
        self._assert_invalid([([], [])])

    def test_emptyOneOfRules(self):
        self._assert_invalid([(NFifth, TFirst), ([], [])])

    def test_onlyOuterArray(self):
        self._assert_invalid([NFifth, TFirst])

    def test_outerIsTuple(self):
        self._assert_invalid((([NFirst], [TSecond]), ([0], [1])))
if __name__ == '__main__':
main()
| Add base set of rule's invalid syntax tests | Add base set of rule's invalid syntax tests
| Python | mit | PatrikValkovic/grammpy |
from unittest import main, TestCase
from grammpy import Rule
+ from grammpy.exceptions import RuleSyntaxException
+ from .grammar import *
class InvalidSyntaxTest(TestCase):
- pass
+ def test_rulesMissingEncloseList(self):
+ class tmp(Rule):
+ rules = ([0], [1])
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_rulesMissingTuple(self):
+ class tmp(Rule):
+ rules = [[0], [1]]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_rulesMissingInnerLeftList(self):
+ class tmp(Rule):
+ rules = [(0, [1])]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_rulesMissingInnerRightList(self):
+ class tmp(Rule):
+ rules = [([0], 1)]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_multipleRulesMissingInnerLeftList(self):
+ class tmp(Rule):
+ rules = [(NFirst, TSecond), (0, [1])]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_multipleRulesMissingInnerRightList(self):
+ class tmp(Rule):
+ rules = [(NFifth, TFirst), ([0], 1)]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_emptyRule(self):
+ class tmp(Rule):
+ rules = [([], [])]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_emptyOneOfRules(self):
+ class tmp(Rule):
+ rules = [(NFifth, TFirst), ([], [])]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_onlyOuterArray(self):
+ class tmp(Rule):
+ rules = [NFifth, TFirst]
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
+
+ def test_outerIsTuple(self):
+ class tmp(Rule):
+ rules = (([NFirst], [TSecond]), ([0], [1]))
+ with self.assertRaises(RuleSyntaxException):
+ tmp.validate(grammar)
if __name__ == '__main__':
main()
+ | Add base set of rule's invalid syntax tests | ## Code Before:
from unittest import main, TestCase
from grammpy import Rule
class InvalidSyntaxTest(TestCase):
pass
if __name__ == '__main__':
main()
## Instruction:
Add base set of rule's invalid syntax tests
## Code After:
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions import RuleSyntaxException
from .grammar import *
class InvalidSyntaxTest(TestCase):
def test_rulesMissingEncloseList(self):
class tmp(Rule):
rules = ([0], [1])
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_rulesMissingTuple(self):
class tmp(Rule):
rules = [[0], [1]]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_rulesMissingInnerLeftList(self):
class tmp(Rule):
rules = [(0, [1])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_rulesMissingInnerRightList(self):
class tmp(Rule):
rules = [([0], 1)]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_multipleRulesMissingInnerLeftList(self):
class tmp(Rule):
rules = [(NFirst, TSecond), (0, [1])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_multipleRulesMissingInnerRightList(self):
class tmp(Rule):
rules = [(NFifth, TFirst), ([0], 1)]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_emptyRule(self):
class tmp(Rule):
rules = [([], [])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_emptyOneOfRules(self):
class tmp(Rule):
rules = [(NFifth, TFirst), ([], [])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_onlyOuterArray(self):
class tmp(Rule):
rules = [NFifth, TFirst]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_outerIsTuple(self):
class tmp(Rule):
rules = (([NFirst], [TSecond]), ([0], [1]))
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
if __name__ == '__main__':
main()
| ...
from grammpy import Rule
from grammpy.exceptions import RuleSyntaxException
from .grammar import *
...
class InvalidSyntaxTest(TestCase):
def test_rulesMissingEncloseList(self):
class tmp(Rule):
rules = ([0], [1])
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_rulesMissingTuple(self):
class tmp(Rule):
rules = [[0], [1]]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_rulesMissingInnerLeftList(self):
class tmp(Rule):
rules = [(0, [1])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_rulesMissingInnerRightList(self):
class tmp(Rule):
rules = [([0], 1)]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_multipleRulesMissingInnerLeftList(self):
class tmp(Rule):
rules = [(NFirst, TSecond), (0, [1])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_multipleRulesMissingInnerRightList(self):
class tmp(Rule):
rules = [(NFifth, TFirst), ([0], 1)]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_emptyRule(self):
class tmp(Rule):
rules = [([], [])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_emptyOneOfRules(self):
class tmp(Rule):
rules = [(NFifth, TFirst), ([], [])]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_onlyOuterArray(self):
class tmp(Rule):
rules = [NFifth, TFirst]
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
def test_outerIsTuple(self):
class tmp(Rule):
rules = (([NFirst], [TSecond]), ([0], [1]))
with self.assertRaises(RuleSyntaxException):
tmp.validate(grammar)
... |
431fdabc5c103c9581758543359a54f650d24bcf | nodes/cpu_node.py | nodes/cpu_node.py | from node import Node
from model.cpu import CPU
from extraction.http.cpu_cw_http import CpuCwHTTP
class CPUNode(Node):
label = "CPU"
    def __init__(self, service, timespan):
        # Node stores the target service and query timespan for later use.
        super(CPUNode, self).__init__(service, timespan)
    def load_entities(self):
        # Fetch CPU samples via the Cw HTTP extraction backend
        # (presumably CloudWatch — confirm).
        return CpuCwHTTP(self.service, self.timespan).load_entities()
    def graph_nodes(self):
        """Group the loaded CPU entities by label for the graph view."""
        cpu_nodes = self.load_entities()
        items = {}
        for cpu in cpu_nodes:
            # NOTE(review): dict.has_key() is Python-2 only; use
            # "cpu.label not in items" for Python 3 compatibility.
            if not items.has_key(cpu.label):
                items[cpu.label] = []
            items[cpu.label].append(cpu.__dict__)
        # NOTE(review): the expand node below is a hard-coded CloudWave demo
        # entry, not derived from the loaded entities.
        return { 'expand_nodes' : [{"data" : {"id": 'code_fragment:validate', "name": 'func validate() (Go)'}}], 'data' : items }
def infer_context(self):
return [] | from node import Node
from model.cpu import CPU
from extraction.http.cpu_cw_http import CpuCwHTTP
class CPUNode(Node):
    """Graph node exposing CPU metrics for a service over a timespan."""
    label = "CPU"
    def __init__(self, service, timespan):
        # Node stores the target service and query timespan for later use.
        super(CPUNode, self).__init__(service, timespan)
    def load_entities(self):
        # Fetch CPU samples via the Cw HTTP extraction backend
        # (presumably CloudWatch — confirm).
        return CpuCwHTTP(self.service, self.timespan).load_entities()
    def graph_nodes(self):
        """Group the loaded CPU entities by label for the graph view."""
        cpu_nodes = self.load_entities()
        items = {}
        for cpu in cpu_nodes:
            # setdefault replaces the Python-2-only dict.has_key() check
            # and behaves identically on both Python 2 and 3.
            items.setdefault(cpu.label, []).append(cpu.__dict__)
        return { 'expand_nodes' : [], 'data' : items }
    def infer_context(self):
        return []
| Remove CloudWave specific context expansion | Remove CloudWave specific context expansion
| Python | apache-2.0 | sealuzh/ContextBasedAnalytics,sealuzh/ContextBasedAnalytics,sealuzh/ContextBasedAnalytics | from node import Node
from model.cpu import CPU
from extraction.http.cpu_cw_http import CpuCwHTTP
class CPUNode(Node):
label = "CPU"
def __init__(self, service, timespan):
super(CPUNode, self).__init__(service, timespan)
def load_entities(self):
return CpuCwHTTP(self.service, self.timespan).load_entities()
def graph_nodes(self):
cpu_nodes = self.load_entities()
items = {}
for cpu in cpu_nodes:
if not items.has_key(cpu.label):
items[cpu.label] = []
items[cpu.label].append(cpu.__dict__)
- return { 'expand_nodes' : [{"data" : {"id": 'code_fragment:validate', "name": 'func validate() (Go)'}}], 'data' : items }
+ return { 'expand_nodes' : [], 'data' : items }
def infer_context(self):
return []
+ | Remove CloudWave specific context expansion | ## Code Before:
from node import Node
from model.cpu import CPU
from extraction.http.cpu_cw_http import CpuCwHTTP
class CPUNode(Node):
label = "CPU"
def __init__(self, service, timespan):
super(CPUNode, self).__init__(service, timespan)
def load_entities(self):
return CpuCwHTTP(self.service, self.timespan).load_entities()
def graph_nodes(self):
cpu_nodes = self.load_entities()
items = {}
for cpu in cpu_nodes:
if not items.has_key(cpu.label):
items[cpu.label] = []
items[cpu.label].append(cpu.__dict__)
return { 'expand_nodes' : [{"data" : {"id": 'code_fragment:validate', "name": 'func validate() (Go)'}}], 'data' : items }
def infer_context(self):
return []
## Instruction:
Remove CloudWave specific context expansion
## Code After:
from node import Node
from model.cpu import CPU
from extraction.http.cpu_cw_http import CpuCwHTTP
class CPUNode(Node):
label = "CPU"
def __init__(self, service, timespan):
super(CPUNode, self).__init__(service, timespan)
def load_entities(self):
return CpuCwHTTP(self.service, self.timespan).load_entities()
def graph_nodes(self):
cpu_nodes = self.load_entities()
items = {}
for cpu in cpu_nodes:
if not items.has_key(cpu.label):
items[cpu.label] = []
items[cpu.label].append(cpu.__dict__)
return { 'expand_nodes' : [], 'data' : items }
def infer_context(self):
return []
| // ... existing code ...
return { 'expand_nodes' : [], 'data' : items }
// ... rest of the code ... |
b6233dff3cec42696f2ea0eea286ded48f02e79b | rllib/optimizers/rollout.py | rllib/optimizers/rollout.py | import logging
import ray
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.memory import ray_get_and_free
logger = logging.getLogger(__name__)
def collect_samples(agents, sample_batch_size, num_envs_per_worker,
                    train_batch_size):
    """Collects at least train_batch_size samples, never discarding any."""
    # Total env steps gathered so far across all returned batches.
    num_timesteps_so_far = 0
    trajectories = []
    # Maps an in-flight sample() future -> the worker that produced it.
    agent_dict = {}
    for agent in agents:
        fut_sample = agent.sample.remote()
        agent_dict[fut_sample] = agent
    while agent_dict:
        # Block until any single worker finishes its rollout.
        [fut_sample], _ = ray.wait(list(agent_dict))
        agent = agent_dict.pop(fut_sample)
        next_sample = ray_get_and_free(fut_sample)
        # NOTE(review): this assumes every batch holds at least
        # sample_batch_size * num_envs_per_worker steps; workers can return
        # smaller (e.g. truncated) batches, which would trip this assert.
        assert next_sample.count >= sample_batch_size * num_envs_per_worker
        num_timesteps_so_far += next_sample.count
        trajectories.append(next_sample)
        # Only launch more tasks if we don't already have enough pending
        pending = len(agent_dict) * sample_batch_size * num_envs_per_worker
        if num_timesteps_so_far + pending < train_batch_size:
            fut_sample2 = agent.sample.remote()
            agent_dict[fut_sample2] = agent
    return SampleBatch.concat_samples(trajectories)
| import logging
import ray
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.memory import ray_get_and_free
logger = logging.getLogger(__name__)
def collect_samples(agents, sample_batch_size, num_envs_per_worker,
                    train_batch_size):
    """Collects at least train_batch_size samples, never discarding any."""
    collected = 0
    trajectories = []
    # In-flight sample() future -> the worker that produced it.
    pending = {agent.sample.remote(): agent for agent in agents}
    while pending:
        # Block until any single worker finishes its rollout.
        [ready], _ = ray.wait(list(pending))
        worker = pending.pop(ready)
        batch = ray_get_and_free(ready)
        collected += batch.count
        trajectories.append(batch)
        # Re-dispatch the worker only if the steps already collected plus
        # those expected from still-pending tasks fall short of the target.
        outstanding = len(pending) * sample_batch_size * num_envs_per_worker
        if collected + outstanding < train_batch_size:
            pending[worker.sample.remote()] = worker
    return SampleBatch.concat_samples(trajectories)
| Fix bad sample count assert | [rllib] Fix bad sample count assert
| Python | apache-2.0 | richardliaw/ray,ray-project/ray,robertnishihara/ray,richardliaw/ray,pcmoritz/ray-1,robertnishihara/ray,ray-project/ray,pcmoritz/ray-1,robertnishihara/ray,pcmoritz/ray-1,pcmoritz/ray-1,robertnishihara/ray,pcmoritz/ray-1,richardliaw/ray,ray-project/ray,richardliaw/ray,pcmoritz/ray-1,richardliaw/ray,ray-project/ray,robertnishihara/ray,robertnishihara/ray,ray-project/ray,pcmoritz/ray-1,robertnishihara/ray,ray-project/ray,ray-project/ray,robertnishihara/ray,pcmoritz/ray-1,richardliaw/ray,robertnishihara/ray,richardliaw/ray,richardliaw/ray,ray-project/ray | import logging
import ray
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.memory import ray_get_and_free
logger = logging.getLogger(__name__)
def collect_samples(agents, sample_batch_size, num_envs_per_worker,
train_batch_size):
"""Collects at least train_batch_size samples, never discarding any."""
num_timesteps_so_far = 0
trajectories = []
agent_dict = {}
for agent in agents:
fut_sample = agent.sample.remote()
agent_dict[fut_sample] = agent
while agent_dict:
[fut_sample], _ = ray.wait(list(agent_dict))
agent = agent_dict.pop(fut_sample)
next_sample = ray_get_and_free(fut_sample)
- assert next_sample.count >= sample_batch_size * num_envs_per_worker
num_timesteps_so_far += next_sample.count
trajectories.append(next_sample)
# Only launch more tasks if we don't already have enough pending
pending = len(agent_dict) * sample_batch_size * num_envs_per_worker
if num_timesteps_so_far + pending < train_batch_size:
fut_sample2 = agent.sample.remote()
agent_dict[fut_sample2] = agent
return SampleBatch.concat_samples(trajectories)
| Fix bad sample count assert | ## Code Before:
import logging
import ray
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.memory import ray_get_and_free
logger = logging.getLogger(__name__)
def collect_samples(agents, sample_batch_size, num_envs_per_worker,
train_batch_size):
"""Collects at least train_batch_size samples, never discarding any."""
num_timesteps_so_far = 0
trajectories = []
agent_dict = {}
for agent in agents:
fut_sample = agent.sample.remote()
agent_dict[fut_sample] = agent
while agent_dict:
[fut_sample], _ = ray.wait(list(agent_dict))
agent = agent_dict.pop(fut_sample)
next_sample = ray_get_and_free(fut_sample)
assert next_sample.count >= sample_batch_size * num_envs_per_worker
num_timesteps_so_far += next_sample.count
trajectories.append(next_sample)
# Only launch more tasks if we don't already have enough pending
pending = len(agent_dict) * sample_batch_size * num_envs_per_worker
if num_timesteps_so_far + pending < train_batch_size:
fut_sample2 = agent.sample.remote()
agent_dict[fut_sample2] = agent
return SampleBatch.concat_samples(trajectories)
## Instruction:
Fix bad sample count assert
## Code After:
import logging
import ray
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.memory import ray_get_and_free
logger = logging.getLogger(__name__)
def collect_samples(agents, sample_batch_size, num_envs_per_worker,
train_batch_size):
"""Collects at least train_batch_size samples, never discarding any."""
num_timesteps_so_far = 0
trajectories = []
agent_dict = {}
for agent in agents:
fut_sample = agent.sample.remote()
agent_dict[fut_sample] = agent
while agent_dict:
[fut_sample], _ = ray.wait(list(agent_dict))
agent = agent_dict.pop(fut_sample)
next_sample = ray_get_and_free(fut_sample)
num_timesteps_so_far += next_sample.count
trajectories.append(next_sample)
# Only launch more tasks if we don't already have enough pending
pending = len(agent_dict) * sample_batch_size * num_envs_per_worker
if num_timesteps_so_far + pending < train_batch_size:
fut_sample2 = agent.sample.remote()
agent_dict[fut_sample2] = agent
return SampleBatch.concat_samples(trajectories)
| # ... existing code ...
next_sample = ray_get_and_free(fut_sample)
num_timesteps_so_far += next_sample.count
# ... rest of the code ... |
668440b16916651b85b4a4a507214cee721906a8 | scanpointgenerator/__init__.py | scanpointgenerator/__init__.py | from point import Point # noqa
from generator import Generator # noqa
from arraygenerator import ArrayGenerator # noqa
from compoundgenerator import CompoundGenerator # noqa
from linegenerator import LineGenerator # noqa
from lissajousgenerator import LissajousGenerator # noqa
from randomoffsetgenerator import RandomOffsetGenerator # noqa
from spiralgenerator import SpiralGenerator # noqa
from plotgenerator import plot_generator # noqa
| from scanpointgenerator.point import Point # noqa
from scanpointgenerator.generator import Generator # noqa
from scanpointgenerator.arraygenerator import ArrayGenerator # noqa
from scanpointgenerator.compoundgenerator import CompoundGenerator # noqa
from scanpointgenerator.linegenerator import LineGenerator # noqa
from scanpointgenerator.lissajousgenerator import LissajousGenerator # noqa
from scanpointgenerator.randomoffsetgenerator import RandomOffsetGenerator # noqa
from scanpointgenerator.spiralgenerator import SpiralGenerator # noqa
from scanpointgenerator.plotgenerator import plot_generator # noqa
| Add absolute imports in init | Add absolute imports in init
| Python | apache-2.0 | dls-controls/scanpointgenerator | - from point import Point # noqa
+ from scanpointgenerator.point import Point # noqa
- from generator import Generator # noqa
+ from scanpointgenerator.generator import Generator # noqa
- from arraygenerator import ArrayGenerator # noqa
+ from scanpointgenerator.arraygenerator import ArrayGenerator # noqa
- from compoundgenerator import CompoundGenerator # noqa
+ from scanpointgenerator.compoundgenerator import CompoundGenerator # noqa
- from linegenerator import LineGenerator # noqa
+ from scanpointgenerator.linegenerator import LineGenerator # noqa
- from lissajousgenerator import LissajousGenerator # noqa
+ from scanpointgenerator.lissajousgenerator import LissajousGenerator # noqa
- from randomoffsetgenerator import RandomOffsetGenerator # noqa
+ from scanpointgenerator.randomoffsetgenerator import RandomOffsetGenerator # noqa
- from spiralgenerator import SpiralGenerator # noqa
+ from scanpointgenerator.spiralgenerator import SpiralGenerator # noqa
- from plotgenerator import plot_generator # noqa
+ from scanpointgenerator.plotgenerator import plot_generator # noqa
| Add absolute imports in init | ## Code Before:
from point import Point # noqa
from generator import Generator # noqa
from arraygenerator import ArrayGenerator # noqa
from compoundgenerator import CompoundGenerator # noqa
from linegenerator import LineGenerator # noqa
from lissajousgenerator import LissajousGenerator # noqa
from randomoffsetgenerator import RandomOffsetGenerator # noqa
from spiralgenerator import SpiralGenerator # noqa
from plotgenerator import plot_generator # noqa
## Instruction:
Add absolute imports in init
## Code After:
from scanpointgenerator.point import Point # noqa
from scanpointgenerator.generator import Generator # noqa
from scanpointgenerator.arraygenerator import ArrayGenerator # noqa
from scanpointgenerator.compoundgenerator import CompoundGenerator # noqa
from scanpointgenerator.linegenerator import LineGenerator # noqa
from scanpointgenerator.lissajousgenerator import LissajousGenerator # noqa
from scanpointgenerator.randomoffsetgenerator import RandomOffsetGenerator # noqa
from scanpointgenerator.spiralgenerator import SpiralGenerator # noqa
from scanpointgenerator.plotgenerator import plot_generator # noqa
| ...
from scanpointgenerator.point import Point # noqa
from scanpointgenerator.generator import Generator # noqa
from scanpointgenerator.arraygenerator import ArrayGenerator # noqa
from scanpointgenerator.compoundgenerator import CompoundGenerator # noqa
from scanpointgenerator.linegenerator import LineGenerator # noqa
from scanpointgenerator.lissajousgenerator import LissajousGenerator # noqa
from scanpointgenerator.randomoffsetgenerator import RandomOffsetGenerator # noqa
from scanpointgenerator.spiralgenerator import SpiralGenerator # noqa
from scanpointgenerator.plotgenerator import plot_generator # noqa
... |
eae4b06bd798eab3a46bdd5b7452411bb7fb02e1 | dashcam.py | dashcam.py |
import pygame
import picamera
import os
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
pygame.init()
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode((0,0), pygame.FULLSCREEN)
|
import pygame
import picamera
import os
import sys
import io
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
size = width, height = 320, 240
pygame.init()
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode(size)
go_button = pygame.image.load("/home/pi/bike_dashcam/media/go.bmp")
| Update dascham with pygame GO button load | Update dascham with pygame GO button load
| Python | mit | the-raspberry-pi-guy/bike_dashcam,the-raspberry-pi-guy/bike_dashcam |
import pygame
import picamera
import os
+ import sys
+ import io
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
+ size = width, height = 320, 240
+
pygame.init()
pygame.mouse.set_visible(False)
- screen = pygame.display.set_mode((0,0), pygame.FULLSCREEN)
+ screen = pygame.display.set_mode(size)
+ go_button = pygame.image.load("/home/pi/bike_dashcam/media/go.bmp")
+
+
+ | Update dascham with pygame GO button load | ## Code Before:
import pygame
import picamera
import os
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
pygame.init()
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode((0,0), pygame.FULLSCREEN)
## Instruction:
Update dascham with pygame GO button load
## Code After:
import pygame
import picamera
import os
import sys
import io
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
size = width, height = 320, 240
pygame.init()
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode(size)
go_button = pygame.image.load("/home/pi/bike_dashcam/media/go.bmp")
| ...
import os
import sys
import io
...
size = width, height = 320, 240
pygame.init()
...
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode(size)
go_button = pygame.image.load("/home/pi/bike_dashcam/media/go.bmp")
... |
b9b8d77898c81afa5d918cc93c9011ace6f23965 | content_editor/renderer.py | content_editor/renderer.py | from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return mark_safe(''.join(
conditional_escape(self.render_content(c)) for c in contents
))
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
| from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
| Allow iterating over rendered contents | Allow iterating over rendered contents
| Python | bsd-3-clause | matthiask/feincms2-content,matthiask/feincms2-content,matthiask/django-content-editor,matthiask/feincms2-content,matthiask/django-content-editor,matthiask/django-content-editor,matthiask/django-content-editor | from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
+
+
+ __all__ = ('PluginRenderer',)
+
+
+ class RenderedContents(object):
+ def __init__(self, contents):
+ self.contents = contents
+
+ def __unicode__(self):
+ return mark_safe(''.join(self.contents))
+
+ def __iter__(self):
+ return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
- return mark_safe(''.join(
+ return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
- ))
+ )
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
| Allow iterating over rendered contents | ## Code Before:
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return mark_safe(''.join(
conditional_escape(self.render_content(c)) for c in contents
))
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
## Instruction:
Allow iterating over rendered contents
## Code After:
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
| ...
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
...
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
... |
65f069c82beea8e96bce780add4f6c3637a0d549 | challenge_3/python/ning/challenge_3.py | challenge_3/python/ning/challenge_3.py | def find_majority(sequence):
item_counter = dict()
for item in sequence:
if item not in item_counter:
item_counter[item] = 1
else:
item_counter[item] += 1
for item, item_count in item_counter.items():
if item_count > len(sequence) / 2:
return item
test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7]
print(find_majority(test_sequence_list))
| def find_majority(sequence):
item_counter = dict()
for item in sequence:
if item not in item_counter:
item_counter[item] = 1
else:
item_counter[item] += 1
if item_counter[item] > len(sequence) / 2:
return item
test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7]
print(find_majority(test_sequence_list))
| Include check majority in first loop rather than separate loop | Include check majority in first loop rather than separate loop
| Python | mit | mindm/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,erocs/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,popcornanachronism/2017Challenges | def find_majority(sequence):
item_counter = dict()
for item in sequence:
if item not in item_counter:
item_counter[item] = 1
else:
item_counter[item] += 1
-
- for item, item_count in item_counter.items():
- if item_count > len(sequence) / 2:
+ if item_counter[item] > len(sequence) / 2:
- return item
+ return item
test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7]
print(find_majority(test_sequence_list))
| Include check majority in first loop rather than separate loop | ## Code Before:
def find_majority(sequence):
item_counter = dict()
for item in sequence:
if item not in item_counter:
item_counter[item] = 1
else:
item_counter[item] += 1
for item, item_count in item_counter.items():
if item_count > len(sequence) / 2:
return item
test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7]
print(find_majority(test_sequence_list))
## Instruction:
Include check majority in first loop rather than separate loop
## Code After:
def find_majority(sequence):
item_counter = dict()
for item in sequence:
if item not in item_counter:
item_counter[item] = 1
else:
item_counter[item] += 1
if item_counter[item] > len(sequence) / 2:
return item
test_sequence_list = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7]
print(find_majority(test_sequence_list))
| ...
item_counter[item] += 1
if item_counter[item] > len(sequence) / 2:
return item
... |
791d378d1c5cb2e9729877bc70261b9354bdb590 | testsuite/cases/pillow_rotate_right.py | testsuite/cases/pillow_rotate_right.py |
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
im.transpose(self.transposition)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
]
if hasattr(Image, 'TRANSPOSE'):
cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
|
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
| Transpose and Transpose180 for all Pillow versions | Transpose and Transpose180 for all Pillow versions
| Python | mit | python-pillow/pillow-perf,python-pillow/pillow-perf |
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
- im.transpose(self.transposition)
+ for trans in self.transposition:
+ im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
- rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
+ rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
- rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
+ rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
- rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
+ rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
- rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
+ rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
- rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
+ rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
+ rpartial(RotateRightCase, 'Transpose',
+ [Image.TRANSPOSE]
+ if hasattr(Image, 'TRANSPOSE')
+ else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
+ rpartial(RotateRightCase, 'Transpose180',
+ [Image.TRANSPOSE_ROTATE_180]
+ if hasattr(Image, 'TRANSPOSE_ROTATE_180')
+ else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
- if hasattr(Image, 'TRANSPOSE'):
- cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
- | Transpose and Transpose180 for all Pillow versions | ## Code Before:
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
im.transpose(self.transposition)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
]
if hasattr(Image, 'TRANSPOSE'):
cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
## Instruction:
Transpose and Transpose180 for all Pillow versions
## Code After:
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
| // ... existing code ...
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
// ... modified code ...
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
// ... rest of the code ... |
dcd2972bee896ea3c7885b1d6a8a6e132329d66b | apps/persona/urls.py | apps/persona/urls.py | from django.conf.urls.defaults import *
from mozorg.util import page
import views
urlpatterns = patterns('',
page('', 'persona/persona.html'),
page('about', 'persona/about.html'),
page('privacy-policy', 'persona/privacy-policy.html'),
page('terms-of-service', 'persona/terms-of-service.html'),
page('developer-faq', 'persona/developer-faq.html')
)
| from django.conf.urls.defaults import *
from mozorg.util import page
urlpatterns = patterns('',
page('', 'persona/persona.html'),
page('about', 'persona/about.html'),
page('privacy-policy', 'persona/privacy-policy.html'),
page('terms-of-service', 'persona/terms-of-service.html'),
page('developer-faq', 'persona/developer-faq.html')
)
| Remove unnecessary 'import views' line | Remove unnecessary 'import views' line
| Python | mpl-2.0 | jacshfr/mozilla-bedrock,marcoscaceres/bedrock,mmmavis/bedrock,sgarrity/bedrock,ericawright/bedrock,hoosteeno/bedrock,pascalchevrel/bedrock,schalkneethling/bedrock,alexgibson/bedrock,yglazko/bedrock,gauthierm/bedrock,sylvestre/bedrock,TheJJ100100/bedrock,dudepare/bedrock,davehunt/bedrock,bensternthal/bedrock,bensternthal/bedrock,craigcook/bedrock,mmmavis/bedrock,flodolo/bedrock,andreadelrio/bedrock,sylvestre/bedrock,davidwboswell/documentation_autoresponse,malena/bedrock,pascalchevrel/bedrock,glogiotatidis/bedrock,amjadm61/bedrock,Jobava/bedrock,l-hedgehog/bedrock,Jobava/bedrock,ericawright/bedrock,chirilo/bedrock,l-hedgehog/bedrock,pmclanahan/bedrock,davehunt/bedrock,mmmavis/lightbeam-bedrock-website,TheoChevalier/bedrock,CSCI-462-01-2017/bedrock,dudepare/bedrock,rishiloyola/bedrock,bensternthal/bedrock,schalkneethling/bedrock,andreadelrio/bedrock,TheJJ100100/bedrock,mkmelin/bedrock,TheoChevalier/bedrock,CSCI-462-01-2017/bedrock,glogiotatidis/bedrock,mozilla/bedrock,schalkneethling/bedrock,kyoshino/bedrock,TheoChevalier/bedrock,MichaelKohler/bedrock,analytics-pros/mozilla-bedrock,dudepare/bedrock,yglazko/bedrock,Sancus/bedrock,analytics-pros/mozilla-bedrock,jacshfr/mozilla-bedrock,rishiloyola/bedrock,gauthierm/bedrock,sgarrity/bedrock,alexgibson/bedrock,jacshfr/mozilla-bedrock,jgmize/bedrock,Sancus/bedrock,MichaelKohler/bedrock,SujaySKumar/bedrock,mozilla/mwc,ckprice/bedrock,jpetto/bedrock,MichaelKohler/bedrock,mahinthjoe/bedrock,marcoscaceres/bedrock,flodolo/bedrock,Sancus/bedrock,amjadm61/bedrock,TheoChevalier/bedrock,chirilo/bedrock,ericawright/bedrock,pascalchevrel/bedrock,sylvestre/bedrock,gerv/bedrock,alexgibson/bedrock,jpetto/bedrock,mmmavis/lightbeam-bedrock-website,chirilo/bedrock,gerv/bedrock,mozilla/bedrock,mermi/bedrock,l-hedgehog/bedrock,elin-moco/bedrock,mozilla/mwc,glogiotatidis/bedrock,gerv/bedrock,SujaySKumar/bedrock,elin-moco/bedrock,jacshfr/mozilla-bedrock,l-hedgehog/bedrock,alexgibson/bedrock,CSCI-462-01-2017/bedrock,elin-moc
o/bedrock,elin-moco/bedrock,sgarrity/bedrock,ckprice/bedrock,gauthierm/bedrock,davidwboswell/documentation_autoresponse,MichaelKohler/bedrock,mozilla/bedrock,chirilo/bedrock,malena/bedrock,petabyte/bedrock,amjadm61/bedrock,hoosteeno/bedrock,pmclanahan/bedrock,yglazko/bedrock,yglazko/bedrock,malena/bedrock,SujaySKumar/bedrock,craigcook/bedrock,amjadm61/bedrock,jpetto/bedrock,SujaySKumar/bedrock,andreadelrio/bedrock,davidwboswell/documentation_autoresponse,mozilla/mwc,kyoshino/bedrock,mmmavis/bedrock,petabyte/bedrock,hoosteeno/bedrock,jgmize/bedrock,rishiloyola/bedrock,craigcook/bedrock,andreadelrio/bedrock,schalkneethling/bedrock,TheJJ100100/bedrock,analytics-pros/mozilla-bedrock,mozilla/bedrock,mozilla/mwc,petabyte/bedrock,ericawright/bedrock,mermi/bedrock,ckprice/bedrock,mahinthjoe/bedrock,marcoscaceres/bedrock,sgarrity/bedrock,mermi/bedrock,pmclanahan/bedrock,petabyte/bedrock,pascalchevrel/bedrock,mermi/bedrock,jpetto/bedrock,gauthierm/bedrock,mahinthjoe/bedrock,ckprice/bedrock,glogiotatidis/bedrock,kyoshino/bedrock,mmmavis/lightbeam-bedrock-website,marcoscaceres/bedrock,mkmelin/bedrock,mkmelin/bedrock,CSCI-462-01-2017/bedrock,craigcook/bedrock,jacshfr/mozilla-bedrock,bensternthal/bedrock,malena/bedrock,Jobava/bedrock,flodolo/bedrock,Jobava/bedrock,jgmize/bedrock,davidwboswell/documentation_autoresponse,mkmelin/bedrock,mahinthjoe/bedrock,analytics-pros/mozilla-bedrock,Sancus/bedrock,davehunt/bedrock,flodolo/bedrock,amjadm61/bedrock,TheJJ100100/bedrock,pmclanahan/bedrock,rishiloyola/bedrock,mmmavis/bedrock,dudepare/bedrock,sylvestre/bedrock,jgmize/bedrock,hoosteeno/bedrock,kyoshino/bedrock,gerv/bedrock,davehunt/bedrock | from django.conf.urls.defaults import *
from mozorg.util import page
- import views
urlpatterns = patterns('',
page('', 'persona/persona.html'),
page('about', 'persona/about.html'),
page('privacy-policy', 'persona/privacy-policy.html'),
page('terms-of-service', 'persona/terms-of-service.html'),
page('developer-faq', 'persona/developer-faq.html')
)
| Remove unnecessary 'import views' line | ## Code Before:
from django.conf.urls.defaults import *
from mozorg.util import page
import views
urlpatterns = patterns('',
page('', 'persona/persona.html'),
page('about', 'persona/about.html'),
page('privacy-policy', 'persona/privacy-policy.html'),
page('terms-of-service', 'persona/terms-of-service.html'),
page('developer-faq', 'persona/developer-faq.html')
)
## Instruction:
Remove unnecessary 'import views' line
## Code After:
from django.conf.urls.defaults import *
from mozorg.util import page
urlpatterns = patterns('',
page('', 'persona/persona.html'),
page('about', 'persona/about.html'),
page('privacy-policy', 'persona/privacy-policy.html'),
page('terms-of-service', 'persona/terms-of-service.html'),
page('developer-faq', 'persona/developer-faq.html')
)
| # ... existing code ...
from mozorg.util import page
# ... rest of the code ... |
8883f1a45595219ae843b3400df1f56ab07aa4fe | corehq/apps/userreports/document_stores.py | corehq/apps/userreports/document_stores.py | from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| Add document store for locations | Add document store for locations
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | + from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
+ from pillowtop.dao.exceptions import DocumentNotFoundError
+ from pillowtop.dao.interface import ReadOnlyDocumentStore
+
+
+ class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
+
+ def __init__(self, domain):
+ self.domain = domain
+ self.queryset = SQLLocation.objects.filter(domain=domain)
+
+ def get_document(self, doc_id):
+ try:
+ return self.queryset.get(location_id=doc_id).to_json()
+ except SQLLocation.DoesNotExist as e:
+ raise DocumentNotFoundError(e)
+
+ def iter_document_ids(self, last_id=None):
+ return iter(self.queryset.location_ids())
+
+ def iter_documents(self, ids):
+ for location in self.queryset.filter(location_id__in=ids):
+ yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
+ elif doc_type == 'Location':
+ return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| Add document store for locations | ## Code Before:
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
## Instruction:
Add document store for locations
## Code After:
from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| ...
from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
...
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
...
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
... |
e9541dbd1959b7a2ad1ee9145d3168c5898fe204 | python/generate_sorl_xml.py | python/generate_sorl_xml.py |
import sys
from message import Message
from persistence import Persistence
from const import XML_FILE_EXTENSION
message = Persistence.get_message_from_file(sys.argv[1])
if (message is not None):
Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
|
import sys
from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
document = Persistence.get_document_from_file(sys.argv[1])
if (document is not None):
Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
| Fix wrong naming in generate...py | Fix wrong naming in generate...py
Signed-off-by: Fabio Benigno <[email protected]>
| Python | apache-2.0 | fpbfabio/newsgroups1000s,fpbfabio/dblp_data_processing |
import sys
- from message import Message
+ from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
- message = Persistence.get_message_from_file(sys.argv[1])
+ document = Persistence.get_document_from_file(sys.argv[1])
- if (message is not None):
+ if (document is not None):
- Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
+ Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
| Fix wrong naming in generate...py | ## Code Before:
import sys
from message import Message
from persistence import Persistence
from const import XML_FILE_EXTENSION
message = Persistence.get_message_from_file(sys.argv[1])
if (message is not None):
Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
## Instruction:
Fix wrong naming in generate...py
## Code After:
import sys
from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
document = Persistence.get_document_from_file(sys.argv[1])
if (document is not None):
Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
| ...
import sys
from document import Document
from persistence import Persistence
...
document = Persistence.get_document_from_file(sys.argv[1])
if (document is not None):
Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
... |
335f1de1120e658f4e87dcbbcaf882146df895bb | zounds/__init__.py | zounds/__init__.py | from node.duration import \
Hours, Minutes, Seconds, Milliseconds, Microseconds, Picoseconds
from node.audio_metadata import MetaData, AudioMetaDataEncoder
from node.ogg_vorbis import \
OggVorbis, OggVorbisDecoder, OggVorbisEncoder, OggVorbisFeature, \
OggVorbisWrapper
from node.audiostream import AudioStream
from node.basic import Slice, Sum, Max
from node.learn import KMeans, BinaryRbm, LinearRbm, Learned
from node.onset import \
MeasureOfTransience, MovingAveragePeakPicker, SparseTimestampDecoder, \
SparseTimestampEncoder, TimeSliceDecoder, TimeSliceFeature
from node.preprocess import \
MeanStdNormalization, UnitNorm, PreprocessingPipeline
from node.random_samples import ReservoirSampler
from node.resample import Resampler
from node.samplerate import \
SR11025, SR22050, SR44100, SR48000, SR96000, HalfLapped
from node.sliding_window import SlidingWindow, OggVorbisWindowingFunc
from node.spectral import FFT, DCT, BarkBands, Chroma, BFCC
from node.template_match import TemplateMatch
from node.timeseries import \
TimeSlice, ConstantRateTimeSeriesEncoder, ConstantRateTimeSeriesFeature, \
GreedyConstantRateTimeSeriesDecoder
from node.api import ZoundsApp
from node.util import process_dir
| from node.duration import \
Hours, Minutes, Seconds, Milliseconds, Microseconds, Picoseconds
from node.audio_metadata import MetaData, AudioMetaDataEncoder
from node.ogg_vorbis import \
OggVorbis, OggVorbisDecoder, OggVorbisEncoder, OggVorbisFeature, \
OggVorbisWrapper
from node.audiostream import AudioStream
from node.basic import Slice, Sum, Max
from node.learn import KMeans, BinaryRbm, LinearRbm, Learned
from node.onset import \
MeasureOfTransience, MovingAveragePeakPicker, SparseTimestampDecoder, \
SparseTimestampEncoder, TimeSliceDecoder, TimeSliceFeature, ComplexDomain
from node.preprocess import \
MeanStdNormalization, UnitNorm, PreprocessingPipeline
from node.random_samples import ReservoirSampler
from node.resample import Resampler
from node.samplerate import \
SR11025, SR22050, SR44100, SR48000, SR96000, HalfLapped
from node.sliding_window import SlidingWindow, OggVorbisWindowingFunc
from node.spectral import FFT, DCT, BarkBands, Chroma, BFCC
from node.template_match import TemplateMatch
from node.timeseries import \
TimeSlice, ConstantRateTimeSeriesEncoder, ConstantRateTimeSeriesFeature, \
GreedyConstantRateTimeSeriesDecoder
from node.api import ZoundsApp
from node.util import process_dir
| Add onset detection processing node to top-level exports | Add onset detection processing node to top-level exports
| Python | mit | JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds | from node.duration import \
Hours, Minutes, Seconds, Milliseconds, Microseconds, Picoseconds
from node.audio_metadata import MetaData, AudioMetaDataEncoder
from node.ogg_vorbis import \
OggVorbis, OggVorbisDecoder, OggVorbisEncoder, OggVorbisFeature, \
OggVorbisWrapper
from node.audiostream import AudioStream
from node.basic import Slice, Sum, Max
from node.learn import KMeans, BinaryRbm, LinearRbm, Learned
from node.onset import \
MeasureOfTransience, MovingAveragePeakPicker, SparseTimestampDecoder, \
- SparseTimestampEncoder, TimeSliceDecoder, TimeSliceFeature
+ SparseTimestampEncoder, TimeSliceDecoder, TimeSliceFeature, ComplexDomain
from node.preprocess import \
MeanStdNormalization, UnitNorm, PreprocessingPipeline
from node.random_samples import ReservoirSampler
from node.resample import Resampler
from node.samplerate import \
SR11025, SR22050, SR44100, SR48000, SR96000, HalfLapped
from node.sliding_window import SlidingWindow, OggVorbisWindowingFunc
from node.spectral import FFT, DCT, BarkBands, Chroma, BFCC
from node.template_match import TemplateMatch
from node.timeseries import \
TimeSlice, ConstantRateTimeSeriesEncoder, ConstantRateTimeSeriesFeature, \
GreedyConstantRateTimeSeriesDecoder
from node.api import ZoundsApp
from node.util import process_dir
| Add onset detection processing node to top-level exports | ## Code Before:
from node.duration import \
Hours, Minutes, Seconds, Milliseconds, Microseconds, Picoseconds
from node.audio_metadata import MetaData, AudioMetaDataEncoder
from node.ogg_vorbis import \
OggVorbis, OggVorbisDecoder, OggVorbisEncoder, OggVorbisFeature, \
OggVorbisWrapper
from node.audiostream import AudioStream
from node.basic import Slice, Sum, Max
from node.learn import KMeans, BinaryRbm, LinearRbm, Learned
from node.onset import \
MeasureOfTransience, MovingAveragePeakPicker, SparseTimestampDecoder, \
SparseTimestampEncoder, TimeSliceDecoder, TimeSliceFeature
from node.preprocess import \
MeanStdNormalization, UnitNorm, PreprocessingPipeline
from node.random_samples import ReservoirSampler
from node.resample import Resampler
from node.samplerate import \
SR11025, SR22050, SR44100, SR48000, SR96000, HalfLapped
from node.sliding_window import SlidingWindow, OggVorbisWindowingFunc
from node.spectral import FFT, DCT, BarkBands, Chroma, BFCC
from node.template_match import TemplateMatch
from node.timeseries import \
TimeSlice, ConstantRateTimeSeriesEncoder, ConstantRateTimeSeriesFeature, \
GreedyConstantRateTimeSeriesDecoder
from node.api import ZoundsApp
from node.util import process_dir
## Instruction:
Add onset detection processing node to top-level exports
## Code After:
from node.duration import \
Hours, Minutes, Seconds, Milliseconds, Microseconds, Picoseconds
from node.audio_metadata import MetaData, AudioMetaDataEncoder
from node.ogg_vorbis import \
OggVorbis, OggVorbisDecoder, OggVorbisEncoder, OggVorbisFeature, \
OggVorbisWrapper
from node.audiostream import AudioStream
from node.basic import Slice, Sum, Max
from node.learn import KMeans, BinaryRbm, LinearRbm, Learned
from node.onset import \
MeasureOfTransience, MovingAveragePeakPicker, SparseTimestampDecoder, \
SparseTimestampEncoder, TimeSliceDecoder, TimeSliceFeature, ComplexDomain
from node.preprocess import \
MeanStdNormalization, UnitNorm, PreprocessingPipeline
from node.random_samples import ReservoirSampler
from node.resample import Resampler
from node.samplerate import \
SR11025, SR22050, SR44100, SR48000, SR96000, HalfLapped
from node.sliding_window import SlidingWindow, OggVorbisWindowingFunc
from node.spectral import FFT, DCT, BarkBands, Chroma, BFCC
from node.template_match import TemplateMatch
from node.timeseries import \
TimeSlice, ConstantRateTimeSeriesEncoder, ConstantRateTimeSeriesFeature, \
GreedyConstantRateTimeSeriesDecoder
from node.api import ZoundsApp
from node.util import process_dir
| // ... existing code ...
MeasureOfTransience, MovingAveragePeakPicker, SparseTimestampDecoder, \
SparseTimestampEncoder, TimeSliceDecoder, TimeSliceFeature, ComplexDomain
// ... rest of the code ... |
0eb7e39c726ced0e802de925c7ce3b3ec35c61d9 | src/billing/factories.py | src/billing/factories.py | import factory
import random
from billing.models import Billing, OrderBilling
from member.factories import ClientFactory
from order.factories import OrderFactory
class BillingFactory(factory.DjangoModelFactory):
class Meta:
model = Billing
client = factory.SubFactory(ClientFactory)
total_amount = random.randrange(1, stop=75, step=1)
billing_month = random.randrange(1, stop=12, step=1)
billing_year = random.randrange(2016, stop=2020, step=1)
detail = {"123": 123}
class BillingOrder(factory.DjangoModelFactory):
billing_id = BillingFactory().id
order_id = OrderFactory()
| import factory
import random
from billing.models import Billing, OrderBilling
from member.factories import ClientFactory
from order.factories import OrderFactory
class BillingFactory(factory.DjangoModelFactory):
class Meta:
model = Billing
client = factory.SubFactory(ClientFactory)
total_amount = random.randrange(1, stop=75, step=1)
billing_month = random.randrange(1, stop=12, step=1)
billing_year = random.randrange(2016, stop=2020, step=1)
detail = {"123": 123}
| Remove a BillingOrder factory class that wasn't use | Remove a BillingOrder factory class that wasn't use
There was a problem with this class... but since I couldn't find
code using it, I simply deleted it.
| Python | agpl-3.0 | savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,savoirfairelinux/sous-chef,madmath/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/sous-chef | import factory
import random
from billing.models import Billing, OrderBilling
from member.factories import ClientFactory
from order.factories import OrderFactory
class BillingFactory(factory.DjangoModelFactory):
class Meta:
model = Billing
client = factory.SubFactory(ClientFactory)
total_amount = random.randrange(1, stop=75, step=1)
billing_month = random.randrange(1, stop=12, step=1)
billing_year = random.randrange(2016, stop=2020, step=1)
detail = {"123": 123}
-
- class BillingOrder(factory.DjangoModelFactory):
- billing_id = BillingFactory().id
-
- order_id = OrderFactory()
- | Remove a BillingOrder factory class that wasn't use | ## Code Before:
import factory
import random
from billing.models import Billing, OrderBilling
from member.factories import ClientFactory
from order.factories import OrderFactory
class BillingFactory(factory.DjangoModelFactory):
class Meta:
model = Billing
client = factory.SubFactory(ClientFactory)
total_amount = random.randrange(1, stop=75, step=1)
billing_month = random.randrange(1, stop=12, step=1)
billing_year = random.randrange(2016, stop=2020, step=1)
detail = {"123": 123}
class BillingOrder(factory.DjangoModelFactory):
billing_id = BillingFactory().id
order_id = OrderFactory()
## Instruction:
Remove a BillingOrder factory class that wasn't use
## Code After:
import factory
import random
from billing.models import Billing, OrderBilling
from member.factories import ClientFactory
from order.factories import OrderFactory
class BillingFactory(factory.DjangoModelFactory):
class Meta:
model = Billing
client = factory.SubFactory(ClientFactory)
total_amount = random.randrange(1, stop=75, step=1)
billing_month = random.randrange(1, stop=12, step=1)
billing_year = random.randrange(2016, stop=2020, step=1)
detail = {"123": 123}
| ...
detail = {"123": 123}
... |
018172a47450eb5500d330803a2e5a7429891016 | migrations/versions/177_add_run_state_eas_folderstatus.py | migrations/versions/177_add_run_state_eas_folderstatus.py |
# revision identifiers, used by Alembic.
revision = '2b9dd6f7593a'
down_revision = '48a1991e5dbd'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('easfoldersyncstatus', sa.Column('sync_should_run',
sa.Boolean(), server_default=sa.sql.expression.true(),
nullable=False))
def downgrade():
op.drop_column('easfoldersyncstatus', 'sync_should_run')
|
# revision identifiers, used by Alembic.
revision = '2b9dd6f7593a'
down_revision = '48a1991e5dbd'
from alembic import op
import sqlalchemy as sa
def upgrade():
from inbox.ignition import main_engine
engine = main_engine(pool_size=1, max_overflow=0)
if not engine.has_table('easfoldersyncstatus'):
return
op.add_column('easfoldersyncstatus', sa.Column('sync_should_run',
sa.Boolean(), server_default=sa.sql.expression.true(),
nullable=False))
def downgrade():
from inbox.ignition import main_engine
engine = main_engine(pool_size=1, max_overflow=0)
if not engine.has_table('easfoldersyncstatus'):
return
op.drop_column('easfoldersyncstatus', 'sync_should_run')
| Update migration 177 to check for table existence first | Update migration 177 to check for table existence first
| Python | agpl-3.0 | ErinCall/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,closeio/nylas,gale320/sync-engine,nylas/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,jobscore/sync-engine,jobscore/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,wakermahmud/sync-engine,closeio/nylas,wakermahmud/sync-engine,jobscore/sync-engine,closeio/nylas,ErinCall/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,closeio/nylas,nylas/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,nylas/sync-engine,gale320/sync-engine,gale320/sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,gale320/sync-engine |
# revision identifiers, used by Alembic.
revision = '2b9dd6f7593a'
down_revision = '48a1991e5dbd'
from alembic import op
import sqlalchemy as sa
def upgrade():
+ from inbox.ignition import main_engine
+ engine = main_engine(pool_size=1, max_overflow=0)
+ if not engine.has_table('easfoldersyncstatus'):
+ return
op.add_column('easfoldersyncstatus', sa.Column('sync_should_run',
sa.Boolean(), server_default=sa.sql.expression.true(),
nullable=False))
def downgrade():
+ from inbox.ignition import main_engine
+ engine = main_engine(pool_size=1, max_overflow=0)
+ if not engine.has_table('easfoldersyncstatus'):
+ return
op.drop_column('easfoldersyncstatus', 'sync_should_run')
| Update migration 177 to check for table existence first | ## Code Before:
# revision identifiers, used by Alembic.
revision = '2b9dd6f7593a'
down_revision = '48a1991e5dbd'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('easfoldersyncstatus', sa.Column('sync_should_run',
sa.Boolean(), server_default=sa.sql.expression.true(),
nullable=False))
def downgrade():
op.drop_column('easfoldersyncstatus', 'sync_should_run')
## Instruction:
Update migration 177 to check for table existence first
## Code After:
# revision identifiers, used by Alembic.
revision = '2b9dd6f7593a'
down_revision = '48a1991e5dbd'
from alembic import op
import sqlalchemy as sa
def upgrade():
from inbox.ignition import main_engine
engine = main_engine(pool_size=1, max_overflow=0)
if not engine.has_table('easfoldersyncstatus'):
return
op.add_column('easfoldersyncstatus', sa.Column('sync_should_run',
sa.Boolean(), server_default=sa.sql.expression.true(),
nullable=False))
def downgrade():
from inbox.ignition import main_engine
engine = main_engine(pool_size=1, max_overflow=0)
if not engine.has_table('easfoldersyncstatus'):
return
op.drop_column('easfoldersyncstatus', 'sync_should_run')
| ...
def upgrade():
from inbox.ignition import main_engine
engine = main_engine(pool_size=1, max_overflow=0)
if not engine.has_table('easfoldersyncstatus'):
return
op.add_column('easfoldersyncstatus', sa.Column('sync_should_run',
...
def downgrade():
from inbox.ignition import main_engine
engine = main_engine(pool_size=1, max_overflow=0)
if not engine.has_table('easfoldersyncstatus'):
return
op.drop_column('easfoldersyncstatus', 'sync_should_run')
... |
35b5215cd16493fea00c7ebb2106c633ce4c6a9b | qutebrowser/config.py | qutebrowser/config.py | config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
| config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
| Use Arial as Fantasy font | qutebrowser: Use Arial as Fantasy font
| Python | mit | The-Compiler/dotfiles,The-Compiler/dotfiles,The-Compiler/dotfiles | config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
+ c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
| Use Arial as Fantasy font | ## Code Before:
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
## Instruction:
Use Arial as Fantasy font
## Code After:
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
| # ... existing code ...
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
# ... rest of the code ... |
5bb8d24d90b7e6fab72f4f4988ea3055d3250b7e | src/nodeconductor_assembly_waldur/invoices/serializers.py | src/nodeconductor_assembly_waldur/invoices/serializers.py | from rest_framework import serializers
from . import models
class OpenStackItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta(object):
model = models.OpenStackItem
fields = ('package_details', 'package', 'price', 'start', 'end')
extra_kwargs = {
'package': {'lookup_field': 'uuid', 'view_name': 'openstack-package-detail'},
}
def to_representation(self, instance):
instance.package_details['name'] = instance.name
return super(OpenStackItemSerializer, self).to_representation(instance)
class InvoiceSerializer(serializers.HyperlinkedModelSerializer):
total = serializers.DecimalField(max_digits=15, decimal_places=7)
openstack_items = OpenStackItemSerializer(many=True)
class Meta(object):
model = models.Invoice
fields = (
'url', 'uuid', 'customer', 'total', 'openstack_items', 'state', 'year', 'month'
)
view_name = 'invoice-detail'
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
'customer': {'lookup_field': 'uuid'},
}
| from rest_framework import serializers
from . import models
class OpenStackItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta(object):
model = models.OpenStackItem
fields = ('package_details', 'package', 'price', 'start', 'end')
extra_kwargs = {
'package': {'lookup_field': 'uuid', 'view_name': 'openstack-package-detail'},
}
def to_representation(self, instance):
instance.package_details['name'] = instance.name
return super(OpenStackItemSerializer, self).to_representation(instance)
class InvoiceSerializer(serializers.HyperlinkedModelSerializer):
total = serializers.DecimalField(max_digits=15, decimal_places=7)
openstack_items = OpenStackItemSerializer(many=True)
class Meta(object):
model = models.Invoice
fields = (
'url', 'uuid', 'customer', 'total', 'openstack_items', 'state', 'year', 'month'
)
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
'customer': {'lookup_field': 'uuid'},
}
| Remove redundant view_name variable in serializer | Remove redundant view_name variable in serializer
- WAL-109
| Python | mit | opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind | from rest_framework import serializers
from . import models
class OpenStackItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta(object):
model = models.OpenStackItem
fields = ('package_details', 'package', 'price', 'start', 'end')
extra_kwargs = {
'package': {'lookup_field': 'uuid', 'view_name': 'openstack-package-detail'},
}
def to_representation(self, instance):
instance.package_details['name'] = instance.name
return super(OpenStackItemSerializer, self).to_representation(instance)
class InvoiceSerializer(serializers.HyperlinkedModelSerializer):
total = serializers.DecimalField(max_digits=15, decimal_places=7)
openstack_items = OpenStackItemSerializer(many=True)
class Meta(object):
model = models.Invoice
fields = (
'url', 'uuid', 'customer', 'total', 'openstack_items', 'state', 'year', 'month'
)
- view_name = 'invoice-detail'
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
'customer': {'lookup_field': 'uuid'},
}
| Remove redundant view_name variable in serializer | ## Code Before:
from rest_framework import serializers
from . import models
class OpenStackItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta(object):
model = models.OpenStackItem
fields = ('package_details', 'package', 'price', 'start', 'end')
extra_kwargs = {
'package': {'lookup_field': 'uuid', 'view_name': 'openstack-package-detail'},
}
def to_representation(self, instance):
instance.package_details['name'] = instance.name
return super(OpenStackItemSerializer, self).to_representation(instance)
class InvoiceSerializer(serializers.HyperlinkedModelSerializer):
total = serializers.DecimalField(max_digits=15, decimal_places=7)
openstack_items = OpenStackItemSerializer(many=True)
class Meta(object):
model = models.Invoice
fields = (
'url', 'uuid', 'customer', 'total', 'openstack_items', 'state', 'year', 'month'
)
view_name = 'invoice-detail'
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
'customer': {'lookup_field': 'uuid'},
}
## Instruction:
Remove redundant view_name variable in serializer
## Code After:
from rest_framework import serializers
from . import models
class OpenStackItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta(object):
model = models.OpenStackItem
fields = ('package_details', 'package', 'price', 'start', 'end')
extra_kwargs = {
'package': {'lookup_field': 'uuid', 'view_name': 'openstack-package-detail'},
}
def to_representation(self, instance):
instance.package_details['name'] = instance.name
return super(OpenStackItemSerializer, self).to_representation(instance)
class InvoiceSerializer(serializers.HyperlinkedModelSerializer):
total = serializers.DecimalField(max_digits=15, decimal_places=7)
openstack_items = OpenStackItemSerializer(many=True)
class Meta(object):
model = models.Invoice
fields = (
'url', 'uuid', 'customer', 'total', 'openstack_items', 'state', 'year', 'month'
)
extra_kwargs = {
'url': {'lookup_field': 'uuid'},
'customer': {'lookup_field': 'uuid'},
}
| ...
)
extra_kwargs = {
... |
22ac4b9f8dd7d74a84585131fb982f3594a91603 | hr_family/models/hr_children.py | hr_family/models/hr_children.py |
from openerp import models, fields
GENDER_SELECTION = [('m', 'M'),
('f', 'F')]
class HrChildren(models.Model):
_name = 'hr.employee.children'
_description = 'HR Employee Children'
name = fields.Char("Name", required=True)
date_of_birth = fields.Date("Date of Birth", oldname='dob')
employee_id = fields.Many2one('hr.employee', "Employee")
gender = fields.Selection(selection=GENDER_SELECTION, string='Gender')
|
from openerp import models, fields
GENDER_SELECTION = [('male', 'Male'),
('female', 'Female')]
class HrChildren(models.Model):
_name = 'hr.employee.children'
_description = 'HR Employee Children'
name = fields.Char("Name", required=True)
date_of_birth = fields.Date("Date of Birth", oldname='dob')
employee_id = fields.Many2one('hr.employee', "Employee")
gender = fields.Selection(selection=GENDER_SELECTION, string='Gender')
| Use the same selection like employee | [IMP][hr_family] Use the same selection like employee
| Python | agpl-3.0 | xpansa/hr,Vauxoo/hr,Eficent/hr,thinkopensolutions/hr,microcom/hr,hbrunn/hr,acsone/hr,hbrunn/hr,Antiun/hr,feketemihai/hr,thinkopensolutions/hr,Antiun/hr,xpansa/hr,Endika/hr,feketemihai/hr,Endika/hr,open-synergy/hr,VitalPet/hr,microcom/hr,Vauxoo/hr,VitalPet/hr,open-synergy/hr,Eficent/hr,acsone/hr |
from openerp import models, fields
- GENDER_SELECTION = [('m', 'M'),
+ GENDER_SELECTION = [('male', 'Male'),
- ('f', 'F')]
+ ('female', 'Female')]
class HrChildren(models.Model):
_name = 'hr.employee.children'
_description = 'HR Employee Children'
name = fields.Char("Name", required=True)
date_of_birth = fields.Date("Date of Birth", oldname='dob')
employee_id = fields.Many2one('hr.employee', "Employee")
gender = fields.Selection(selection=GENDER_SELECTION, string='Gender')
| Use the same selection like employee | ## Code Before:
from openerp import models, fields
GENDER_SELECTION = [('m', 'M'),
('f', 'F')]
class HrChildren(models.Model):
_name = 'hr.employee.children'
_description = 'HR Employee Children'
name = fields.Char("Name", required=True)
date_of_birth = fields.Date("Date of Birth", oldname='dob')
employee_id = fields.Many2one('hr.employee', "Employee")
gender = fields.Selection(selection=GENDER_SELECTION, string='Gender')
## Instruction:
Use the same selection like employee
## Code After:
from openerp import models, fields
GENDER_SELECTION = [('male', 'Male'),
('female', 'Female')]
class HrChildren(models.Model):
_name = 'hr.employee.children'
_description = 'HR Employee Children'
name = fields.Char("Name", required=True)
date_of_birth = fields.Date("Date of Birth", oldname='dob')
employee_id = fields.Many2one('hr.employee', "Employee")
gender = fields.Selection(selection=GENDER_SELECTION, string='Gender')
| ...
GENDER_SELECTION = [('male', 'Male'),
('female', 'Female')]
... |
bc9f4d4e5022f4219727e9085164982f9efb005e | editor/views/generic.py | editor/views/generic.py | import git
import os
from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render
class SaveContentMixin():
"""Save exam or question content to a git repository and to a database."""
# object = None
# request = None
# template_name = None
def write_content(self, directory, form, inlines=None):
try:
repo = git.Repo(settings.GLOBAL_SETTINGS['REPO_PATH'])
path_to_file = os.path.join(settings.GLOBAL_SETTINGS['REPO_PATH'],
directory, self.object.filename)
fh = open(path_to_file, 'w')
fh.write(self.object.content)
fh.close()
repo.index.add([os.path.join(directory, self.object.filename)])
repo.index.commit('Made some changes to %s' % self.object.name)
except IOError:
error = "Could not save file."
return render(self.request, self.template_name,
{'form': form, 'inlines': inlines, 'error': error,
'object': self.object})
self.object = form.save()
if inlines is not None:
for formset in inlines:
formset.save()
return HttpResponseRedirect(self.get_success_url()) | import git
import os
from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render
class SaveContentMixin():
"""Save exam or question content to a git repository and to a database."""
# object = None
# request = None
# template_name = None
def write_content(self, directory, form, inlines=None):
try:
repo = git.Repo(settings.GLOBAL_SETTINGS['REPO_PATH'])
os.environ['GIT_AUTHOR_NAME'] = 'Numbas'
os.environ['GIT_AUTHOR_EMAIL'] = '[email protected]'
path_to_file = os.path.join(settings.GLOBAL_SETTINGS['REPO_PATH'],
directory, self.object.filename)
fh = open(path_to_file, 'w')
fh.write(self.object.content)
fh.close()
repo.index.add([os.path.join(directory, self.object.filename)])
repo.index.commit('Made some changes to %s' % self.object.name)
except IOError:
error = "Could not save file."
return render(self.request, self.template_name,
{'form': form, 'inlines': inlines, 'error': error,
'object': self.object})
self.object = form.save()
if inlines is not None:
for formset in inlines:
formset.save()
return HttpResponseRedirect(self.get_success_url()) | Set the git author name and e-mail | Set the git author name and e-mail
| Python | apache-2.0 | numbas/editor,numbas/editor,numbas/editor | import git
import os
from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render
class SaveContentMixin():
"""Save exam or question content to a git repository and to a database."""
# object = None
# request = None
# template_name = None
def write_content(self, directory, form, inlines=None):
try:
repo = git.Repo(settings.GLOBAL_SETTINGS['REPO_PATH'])
+ os.environ['GIT_AUTHOR_NAME'] = 'Numbas'
+ os.environ['GIT_AUTHOR_EMAIL'] = '[email protected]'
path_to_file = os.path.join(settings.GLOBAL_SETTINGS['REPO_PATH'],
directory, self.object.filename)
fh = open(path_to_file, 'w')
fh.write(self.object.content)
fh.close()
repo.index.add([os.path.join(directory, self.object.filename)])
repo.index.commit('Made some changes to %s' % self.object.name)
except IOError:
error = "Could not save file."
return render(self.request, self.template_name,
{'form': form, 'inlines': inlines, 'error': error,
'object': self.object})
self.object = form.save()
if inlines is not None:
for formset in inlines:
formset.save()
return HttpResponseRedirect(self.get_success_url()) | Set the git author name and e-mail | ## Code Before:
import git
import os
from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render
class SaveContentMixin():
"""Save exam or question content to a git repository and to a database."""
# object = None
# request = None
# template_name = None
def write_content(self, directory, form, inlines=None):
try:
repo = git.Repo(settings.GLOBAL_SETTINGS['REPO_PATH'])
path_to_file = os.path.join(settings.GLOBAL_SETTINGS['REPO_PATH'],
directory, self.object.filename)
fh = open(path_to_file, 'w')
fh.write(self.object.content)
fh.close()
repo.index.add([os.path.join(directory, self.object.filename)])
repo.index.commit('Made some changes to %s' % self.object.name)
except IOError:
error = "Could not save file."
return render(self.request, self.template_name,
{'form': form, 'inlines': inlines, 'error': error,
'object': self.object})
self.object = form.save()
if inlines is not None:
for formset in inlines:
formset.save()
return HttpResponseRedirect(self.get_success_url())
## Instruction:
Set the git author name and e-mail
## Code After:
import git
import os
from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render
class SaveContentMixin():
"""Save exam or question content to a git repository and to a database."""
# object = None
# request = None
# template_name = None
def write_content(self, directory, form, inlines=None):
try:
repo = git.Repo(settings.GLOBAL_SETTINGS['REPO_PATH'])
os.environ['GIT_AUTHOR_NAME'] = 'Numbas'
os.environ['GIT_AUTHOR_EMAIL'] = '[email protected]'
path_to_file = os.path.join(settings.GLOBAL_SETTINGS['REPO_PATH'],
directory, self.object.filename)
fh = open(path_to_file, 'w')
fh.write(self.object.content)
fh.close()
repo.index.add([os.path.join(directory, self.object.filename)])
repo.index.commit('Made some changes to %s' % self.object.name)
except IOError:
error = "Could not save file."
return render(self.request, self.template_name,
{'form': form, 'inlines': inlines, 'error': error,
'object': self.object})
self.object = form.save()
if inlines is not None:
for formset in inlines:
formset.save()
return HttpResponseRedirect(self.get_success_url()) | ...
repo = git.Repo(settings.GLOBAL_SETTINGS['REPO_PATH'])
os.environ['GIT_AUTHOR_NAME'] = 'Numbas'
os.environ['GIT_AUTHOR_EMAIL'] = '[email protected]'
path_to_file = os.path.join(settings.GLOBAL_SETTINGS['REPO_PATH'],
... |
7d9c7133de36d2fd7587d7be361cd0ff964d4e94 | deflect/urls.py | deflect/urls.py | from django.conf.urls import patterns
from django.conf.urls import url
from .views import redirect
urlpatterns = patterns('',
url(r'^(?P<key>[a-zA-Z0-9]+)$', redirect, name='deflect-redirect'),
)
| from django.conf import settings
from django.conf.urls import patterns
from django.conf.urls import url
from .views import alias
from .views import redirect
urlpatterns = patterns('',
url(r'^(?P<key>[a-zA-Z0-9]+)$', redirect, name='deflect-redirect'),
)
alias_prefix = getattr(settings, 'DEFLECT_ALIAS_PREFIX', '')
if alias_prefix:
urlpatterns += patterns('',
url(r'^%s(?P<key>[a-zA-Z0-9]+)$' % alias_prefix, alias,
name='deflect-alias'),
)
| Add custom URL alias paths to URLconf | Add custom URL alias paths to URLconf
| Python | bsd-3-clause | jbittel/django-deflect | + from django.conf import settings
from django.conf.urls import patterns
from django.conf.urls import url
+ from .views import alias
from .views import redirect
urlpatterns = patterns('',
url(r'^(?P<key>[a-zA-Z0-9]+)$', redirect, name='deflect-redirect'),
)
+ alias_prefix = getattr(settings, 'DEFLECT_ALIAS_PREFIX', '')
+ if alias_prefix:
+ urlpatterns += patterns('',
+ url(r'^%s(?P<key>[a-zA-Z0-9]+)$' % alias_prefix, alias,
+ name='deflect-alias'),
+ )
+ | Add custom URL alias paths to URLconf | ## Code Before:
from django.conf.urls import patterns
from django.conf.urls import url
from .views import redirect
urlpatterns = patterns('',
url(r'^(?P<key>[a-zA-Z0-9]+)$', redirect, name='deflect-redirect'),
)
## Instruction:
Add custom URL alias paths to URLconf
## Code After:
from django.conf import settings
from django.conf.urls import patterns
from django.conf.urls import url
from .views import alias
from .views import redirect
urlpatterns = patterns('',
url(r'^(?P<key>[a-zA-Z0-9]+)$', redirect, name='deflect-redirect'),
)
alias_prefix = getattr(settings, 'DEFLECT_ALIAS_PREFIX', '')
if alias_prefix:
urlpatterns += patterns('',
url(r'^%s(?P<key>[a-zA-Z0-9]+)$' % alias_prefix, alias,
name='deflect-alias'),
)
| // ... existing code ...
from django.conf import settings
from django.conf.urls import patterns
// ... modified code ...
from .views import alias
from .views import redirect
...
)
alias_prefix = getattr(settings, 'DEFLECT_ALIAS_PREFIX', '')
if alias_prefix:
urlpatterns += patterns('',
url(r'^%s(?P<key>[a-zA-Z0-9]+)$' % alias_prefix, alias,
name='deflect-alias'),
)
// ... rest of the code ... |
8004590503914d9674a0b17f412c8d1836f5e1a1 | testScript.py | testScript.py | from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821')
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
| from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs
##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
| Add second author for testing purposes | Add second author for testing purposes
| Python | bsd-3-clause | ElsevierDev/elsapy | from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
- myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821')
+ myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs
+ ##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs
+
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
| Add second author for testing purposes | ## Code Before:
from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821')
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
## Instruction:
Add second author for testing purposes
## Code After:
from elsapy import *
conFile = open("config.json")
config = json.load(conFile)
myCl = elsClient(config['apikey'])
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs
##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs
myAuth.read(myCl)
print ("myAuth.fullName: ", myAuth.fullName)
myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849')
myAff.read(myCl)
print ("myAff.name: ", myAff.name)
myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457')
myDoc.read(myCl)
print ("myDoc.title: ", myDoc.title)
myAuth.readDocs(myCl)
print ("myAuth.docList: ")
i = 0
for doc in myAuth.docList:
i += 1
print (i, ' - ', doc['dc:title'])
| ...
myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs
##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs
myAuth.read(myCl)
... |
eb165c0eb929b542178daea7057c258718d1ba6a | testfixtures/snippet.py | testfixtures/snippet.py |
from byceps.blueprints.snippet.models.snippet import \
CurrentVersionAssociation, Snippet, SnippetVersion
def create_snippet(party, name):
return Snippet(
party=party,
name=name)
def create_snippet_version(snippet, creator, *, created_at=None,
title='', head='', body='', image_url_path=None):
return SnippetVersion(
snippet=snippet,
created_at=created_at,
creator=creator,
title=title,
head=head,
body=body,
image_url_path=image_url_path)
def create_current_version_association(snippet, version):
return CurrentVersionAssociation(
snippet=snippet,
version=version)
|
from byceps.blueprints.snippet.models.snippet import \
CurrentVersionAssociation, Snippet, SnippetVersion
def create_snippet(party, name):
return Snippet(
party=party,
name=name)
def create_snippet_version(snippet, creator, *, created_at=None,
title='', head='', body='', image_url_path=None):
version = SnippetVersion(
snippet=snippet,
creator=creator,
title=title,
head=head,
body=body,
image_url_path=image_url_path)
if created_at is not None:
version.created_at = created_at
return version
def create_current_version_association(snippet, version):
return CurrentVersionAssociation(
snippet=snippet,
version=version)
| Set creation date via field, not via constructor | Set creation date via field, not via constructor
| Python | bsd-3-clause | m-ober/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps |
from byceps.blueprints.snippet.models.snippet import \
CurrentVersionAssociation, Snippet, SnippetVersion
def create_snippet(party, name):
return Snippet(
party=party,
name=name)
def create_snippet_version(snippet, creator, *, created_at=None,
title='', head='', body='', image_url_path=None):
- return SnippetVersion(
+ version = SnippetVersion(
snippet=snippet,
- created_at=created_at,
creator=creator,
title=title,
head=head,
body=body,
image_url_path=image_url_path)
+ if created_at is not None:
+ version.created_at = created_at
+
+ return version
def create_current_version_association(snippet, version):
return CurrentVersionAssociation(
snippet=snippet,
version=version)
| Set creation date via field, not via constructor | ## Code Before:
from byceps.blueprints.snippet.models.snippet import \
CurrentVersionAssociation, Snippet, SnippetVersion
def create_snippet(party, name):
return Snippet(
party=party,
name=name)
def create_snippet_version(snippet, creator, *, created_at=None,
title='', head='', body='', image_url_path=None):
return SnippetVersion(
snippet=snippet,
created_at=created_at,
creator=creator,
title=title,
head=head,
body=body,
image_url_path=image_url_path)
def create_current_version_association(snippet, version):
return CurrentVersionAssociation(
snippet=snippet,
version=version)
## Instruction:
Set creation date via field, not via constructor
## Code After:
from byceps.blueprints.snippet.models.snippet import \
CurrentVersionAssociation, Snippet, SnippetVersion
def create_snippet(party, name):
return Snippet(
party=party,
name=name)
def create_snippet_version(snippet, creator, *, created_at=None,
title='', head='', body='', image_url_path=None):
version = SnippetVersion(
snippet=snippet,
creator=creator,
title=title,
head=head,
body=body,
image_url_path=image_url_path)
if created_at is not None:
version.created_at = created_at
return version
def create_current_version_association(snippet, version):
return CurrentVersionAssociation(
snippet=snippet,
version=version)
| // ... existing code ...
title='', head='', body='', image_url_path=None):
version = SnippetVersion(
snippet=snippet,
creator=creator,
// ... modified code ...
if created_at is not None:
version.created_at = created_at
return version
// ... rest of the code ... |
fb08d35b8470cb659e9a9f80d58d15c18faeaf9c | testinfra/backend/base.py | testinfra/backend/base.py |
from __future__ import unicode_literals
import collections
import pipes
CommandResult = collections.namedtuple('CommandResult', [
'rc', 'stdout', 'stderr', 'command',
])
class BaseBackend(object):
def quote(self, command, *args):
return command % tuple(pipes.quote(a) for a in args)
@staticmethod
def parse_hostspec(hostspec):
host = hostspec
user = None
port = None
if "@" in host:
user, host = host.split("@", 1)
if ":" in host:
host, port = host.split(":", 1)
return host, user, port
def run(self, command, *args):
raise NotImplementedError
|
from __future__ import unicode_literals
import collections
import pipes
CommandResult = collections.namedtuple('CommandResult', [
'rc', 'stdout', 'stderr', 'command',
])
class BaseBackend(object):
def quote(self, command, *args):
if args:
return command % tuple(pipes.quote(a) for a in args)
else:
return command
@staticmethod
def parse_hostspec(hostspec):
host = hostspec
user = None
port = None
if "@" in host:
user, host = host.split("@", 1)
if ":" in host:
host, port = host.split(":", 1)
return host, user, port
def run(self, command, *args):
raise NotImplementedError
| Fix safe_ssh command without arguments | backend: Fix safe_ssh command without arguments
| Python | apache-2.0 | philpep/testinfra,Leibniz137/testinfra |
from __future__ import unicode_literals
import collections
import pipes
CommandResult = collections.namedtuple('CommandResult', [
'rc', 'stdout', 'stderr', 'command',
])
class BaseBackend(object):
def quote(self, command, *args):
+ if args:
- return command % tuple(pipes.quote(a) for a in args)
+ return command % tuple(pipes.quote(a) for a in args)
+ else:
+ return command
@staticmethod
def parse_hostspec(hostspec):
host = hostspec
user = None
port = None
if "@" in host:
user, host = host.split("@", 1)
if ":" in host:
host, port = host.split(":", 1)
return host, user, port
def run(self, command, *args):
raise NotImplementedError
| Fix safe_ssh command without arguments | ## Code Before:
from __future__ import unicode_literals
import collections
import pipes
CommandResult = collections.namedtuple('CommandResult', [
'rc', 'stdout', 'stderr', 'command',
])
class BaseBackend(object):
def quote(self, command, *args):
return command % tuple(pipes.quote(a) for a in args)
@staticmethod
def parse_hostspec(hostspec):
host = hostspec
user = None
port = None
if "@" in host:
user, host = host.split("@", 1)
if ":" in host:
host, port = host.split(":", 1)
return host, user, port
def run(self, command, *args):
raise NotImplementedError
## Instruction:
Fix safe_ssh command without arguments
## Code After:
from __future__ import unicode_literals
import collections
import pipes
CommandResult = collections.namedtuple('CommandResult', [
'rc', 'stdout', 'stderr', 'command',
])
class BaseBackend(object):
def quote(self, command, *args):
if args:
return command % tuple(pipes.quote(a) for a in args)
else:
return command
@staticmethod
def parse_hostspec(hostspec):
host = hostspec
user = None
port = None
if "@" in host:
user, host = host.split("@", 1)
if ":" in host:
host, port = host.split(":", 1)
return host, user, port
def run(self, command, *args):
raise NotImplementedError
| ...
def quote(self, command, *args):
if args:
return command % tuple(pipes.quote(a) for a in args)
else:
return command
... |
30dbda17bfa3b52dc2aace6eba6b8c1e4b3f7542 | robot-name/robot_name.py | robot-name/robot_name.py |
import string
import random
class Robot():
"""Robot facory settings"""
def __init__(self):
self.name = ""
|
import string
import random
class Robot():
"""Robot facory settings"""
def __init__(self):
self.name = ""
def factory_name(self):
char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2))
num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3))
self.name = char + num
return self.name
R1 = Robot()
print(R1.factory_name())
| Add methord to generate unique robot name | Add methord to generate unique robot name
| Python | mit | amalshehu/exercism-python |
import string
import random
class Robot():
"""Robot facory settings"""
def __init__(self):
self.name = ""
-
+ def factory_name(self):
+ char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2))
+ num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3))
+ self.name = char + num
+ return self.name
+
+ R1 = Robot()
+ print(R1.factory_name())
+ | Add methord to generate unique robot name | ## Code Before:
import string
import random
class Robot():
"""Robot facory settings"""
def __init__(self):
self.name = ""
## Instruction:
Add methord to generate unique robot name
## Code After:
import string
import random
class Robot():
"""Robot facory settings"""
def __init__(self):
self.name = ""
def factory_name(self):
char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2))
num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3))
self.name = char + num
return self.name
R1 = Robot()
print(R1.factory_name())
| // ... existing code ...
def factory_name(self):
char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2))
num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3))
self.name = char + num
return self.name
R1 = Robot()
print(R1.factory_name())
// ... rest of the code ... |
d1008437dcf618700bce53913f3450aceda8a23f | djangoautoconf/auto_conf_admin_utils.py | djangoautoconf/auto_conf_admin_utils.py | from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | Remove xadmin as it will not work with guardian. | Remove xadmin as it will not work with guardian.
| Python | bsd-3-clause | weijia/djangoautoconf,weijia/djangoautoconf | from guardian.admin import GuardedModelAdmin
- #from django.contrib import admin
+ from django.contrib import admin
+ #The following not work with guardian?
- import xadmin as admin
+ #import xadmin as admin
- def register_to_sys(class_inst, admin_class = None):
+ def register_to_sys(class_inst, admin_class=None):
+
if admin_class is None:
- admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
+ admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
+
+ #admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
+
user_admin_site.register(class_inst, admin_class)
except:
pass
- #register(class_inst)
+ #register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | Remove xadmin as it will not work with guardian. | ## Code Before:
from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
## Instruction:
Remove xadmin as it will not work with guardian.
## Code After:
from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | # ... existing code ...
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
# ... modified code ...
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
...
pass
#admin.site.register(class_inst)
try:
...
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
...
pass
#register(class_inst)
# ... rest of the code ... |
05e86efadfd0a05bac660e2ce47a5502b5bbdddb | tempest/tests/fake_auth_provider.py | tempest/tests/fake_auth_provider.py |
class FakeAuthProvider(object):
def auth_request(self, method, url, headers=None, body=None, filters=None):
return url, headers, body
def get_token(self):
return "faketoken"
def base_url(self, filters, auth_data=None):
return "https://example.com"
|
class FakeAuthProvider(object):
def get_token(self):
return "faketoken"
def base_url(self, filters, auth_data=None):
return "https://example.com"
| Remove auth_request as no used | Remove auth_request as no used
Function auth_request() isn't be used, it can be removed for the
code clean.
Change-Id: I979b67e934c72f50dd62c75ac614f99f136cfeae
| Python | apache-2.0 | vedujoshi/tempest,Tesora/tesora-tempest,openstack/tempest,openstack/tempest,vedujoshi/tempest,masayukig/tempest,masayukig/tempest,Juniper/tempest,cisco-openstack/tempest,sebrandon1/tempest,sebrandon1/tempest,cisco-openstack/tempest,Tesora/tesora-tempest,Juniper/tempest |
class FakeAuthProvider(object):
-
- def auth_request(self, method, url, headers=None, body=None, filters=None):
- return url, headers, body
def get_token(self):
return "faketoken"
def base_url(self, filters, auth_data=None):
return "https://example.com"
| Remove auth_request as no used | ## Code Before:
class FakeAuthProvider(object):
def auth_request(self, method, url, headers=None, body=None, filters=None):
return url, headers, body
def get_token(self):
return "faketoken"
def base_url(self, filters, auth_data=None):
return "https://example.com"
## Instruction:
Remove auth_request as no used
## Code After:
class FakeAuthProvider(object):
def get_token(self):
return "faketoken"
def base_url(self, filters, auth_data=None):
return "https://example.com"
| # ... existing code ...
class FakeAuthProvider(object):
# ... rest of the code ... |
5b283e1dd48b811b54345de53c177d78e4eb084a | fancypages/__init__.py | fancypages/__init__.py | import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
| import os
__version__ = (0, 0, 1, 'alpha', 1)
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
| Bring path function in line with oscar fancypages | Bring path function in line with oscar fancypages
| Python | bsd-3-clause | socradev/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages | import os
__version__ = (0, 0, 1, 'alpha', 1)
- FP_MAIN_TEMPLATE_DIR = os.path.join(
- os.path.dirname(os.path.abspath(__file__))
- )
+ def get_fancypages_paths(path):
+ return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
+ | Bring path function in line with oscar fancypages | ## Code Before:
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
## Instruction:
Bring path function in line with oscar fancypages
## Code After:
import os
__version__ = (0, 0, 1, 'alpha', 1)
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
| # ... existing code ...
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
# ... rest of the code ... |
c0169c5073e4a83120f4d6860258c3085b4c1cf5 | setup.py | setup.py | import subprocess as sp
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
sp.check_call(['flit', 'install', '--deps', 'production'])
| import subprocess as sp
import sys
import os
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
| Use flit that's been installed in the virtualenv | Use flit that's been installed in the virtualenv
| Python | bsd-3-clause | jupyter/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,EdwardJKim/nbgrader,dementrock/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader | import subprocess as sp
+ import sys
+ import os
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
+ flit = os.path.join(os.path.dirname(sys.executable), 'flit')
- sp.check_call(['flit', 'install', '--deps', 'production'])
+ cmd = [flit, 'install', '--deps', 'production']
+ print(" ".join(cmd))
+ sp.check_call(cmd)
| Use flit that's been installed in the virtualenv | ## Code Before:
import subprocess as sp
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
sp.check_call(['flit', 'install', '--deps', 'production'])
## Instruction:
Use flit that's been installed in the virtualenv
## Code After:
import subprocess as sp
import sys
import os
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
| // ... existing code ...
import subprocess as sp
import sys
import os
// ... modified code ...
flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
// ... rest of the code ... |
f468a26893c44411dc1f865b208788373f993918 | asciibooth/camera.py | asciibooth/camera.py | import io
# import time
import picamera
from . import config
class Camera:
def __init__(self):
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
self.preview_alpha = 200
def capture(self):
stream = io.BytesIO()
self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE)
stream.seek(0)
return stream
def toggle_preview(self):
if self.camera.preview is None:
self.camera.start_preview()
self.camera.preview.alpha = self.preview_alpha
else:
self.camera.stop_preview()
def start_preview(self, alpha=255):
self.camera.start_preview()
self.camera.preview.alpha = alpha
def stop_preview(self):
self.camera.stop_preview()
def stop(self):
self.camera.close()
| import io
# import time
import picamera
from . import config
class Camera:
def __init__(self):
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
self.camera.hflip = True
self.preview_alpha = 200
def capture(self):
stream = io.BytesIO()
self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE)
stream.seek(0)
return stream
def toggle_preview(self):
if self.camera.preview is None:
self.camera.start_preview(hflip = True)
self.camera.preview.alpha = self.preview_alpha
else:
self.camera.stop_preview()
def start_preview(self, alpha=255):
self.camera.start_preview()
self.camera.preview.alpha = alpha
def stop_preview(self):
self.camera.stop_preview()
def stop(self):
self.camera.close()
| Enable hflip for capture and preview | Enable hflip for capture and preview
| Python | cc0-1.0 | jnv/asciibooth,jnv/asciibooth | import io
# import time
import picamera
from . import config
class Camera:
def __init__(self):
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
+ self.camera.hflip = True
self.preview_alpha = 200
def capture(self):
stream = io.BytesIO()
self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE)
stream.seek(0)
return stream
def toggle_preview(self):
if self.camera.preview is None:
- self.camera.start_preview()
+ self.camera.start_preview(hflip = True)
self.camera.preview.alpha = self.preview_alpha
else:
self.camera.stop_preview()
def start_preview(self, alpha=255):
self.camera.start_preview()
self.camera.preview.alpha = alpha
def stop_preview(self):
self.camera.stop_preview()
def stop(self):
self.camera.close()
| Enable hflip for capture and preview | ## Code Before:
import io
# import time
import picamera
from . import config
class Camera:
def __init__(self):
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
self.preview_alpha = 200
def capture(self):
stream = io.BytesIO()
self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE)
stream.seek(0)
return stream
def toggle_preview(self):
if self.camera.preview is None:
self.camera.start_preview()
self.camera.preview.alpha = self.preview_alpha
else:
self.camera.stop_preview()
def start_preview(self, alpha=255):
self.camera.start_preview()
self.camera.preview.alpha = alpha
def stop_preview(self):
self.camera.stop_preview()
def stop(self):
self.camera.close()
## Instruction:
Enable hflip for capture and preview
## Code After:
import io
# import time
import picamera
from . import config
class Camera:
def __init__(self):
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
self.camera.hflip = True
self.preview_alpha = 200
def capture(self):
stream = io.BytesIO()
self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE)
stream.seek(0)
return stream
def toggle_preview(self):
if self.camera.preview is None:
self.camera.start_preview(hflip = True)
self.camera.preview.alpha = self.preview_alpha
else:
self.camera.stop_preview()
def start_preview(self, alpha=255):
self.camera.start_preview()
self.camera.preview.alpha = alpha
def stop_preview(self):
self.camera.stop_preview()
def stop(self):
self.camera.close()
| // ... existing code ...
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
self.camera.hflip = True
self.preview_alpha = 200
// ... modified code ...
if self.camera.preview is None:
self.camera.start_preview(hflip = True)
self.camera.preview.alpha = self.preview_alpha
// ... rest of the code ... |
fdf0daefac50de71a8c4f80a9ef877669ebea48b | byceps/services/tourney/transfer/models.py | byceps/services/tourney/transfer/models.py |
from typing import NewType
from uuid import UUID
from attr import attrs
TourneyCategoryID = NewType('TourneyCategoryID', UUID)
TourneyID = NewType('TourneyID', UUID)
MatchID = NewType('MatchID', UUID)
MatchCommentID = NewType('MatchCommentID', UUID)
ParticipantID = NewType('ParticipantID', UUID)
@attrs(auto_attribs=True, frozen=True, slots=True)
class Match:
id: MatchID
|
from dataclasses import dataclass
from typing import NewType
from uuid import UUID
TourneyCategoryID = NewType('TourneyCategoryID', UUID)
TourneyID = NewType('TourneyID', UUID)
MatchID = NewType('MatchID', UUID)
MatchCommentID = NewType('MatchCommentID', UUID)
ParticipantID = NewType('ParticipantID', UUID)
@dataclass(frozen=True)
class Match:
id: MatchID
| Change tourney match transfer model from `attrs` to `dataclass` | Change tourney match transfer model from `attrs` to `dataclass`
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps |
+ from dataclasses import dataclass
from typing import NewType
from uuid import UUID
-
- from attr import attrs
TourneyCategoryID = NewType('TourneyCategoryID', UUID)
TourneyID = NewType('TourneyID', UUID)
MatchID = NewType('MatchID', UUID)
MatchCommentID = NewType('MatchCommentID', UUID)
ParticipantID = NewType('ParticipantID', UUID)
- @attrs(auto_attribs=True, frozen=True, slots=True)
+ @dataclass(frozen=True)
class Match:
id: MatchID
| Change tourney match transfer model from `attrs` to `dataclass` | ## Code Before:
from typing import NewType
from uuid import UUID
from attr import attrs
TourneyCategoryID = NewType('TourneyCategoryID', UUID)
TourneyID = NewType('TourneyID', UUID)
MatchID = NewType('MatchID', UUID)
MatchCommentID = NewType('MatchCommentID', UUID)
ParticipantID = NewType('ParticipantID', UUID)
@attrs(auto_attribs=True, frozen=True, slots=True)
class Match:
id: MatchID
## Instruction:
Change tourney match transfer model from `attrs` to `dataclass`
## Code After:
from dataclasses import dataclass
from typing import NewType
from uuid import UUID
TourneyCategoryID = NewType('TourneyCategoryID', UUID)
TourneyID = NewType('TourneyID', UUID)
MatchID = NewType('MatchID', UUID)
MatchCommentID = NewType('MatchCommentID', UUID)
ParticipantID = NewType('ParticipantID', UUID)
@dataclass(frozen=True)
class Match:
id: MatchID
| // ... existing code ...
from dataclasses import dataclass
from typing import NewType
// ... modified code ...
from uuid import UUID
...
@dataclass(frozen=True)
class Match:
// ... rest of the code ... |
824d769b1b1f55a018b380f6631f11727339a018 | fpsd/run_tests.py | fpsd/run_tests.py |
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
|
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
| Add feature generation tests to test runner | Add feature generation tests to test runner
| Python | agpl-3.0 | freedomofpress/FingerprintSecureDrop,freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop,freedomofpress/fingerprint-securedrop,freedomofpress/fingerprint-securedrop |
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
+ call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
| Add feature generation tests to test runner | ## Code Before:
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
## Instruction:
Add feature generation tests to test runner
## Code After:
from subprocess import call
from os.path import dirname, abspath, join
# Run all the tests using py.test
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_sketchy_sites"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_utils"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
| ...
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_database_methods"])
call(["python3.5", "-m", "unittest", "-f", "-v", "test.test_features"])
... |
9921b6bd73c5256a3b65c2a5106717ce0fc8f0cf | djangorestframework/utils/breadcrumbs.py | djangorestframework/utils/breadcrumbs.py | from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
| from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
| Use get_script_prefix to play nicely if not installed at the root. | Use get_script_prefix to play nicely if not installed at the root.
| Python | bsd-2-clause | rafaelcaricio/django-rest-framework,maryokhin/django-rest-framework,jtiai/django-rest-framework,cheif/django-rest-framework,vstoykov/django-rest-framework,wwj718/django-rest-framework,ebsaral/django-rest-framework,jpadilla/django-rest-framework,damycra/django-rest-framework,kezabelle/django-rest-framework,cyberj/django-rest-framework,hnarayanan/django-rest-framework,kgeorgy/django-rest-framework,antonyc/django-rest-framework,lubomir/django-rest-framework,ambivalentno/django-rest-framework,potpath/django-rest-framework,ashishfinoit/django-rest-framework,waytai/django-rest-framework,nhorelik/django-rest-framework,iheitlager/django-rest-framework,rubendura/django-rest-framework,aericson/django-rest-framework,rubendura/django-rest-framework,uploadcare/django-rest-framework,atombrella/django-rest-framework,krinart/django-rest-framework,HireAnEsquire/django-rest-framework,gregmuellegger/django-rest-framework,thedrow/django-rest-framework-1,mgaitan/django-rest-framework,hnakamur/django-rest-framework,callorico/django-rest-framework,hnakamur/django-rest-framework,tigeraniya/django-rest-framework,douwevandermeij/django-rest-framework,dmwyatt/django-rest-framework,agconti/django-rest-framework,canassa/django-rest-framework,johnraz/django-rest-framework,linovia/django-rest-framework,wwj718/django-rest-framework,brandoncazander/django-rest-framework,canassa/django-rest-framework,ashishfinoit/django-rest-framework,agconti/django-rest-framework,krinart/django-rest-framework,xiaotangyuan/django-rest-framework,cyberj/django-rest-framework,uruz/django-rest-framework,akalipetis/django-rest-framework,tcroiset/django-rest-framework,buptlsl/django-rest-framework,d0ugal/django-rest-framework,kgeorgy/django-rest-framework,vstoykov/django-rest-framework,thedrow/django-rest-framework-1,jerryhebert/django-rest-framework,delinhabit/django-rest-framework,davesque/django-rest-framework,potpath/django-rest-framework,kennydude/django-rest-framework,qsorix/django-rest-fra
mework,uploadcare/django-rest-framework,ebsaral/django-rest-framework,jness/django-rest-framework,MJafarMashhadi/django-rest-framework,tcroiset/django-rest-framework,adambain-vokal/django-rest-framework,johnraz/django-rest-framework,mgaitan/django-rest-framework,wedaly/django-rest-framework,sheppard/django-rest-framework,uploadcare/django-rest-framework,rhblind/django-rest-framework,sehmaschine/django-rest-framework,sbellem/django-rest-framework,arpheno/django-rest-framework,rafaelang/django-rest-framework,bluedazzle/django-rest-framework,jtiai/django-rest-framework,antonyc/django-rest-framework,ajaali/django-rest-framework,rhblind/django-rest-framework,ebsaral/django-rest-framework,akalipetis/django-rest-framework,buptlsl/django-rest-framework,potpath/django-rest-framework,xiaotangyuan/django-rest-framework,jerryhebert/django-rest-framework,douwevandermeij/django-rest-framework,qsorix/django-rest-framework,callorico/django-rest-framework,sbellem/django-rest-framework,sehmaschine/django-rest-framework,elim/django-rest-framework,akalipetis/django-rest-framework,kylefox/django-rest-framework,adambain-vokal/django-rest-framework,maryokhin/django-rest-framework,fishky/django-rest-framework,werthen/django-rest-framework,simudream/django-rest-framework,delinhabit/django-rest-framework,aericson/django-rest-framework,abdulhaq-e/django-rest-framework,simudream/django-rest-framework,paolopaolopaolo/django-rest-framework,jpulec/django-rest-framework,James1345/django-rest-framework,fishky/django-rest-framework,ajaali/django-rest-framework,ashishfinoit/django-rest-framework,alacritythief/django-rest-framework,ticosax/django-rest-framework,cheif/django-rest-framework,wedaly/django-rest-framework,callorico/django-rest-framework,YBJAY00000/django-rest-framework,mgaitan/django-rest-framework,YBJAY00000/django-rest-framework,wzbozon/django-rest-framework,bluedazzle/django-rest-framework,elim/django-rest-framework,kylefox/django-rest-framework,alacritythief/django-rest-framework,kenny
dude/django-rest-framework,hunter007/django-rest-framework,abdulhaq-e/django-rest-framework,leeahoward/django-rest-framework,damycra/django-rest-framework,andriy-s/django-rest-framework,waytai/django-rest-framework,aericson/django-rest-framework,leeahoward/django-rest-framework,wzbozon/django-rest-framework,wangpanjun/django-rest-framework,tcroiset/django-rest-framework,bluedazzle/django-rest-framework,jness/django-rest-framework,tigeraniya/django-rest-framework,andriy-s/django-rest-framework,hnakamur/django-rest-framework,rhblind/django-rest-framework,nryoung/django-rest-framework,edx/django-rest-framework,AlexandreProenca/django-rest-framework,simudream/django-rest-framework,nhorelik/django-rest-framework,krinart/django-rest-framework,wangpanjun/django-rest-framework,davesque/django-rest-framework,ticosax/django-rest-framework,jpulec/django-rest-framework,hunter007/django-rest-framework,jpulec/django-rest-framework,maryokhin/django-rest-framework,iheitlager/django-rest-framework,rafaelcaricio/django-rest-framework,yiyocx/django-rest-framework,jpadilla/django-rest-framework,jerryhebert/django-rest-framework,ezheidtmann/django-rest-framework,waytai/django-rest-framework,tomchristie/django-rest-framework,gregmuellegger/django-rest-framework,wzbozon/django-rest-framework,James1345/django-rest-framework,werthen/django-rest-framework,atombrella/django-rest-framework,pombredanne/django-rest-framework,adambain-vokal/django-rest-framework,paolopaolopaolo/django-rest-framework,gregmuellegger/django-rest-framework,qsorix/django-rest-framework,raphaelmerx/django-rest-framework,dmwyatt/django-rest-framework,brandoncazander/django-rest-framework,xiaotangyuan/django-rest-framework,AlexandreProenca/django-rest-framework,werthen/django-rest-framework,kgeorgy/django-rest-framework,hnarayanan/django-rest-framework,jpadilla/django-rest-framework,ajaali/django-rest-framework,kylefox/django-rest-framework,ossanna16/django-rest-framework,justanr/django-rest-framework,pombredanne/django-
rest-framework,YBJAY00000/django-rest-framework,atombrella/django-rest-framework,kezabelle/django-rest-framework,James1345/django-rest-framework,MJafarMashhadi/django-rest-framework,iheitlager/django-rest-framework,wangpanjun/django-rest-framework,ticosax/django-rest-framework,edx/django-rest-framework,d0ugal/django-rest-framework,rubendura/django-rest-framework,HireAnEsquire/django-rest-framework,cheif/django-rest-framework,nryoung/django-rest-framework,AlexandreProenca/django-rest-framework,brandoncazander/django-rest-framework,arpheno/django-rest-framework,MJafarMashhadi/django-rest-framework,raphaelmerx/django-rest-framework,kennydude/django-rest-framework,nryoung/django-rest-framework,lubomir/django-rest-framework,ossanna16/django-rest-framework,thedrow/django-rest-framework-1,justanr/django-rest-framework,buptlsl/django-rest-framework,lubomir/django-rest-framework,vstoykov/django-rest-framework,zeldalink0515/django-rest-framework,raphaelmerx/django-rest-framework,damycra/django-rest-framework,ambivalentno/django-rest-framework,nhorelik/django-rest-framework,VishvajitP/django-rest-framework,kezabelle/django-rest-framework,sheppard/django-rest-framework,leeahoward/django-rest-framework,dmwyatt/django-rest-framework,wedaly/django-rest-framework,justanr/django-rest-framework,uruz/django-rest-framework,delinhabit/django-rest-framework,tomchristie/django-rest-framework,VishvajitP/django-rest-framework,canassa/django-rest-framework,rafaelang/django-rest-framework,andriy-s/django-rest-framework,hunter007/django-rest-framework,paolopaolopaolo/django-rest-framework,hnarayanan/django-rest-framework,HireAnEsquire/django-rest-framework,abdulhaq-e/django-rest-framework,jness/django-rest-framework,douwevandermeij/django-rest-framework,pombredanne/django-rest-framework,ossanna16/django-rest-framework,linovia/django-rest-framework,cyberj/django-rest-framework,wwj718/django-rest-framework,d0ugal/django-rest-framework,sheppard/django-rest-framework,sehmaschine/django-rest-framew
ork,tigeraniya/django-rest-framework,linovia/django-rest-framework,zeldalink0515/django-rest-framework,alacritythief/django-rest-framework,uruz/django-rest-framework,VishvajitP/django-rest-framework,ambivalentno/django-rest-framework,fishky/django-rest-framework,tomchristie/django-rest-framework,sbellem/django-rest-framework,zeldalink0515/django-rest-framework,arpheno/django-rest-framework,agconti/django-rest-framework,davesque/django-rest-framework,elim/django-rest-framework,jtiai/django-rest-framework,yiyocx/django-rest-framework,yiyocx/django-rest-framework,edx/django-rest-framework,johnraz/django-rest-framework,antonyc/django-rest-framework,ezheidtmann/django-rest-framework,ezheidtmann/django-rest-framework,rafaelcaricio/django-rest-framework,rafaelang/django-rest-framework | - from django.core.urlresolvers import resolve
+ from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
- def breadcrumbs_recursive(url, breadcrumbs_list):
+ def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
- breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
+ breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
- return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
+ return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
- return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
+ return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
+ prefix = get_script_prefix()
+ url = url[len(prefix):]
- return breadcrumbs_recursive(url, [])
+ return breadcrumbs_recursive(url, [], prefix)
| Use get_script_prefix to play nicely if not installed at the root. | ## Code Before:
from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
## Instruction:
Use get_script_prefix to play nicely if not installed at the root.
## Code After:
from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
| # ... existing code ...
from django.core.urlresolvers import resolve, get_script_prefix
# ... modified code ...
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
...
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
...
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
...
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
# ... rest of the code ... |
5aa55190bae3657e09f6c2fbdedb9ab71210fad5 | cocktails/drinks/models.py | cocktails/drinks/models.py | from django.db import models
# Create your models here.
class Ingredient(models.Model):
name = models.CharField(max_length=100)
abv = models.FloatField()
type = models.CharField(max_length=25)
def __str__(self):
return self.name
class Admin:
list_display = ('name')
class Meta:
ordering = ['id']
class IngredientLine(models.Model):
ing = models.ForeignKey(Ingredient, on_delete=models.CASCADE, default=1)
amt = models.FloatField(default=0)
def __str__(self):
return "{} ounces of {}".format(str(self.amt), self.ing.name)
class Drink(models.Model):
name = models.CharField(max_length=100)
owner = models.ForeignKey('auth.User', related_name='drinks')
ings = models.ManyToManyField(IngredientLine)
instructions = models.TextField()
def __str__(self):
return self.name
| from django.db import models
# Create your models here.
class Ingredient(models.Model):
name = models.CharField(max_length=100)
abv = models.FloatField()
type = models.CharField(max_length=25)
def __str__(self):
return self.name
class Admin:
list_display = ('name')
class Meta:
ordering = ['id']
class IngredientLine(models.Model):
ing = models.ForeignKey(Ingredient, on_delete=models.CASCADE, default=1)
amt = models.FloatField(default=0)
def __str__(self):
if self.amt == 0:
return self.ing.name
return "{} ounces of {}".format(str(self.amt), self.ing.name)
class Drink(models.Model):
name = models.CharField(max_length=100)
owner = models.ForeignKey('auth.User', related_name='drinks')
ings = models.ManyToManyField(IngredientLine)
instructions = models.TextField()
def __str__(self):
return self.name
| Remove 0.0 from ings line | Remove 0.0 from ings line
| Python | mit | jake-jake-jake/cocktails,jake-jake-jake/cocktails,jake-jake-jake/cocktails,jake-jake-jake/cocktails | from django.db import models
# Create your models here.
class Ingredient(models.Model):
name = models.CharField(max_length=100)
abv = models.FloatField()
type = models.CharField(max_length=25)
def __str__(self):
return self.name
class Admin:
list_display = ('name')
class Meta:
ordering = ['id']
class IngredientLine(models.Model):
ing = models.ForeignKey(Ingredient, on_delete=models.CASCADE, default=1)
amt = models.FloatField(default=0)
def __str__(self):
+ if self.amt == 0:
+ return self.ing.name
return "{} ounces of {}".format(str(self.amt), self.ing.name)
class Drink(models.Model):
name = models.CharField(max_length=100)
owner = models.ForeignKey('auth.User', related_name='drinks')
ings = models.ManyToManyField(IngredientLine)
instructions = models.TextField()
def __str__(self):
return self.name
| Remove 0.0 from ings line | ## Code Before:
from django.db import models
# Create your models here.
class Ingredient(models.Model):
name = models.CharField(max_length=100)
abv = models.FloatField()
type = models.CharField(max_length=25)
def __str__(self):
return self.name
class Admin:
list_display = ('name')
class Meta:
ordering = ['id']
class IngredientLine(models.Model):
ing = models.ForeignKey(Ingredient, on_delete=models.CASCADE, default=1)
amt = models.FloatField(default=0)
def __str__(self):
return "{} ounces of {}".format(str(self.amt), self.ing.name)
class Drink(models.Model):
name = models.CharField(max_length=100)
owner = models.ForeignKey('auth.User', related_name='drinks')
ings = models.ManyToManyField(IngredientLine)
instructions = models.TextField()
def __str__(self):
return self.name
## Instruction:
Remove 0.0 from ings line
## Code After:
from django.db import models
# Create your models here.
class Ingredient(models.Model):
name = models.CharField(max_length=100)
abv = models.FloatField()
type = models.CharField(max_length=25)
def __str__(self):
return self.name
class Admin:
list_display = ('name')
class Meta:
ordering = ['id']
class IngredientLine(models.Model):
ing = models.ForeignKey(Ingredient, on_delete=models.CASCADE, default=1)
amt = models.FloatField(default=0)
def __str__(self):
if self.amt == 0:
return self.ing.name
return "{} ounces of {}".format(str(self.amt), self.ing.name)
class Drink(models.Model):
name = models.CharField(max_length=100)
owner = models.ForeignKey('auth.User', related_name='drinks')
ings = models.ManyToManyField(IngredientLine)
instructions = models.TextField()
def __str__(self):
return self.name
| # ... existing code ...
def __str__(self):
if self.amt == 0:
return self.ing.name
return "{} ounces of {}".format(str(self.amt), self.ing.name)
# ... rest of the code ... |
98f638e5a42765a284a4fd006190507efee5363a | runtests.py | runtests.py |
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
this_dir = os.path.abspath(os.path.dirname(__file__))
if this_dir not in sys.path:
sys.path.insert(0, this_dir)
import django
from django.test.utils import get_runner
from django.conf import settings
def runtests():
if not settings.configured:
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(["base", "threads", "events", "managers"])
sys.exit(bool(failures))
if __name__ == "__main__":
runtests()
|
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
this_dir = os.path.abspath(os.path.dirname(__file__))
if this_dir not in sys.path:
sys.path.insert(0, this_dir)
import django
from django.test.runner import DiscoverRunner
from django.conf import settings
def runtests():
if not settings.configured:
django.setup()
test_runner = DiscoverRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(["base", "threads", "events", "managers"])
sys.exit(bool(failures))
if __name__ == "__main__":
runtests()
| Use the new test runner | Use the new test runner
| Python | bsd-2-clause | knagra/farnsworth,knagra/farnsworth,knagra/farnsworth,knagra/farnsworth |
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
this_dir = os.path.abspath(os.path.dirname(__file__))
if this_dir not in sys.path:
sys.path.insert(0, this_dir)
import django
- from django.test.utils import get_runner
+ from django.test.runner import DiscoverRunner
from django.conf import settings
def runtests():
if not settings.configured:
django.setup()
- TestRunner = get_runner(settings)
- test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
+ test_runner = DiscoverRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(["base", "threads", "events", "managers"])
sys.exit(bool(failures))
if __name__ == "__main__":
runtests()
| Use the new test runner | ## Code Before:
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
this_dir = os.path.abspath(os.path.dirname(__file__))
if this_dir not in sys.path:
sys.path.insert(0, this_dir)
import django
from django.test.utils import get_runner
from django.conf import settings
def runtests():
if not settings.configured:
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(["base", "threads", "events", "managers"])
sys.exit(bool(failures))
if __name__ == "__main__":
runtests()
## Instruction:
Use the new test runner
## Code After:
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
this_dir = os.path.abspath(os.path.dirname(__file__))
if this_dir not in sys.path:
sys.path.insert(0, this_dir)
import django
from django.test.runner import DiscoverRunner
from django.conf import settings
def runtests():
if not settings.configured:
django.setup()
test_runner = DiscoverRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(["base", "threads", "events", "managers"])
sys.exit(bool(failures))
if __name__ == "__main__":
runtests()
| # ... existing code ...
import django
from django.test.runner import DiscoverRunner
from django.conf import settings
# ... modified code ...
test_runner = DiscoverRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(["base", "threads", "events", "managers"])
# ... rest of the code ... |
e73409c17c89ef54f5c7e807059b229517e77617 | mailchute/smtpd/mailchute.py | mailchute/smtpd/mailchute.py | import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
allowed_receiver_domain = settings.RECEIVER_DOMAIN
recipient_domain = recipient.split('@')[1].lower()
return (allowed_receiver_domain is None
or recipient_domain == settings.RECEIVER_DOMAIN)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
| import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
recipient_domain = recipient.split('@')[1].lower()
allowed_receiver_domains = settings.RECEIVER_DOMAIN
if allowed_receiver_domains:
allowed_receiver_domains = allowed_receiver_domains.split(',')
return (allowed_receiver_domains is None
or recipient_domain in allowed_receiver_domains)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
| Handle multiple receiver domain properly | Handle multiple receiver domain properly
| Python | bsd-3-clause | kevinjqiu/mailchute,kevinjqiu/mailchute | import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
- allowed_receiver_domain = settings.RECEIVER_DOMAIN
recipient_domain = recipient.split('@')[1].lower()
+ allowed_receiver_domains = settings.RECEIVER_DOMAIN
+ if allowed_receiver_domains:
+ allowed_receiver_domains = allowed_receiver_domains.split(',')
- return (allowed_receiver_domain is None
+ return (allowed_receiver_domains is None
- or recipient_domain == settings.RECEIVER_DOMAIN)
+ or recipient_domain in allowed_receiver_domains)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
| Handle multiple receiver domain properly | ## Code Before:
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
allowed_receiver_domain = settings.RECEIVER_DOMAIN
recipient_domain = recipient.split('@')[1].lower()
return (allowed_receiver_domain is None
or recipient_domain == settings.RECEIVER_DOMAIN)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
## Instruction:
Handle multiple receiver domain properly
## Code After:
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
recipient_domain = recipient.split('@')[1].lower()
allowed_receiver_domains = settings.RECEIVER_DOMAIN
if allowed_receiver_domains:
allowed_receiver_domains = allowed_receiver_domains.split(',')
return (allowed_receiver_domains is None
or recipient_domain in allowed_receiver_domains)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
| ...
def _should_persist(self, recipient):
recipient_domain = recipient.split('@')[1].lower()
allowed_receiver_domains = settings.RECEIVER_DOMAIN
if allowed_receiver_domains:
allowed_receiver_domains = allowed_receiver_domains.split(',')
return (allowed_receiver_domains is None
or recipient_domain in allowed_receiver_domains)
... |
a1318a5ced6efc4ae88abc0b23190daea5899704 | open_humans/serializers.py | open_humans/serializers.py | from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField('get_profile_url')
class Meta:
model = User
fields = ('id', 'url', 'username')
def get_profile_url(self, obj):
return reverse('member_profile', args=(obj.id,))
| from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
| Make /api/profile return no private data | Make /api/profile return no private data
| Python | mit | OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans | from django.contrib.auth.models import User
- from django.core.urlresolvers import reverse
+ # from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
- url = serializers.SerializerMethodField('get_profile_url')
+ # url = serializers.SerializerMethodField('get_profile_url')
+ message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
- fields = ('id', 'url', 'username')
+ # fields = ('id', 'url', 'username')
+ fields = ('message',)
- def get_profile_url(self, obj):
+ # def get_profile_url(self, obj):
- return reverse('member_profile', args=(obj.id,))
+ # return reverse('member_profile', args=(obj.id,))
+ def get_message(self, obj):
+ return 'profiles are not yet implemented'
+ | Make /api/profile return no private data | ## Code Before:
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField('get_profile_url')
class Meta:
model = User
fields = ('id', 'url', 'username')
def get_profile_url(self, obj):
return reverse('member_profile', args=(obj.id,))
## Instruction:
Make /api/profile return no private data
## Code After:
from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
| ...
from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
...
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
...
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
... |
2288ff574db552dd5c078102f9bbed1b0c3b6490 | forms.py | forms.py | from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
from models import User
class LoginForm(Form):
username = TextField('username', [validators.Required()])
password = PasswordField('password', [validators.Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
| from flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
| Update Flask-WTF imports to >0.9.0-style | Update Flask-WTF imports to >0.9.0-style
| Python | mit | mahrz/kernkrieg,mahrz/kernkrieg,mahrz/kernkrieg | - from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
+ from flask.ext.wtf import Form
+ from wtforms.fields import TextField, PasswordField, BooleanField
+ from wtforms.validators import Required
from models import User
class LoginForm(Form):
- username = TextField('username', [validators.Required()])
+ username = TextField('username', [Required()])
- password = PasswordField('password', [validators.Required()])
+ password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
| Update Flask-WTF imports to >0.9.0-style | ## Code Before:
from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
from models import User
class LoginForm(Form):
username = TextField('username', [validators.Required()])
password = PasswordField('password', [validators.Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
## Instruction:
Update Flask-WTF imports to >0.9.0-style
## Code After:
from flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
| // ... existing code ...
from flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
// ... modified code ...
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
// ... rest of the code ... |
434bc5554dcd8b2f2bf4a3b4a1d1991746e86b78 | setup.py | setup.py | from distutils.core import setup
setup(
name = 'pybenchmark',
packages = ['pybenchmark'], # this must be the same as the name above
version = '0.0.5',
description = 'A benchmark utility used in performance tests.',
author = 'Eugene Duboviy',
author_email = '[email protected]',
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.5', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.md').read(),
install_requires=[
'psutil',
'gevent',
],
)
| from distutils.core import setup
setup(
name = 'pybenchmark',
packages = ['pybenchmark'], # this must be the same as the name above
version = '0.0.6',
description = 'A benchmark utility used in performance tests.',
author = 'Eugene Duboviy',
author_email = '[email protected]',
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.6', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description="""
When measuring execution time, the result depends on the computer hardware.
To be able to produce a universal measure, the simplest way is to benchmark the speed of a fixed sequence
of code and calculate a ratio out of it. From there, the time taken by a function can be translated to a
universal value that can be compared on any computer. Python provides a benchmark utility in its test
package that measures the duration of a sequence of well-chosen operations.
pybenchmark designed to provide a simple and pythonic way to get performance data.
""",
install_requires=[
'psutil',
'gevent',
],
)
| Fix long description; Add new PyPI release version | Fix long description; Add new PyPI release version
| Python | mit | duboviy/pybenchmark | from distutils.core import setup
setup(
name = 'pybenchmark',
packages = ['pybenchmark'], # this must be the same as the name above
- version = '0.0.5',
+ version = '0.0.6',
description = 'A benchmark utility used in performance tests.',
author = 'Eugene Duboviy',
author_email = '[email protected]',
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
- download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.5', # I'll explain this in a second
+ download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.6', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
],
- long_description=open('README.md').read(),
+ long_description="""
+ When measuring execution time, the result depends on the computer hardware.
+ To be able to produce a universal measure, the simplest way is to benchmark the speed of a fixed sequence
+ of code and calculate a ratio out of it. From there, the time taken by a function can be translated to a
+ universal value that can be compared on any computer. Python provides a benchmark utility in its test
+ package that measures the duration of a sequence of well-chosen operations.
+ pybenchmark designed to provide a simple and pythonic way to get performance data.
+ """,
install_requires=[
'psutil',
'gevent',
],
)
| Fix long description; Add new PyPI release version | ## Code Before:
from distutils.core import setup
setup(
name = 'pybenchmark',
packages = ['pybenchmark'], # this must be the same as the name above
version = '0.0.5',
description = 'A benchmark utility used in performance tests.',
author = 'Eugene Duboviy',
author_email = '[email protected]',
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.5', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.md').read(),
install_requires=[
'psutil',
'gevent',
],
)
## Instruction:
Fix long description; Add new PyPI release version
## Code After:
from distutils.core import setup
setup(
name = 'pybenchmark',
packages = ['pybenchmark'], # this must be the same as the name above
version = '0.0.6',
description = 'A benchmark utility used in performance tests.',
author = 'Eugene Duboviy',
author_email = '[email protected]',
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.6', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description="""
When measuring execution time, the result depends on the computer hardware.
To be able to produce a universal measure, the simplest way is to benchmark the speed of a fixed sequence
of code and calculate a ratio out of it. From there, the time taken by a function can be translated to a
universal value that can be compared on any computer. Python provides a benchmark utility in its test
package that measures the duration of a sequence of well-chosen operations.
pybenchmark designed to provide a simple and pythonic way to get performance data.
""",
install_requires=[
'psutil',
'gevent',
],
)
| // ... existing code ...
packages = ['pybenchmark'], # this must be the same as the name above
version = '0.0.6',
description = 'A benchmark utility used in performance tests.',
// ... modified code ...
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.6', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
...
],
long_description="""
When measuring execution time, the result depends on the computer hardware.
To be able to produce a universal measure, the simplest way is to benchmark the speed of a fixed sequence
of code and calculate a ratio out of it. From there, the time taken by a function can be translated to a
universal value that can be compared on any computer. Python provides a benchmark utility in its test
package that measures the duration of a sequence of well-chosen operations.
pybenchmark designed to provide a simple and pythonic way to get performance data.
""",
install_requires=[
// ... rest of the code ... |
9c848315eba6580249d1f9fc5b598a08ec818fed | tests/test_functions.py | tests/test_functions.py | """This module tests the TimeFunction class"""
import pytest
import pandas as pd
from tssim.functions import TimeFunction
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(TimeFunction(func).generate(ts))
| """This module tests the TimeFunction class"""
import pandas as pd
import pytest
import tssim
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
| Update reference in TimeFunction test. | Update reference in TimeFunction test.
| Python | mit | mansenfranzen/tssim | """This module tests the TimeFunction class"""
+ import pandas as pd
import pytest
- import pandas as pd
+ import tssim
- from tssim.functions import TimeFunction
-
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
- assert func(ts).equals(TimeFunction(func).generate(ts))
+ assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
| Update reference in TimeFunction test. | ## Code Before:
"""This module tests the TimeFunction class"""
import pytest
import pandas as pd
from tssim.functions import TimeFunction
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(TimeFunction(func).generate(ts))
## Instruction:
Update reference in TimeFunction test.
## Code After:
"""This module tests the TimeFunction class"""
import pandas as pd
import pytest
import tssim
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
| // ... existing code ...
import pandas as pd
import pytest
import tssim
// ... modified code ...
assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
// ... rest of the code ... |
5188561f7de7f6762e1820a6b447f144f963b1d0 | common/spaces.py | common/spaces.py | """Digital Ocean Spaces interaction"""
import boto3
from django.conf import settings
class SpacesBucket():
"""Interact with Spaces buckets"""
def __init__(self):
session = boto3.session.Session()
self._client = session.client('s3',
region_name='nyc3',
endpoint_url='https://nyc3.digitaloceanspaces.com',
aws_access_key_id=settings.SPACES_ACCESS_KEY_ID,
aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET)
def create(self, name="new-space-name"):
"""Create a new Space"""
self._client.create_bucket(Bucket=name)
def list(self):
"""List all buckets on your account"""
response = self._client.list_buckets()
spaces = [space['Name'] for space in response['Buckets']]
print("Spaces List: %s" % spaces)
| """Digital Ocean Spaces interaction"""
import boto3
from django.conf import settings
class SpacesBucket():
"""Interact with Spaces buckets"""
def __init__(self, space_name="lutris"):
session = boto3.session.Session()
self._client = session.client('s3',
region_name='nyc3',
endpoint_url='https://nyc3.digitaloceanspaces.com',
aws_access_key_id=settings.SPACES_ACCESS_KEY_ID,
aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET)
self.space_name = space_name
def create(self, name="new-space-name"):
"""Create a new Space"""
self._client.create_bucket(Bucket=name)
def list_spaces(self):
"""List all buckets on your account"""
response = self._client.list_buckets()
return [space['Name'] for space in response['Buckets']]
def upload(self, local_path, dest_path, public=False):
"""Upload a file to Spaces"""
self._client.upload_file(local_path, self.space_name, dest_path)
if public:
self._client.put_object_acl(
ACL="public-read",
Bucket=self.space_name,
Key=dest_path
)
| Add upload to Spaces API client | Add upload to Spaces API client
| Python | agpl-3.0 | lutris/website,lutris/website,lutris/website,lutris/website | """Digital Ocean Spaces interaction"""
import boto3
from django.conf import settings
class SpacesBucket():
"""Interact with Spaces buckets"""
- def __init__(self):
+ def __init__(self, space_name="lutris"):
session = boto3.session.Session()
self._client = session.client('s3',
region_name='nyc3',
endpoint_url='https://nyc3.digitaloceanspaces.com',
aws_access_key_id=settings.SPACES_ACCESS_KEY_ID,
aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET)
+ self.space_name = space_name
def create(self, name="new-space-name"):
"""Create a new Space"""
self._client.create_bucket(Bucket=name)
- def list(self):
+ def list_spaces(self):
"""List all buckets on your account"""
response = self._client.list_buckets()
- spaces = [space['Name'] for space in response['Buckets']]
+ return [space['Name'] for space in response['Buckets']]
- print("Spaces List: %s" % spaces)
+ def upload(self, local_path, dest_path, public=False):
+ """Upload a file to Spaces"""
+ self._client.upload_file(local_path, self.space_name, dest_path)
+ if public:
+ self._client.put_object_acl(
+ ACL="public-read",
+ Bucket=self.space_name,
+ Key=dest_path
+ )
+ | Add upload to Spaces API client | ## Code Before:
"""Digital Ocean Spaces interaction"""
import boto3
from django.conf import settings
class SpacesBucket():
"""Interact with Spaces buckets"""
def __init__(self):
session = boto3.session.Session()
self._client = session.client('s3',
region_name='nyc3',
endpoint_url='https://nyc3.digitaloceanspaces.com',
aws_access_key_id=settings.SPACES_ACCESS_KEY_ID,
aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET)
def create(self, name="new-space-name"):
"""Create a new Space"""
self._client.create_bucket(Bucket=name)
def list(self):
"""List all buckets on your account"""
response = self._client.list_buckets()
spaces = [space['Name'] for space in response['Buckets']]
print("Spaces List: %s" % spaces)
## Instruction:
Add upload to Spaces API client
## Code After:
"""Digital Ocean Spaces interaction"""
import boto3
from django.conf import settings
class SpacesBucket():
"""Interact with Spaces buckets"""
def __init__(self, space_name="lutris"):
session = boto3.session.Session()
self._client = session.client('s3',
region_name='nyc3',
endpoint_url='https://nyc3.digitaloceanspaces.com',
aws_access_key_id=settings.SPACES_ACCESS_KEY_ID,
aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET)
self.space_name = space_name
def create(self, name="new-space-name"):
"""Create a new Space"""
self._client.create_bucket(Bucket=name)
def list_spaces(self):
"""List all buckets on your account"""
response = self._client.list_buckets()
return [space['Name'] for space in response['Buckets']]
def upload(self, local_path, dest_path, public=False):
"""Upload a file to Spaces"""
self._client.upload_file(local_path, self.space_name, dest_path)
if public:
self._client.put_object_acl(
ACL="public-read",
Bucket=self.space_name,
Key=dest_path
)
| // ... existing code ...
"""Interact with Spaces buckets"""
def __init__(self, space_name="lutris"):
session = boto3.session.Session()
// ... modified code ...
aws_secret_access_key=settings.SPACES_ACCESS_KEY_SECRET)
self.space_name = space_name
...
def list_spaces(self):
"""List all buckets on your account"""
...
response = self._client.list_buckets()
return [space['Name'] for space in response['Buckets']]
def upload(self, local_path, dest_path, public=False):
"""Upload a file to Spaces"""
self._client.upload_file(local_path, self.space_name, dest_path)
if public:
self._client.put_object_acl(
ACL="public-read",
Bucket=self.space_name,
Key=dest_path
)
// ... rest of the code ... |
164fe2780554ddca5f66273e11efea37cfaf1368 | numba/tests/issues/test_issue_204.py | numba/tests/issues/test_issue_204.py | from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
print closure_modulo(100, 48)
| from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
| Fix tests for python 3 | Fix tests for python 3
| Python | bsd-2-clause | GaZ3ll3/numba,pombredanne/numba,ssarangi/numba,stefanseefeld/numba,GaZ3ll3/numba,shiquanwang/numba,jriehl/numba,gdementen/numba,sklam/numba,jriehl/numba,ssarangi/numba,stonebig/numba,sklam/numba,GaZ3ll3/numba,seibert/numba,numba/numba,gmarkall/numba,sklam/numba,GaZ3ll3/numba,gmarkall/numba,stonebig/numba,seibert/numba,seibert/numba,IntelLabs/numba,pombredanne/numba,seibert/numba,numba/numba,jriehl/numba,IntelLabs/numba,shiquanwang/numba,gmarkall/numba,pitrou/numba,IntelLabs/numba,numba/numba,sklam/numba,pombredanne/numba,gdementen/numba,cpcloud/numba,stuartarchibald/numba,GaZ3ll3/numba,IntelLabs/numba,pombredanne/numba,jriehl/numba,pitrou/numba,stonebig/numba,stefanseefeld/numba,stefanseefeld/numba,stuartarchibald/numba,cpcloud/numba,ssarangi/numba,gdementen/numba,gmarkall/numba,cpcloud/numba,stonebig/numba,numba/numba,numba/numba,stefanseefeld/numba,stefanseefeld/numba,stuartarchibald/numba,ssarangi/numba,ssarangi/numba,gdementen/numba,seibert/numba,cpcloud/numba,pitrou/numba,stuartarchibald/numba,jriehl/numba,pombredanne/numba,pitrou/numba,stonebig/numba,IntelLabs/numba,gdementen/numba,stuartarchibald/numba,shiquanwang/numba,pitrou/numba,cpcloud/numba,gmarkall/numba,sklam/numba | from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
+ def test_closure_modulo():
- print closure_modulo(100, 48)
+ assert closure_modulo(100, 48) == 4
+ if __name__ == '__main__':
+ test_closure_modulo()
+ | Fix tests for python 3 | ## Code Before:
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
print closure_modulo(100, 48)
## Instruction:
Fix tests for python 3
## Code After:
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
| // ... existing code ...
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
// ... rest of the code ... |
e8b8c257c71b6c02fa691557618261e6832fba94 | faker/providers/ssn/uk_UA/__init__.py | faker/providers/ssn/uk_UA/__init__.py | from __future__ import unicode_literals
from .. import Provider as SsnProvider
# Note: as there no SSN in Ukraine
# we get value added tax identification number (VATIN) here.
# It is also called "Ідентифікаційний номер платника податків" (in ukrainian).
# It contains only digits and length if 12.
class Provider(SsnProvider):
ssn_formats = ("############",)
| from __future__ import unicode_literals
from datetime import date
from .. import Provider as SsnProvider
from faker.providers.date_time import Provider as DateTimeProvider
class Provider(SsnProvider):
@classmethod
def ssn(cls):
"""
Ukrainian "Реєстраційний номер облікової картки платника податків"
also known as "Ідентифікаційний номер фізичної особи".
"""
digits = []
# Number of days between 1899-12-31 and a birth date
for digit in str((DateTimeProvider.date_object() -
date(1899, 12, 31)).days):
digits.append(int(digit))
# Person's sequence number
for _ in range(4):
digits.append(cls.random_int(0, 9))
checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
digits[8]*7)
# Remainder of a checksum divided by 11 or 1 if it equals to 10
digits.append(checksum % 11 % 10)
return ''.join(str(digit) for digit in digits)
| Make the Ukrainian SSN provider realer | Make the Ukrainian SSN provider realer
| Python | mit | joke2k/faker,danhuss/faker,trtd/faker,joke2k/faker | from __future__ import unicode_literals
+
+ from datetime import date
+
from .. import Provider as SsnProvider
+ from faker.providers.date_time import Provider as DateTimeProvider
-
-
- # Note: as there no SSN in Ukraine
- # we get value added tax identification number (VATIN) here.
- # It is also called "Ідентифікаційний номер платника податків" (in ukrainian).
- # It contains only digits and length if 12.
class Provider(SsnProvider):
- ssn_formats = ("############",)
+ @classmethod
+ def ssn(cls):
+ """
+ Ukrainian "Реєстраційний номер облікової картки платника податків"
+ also known as "Ідентифікаційний номер фізичної особи".
+ """
+ digits = []
+ # Number of days between 1899-12-31 and a birth date
+ for digit in str((DateTimeProvider.date_object() -
+ date(1899, 12, 31)).days):
+ digits.append(int(digit))
+
+ # Person's sequence number
+ for _ in range(4):
+ digits.append(cls.random_int(0, 9))
+
+ checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
+ digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
+ digits[8]*7)
+ # Remainder of a checksum divided by 11 or 1 if it equals to 10
+ digits.append(checksum % 11 % 10)
+
+ return ''.join(str(digit) for digit in digits)
+ | Make the Ukrainian SSN provider realer | ## Code Before:
from __future__ import unicode_literals
from .. import Provider as SsnProvider
# Note: as there no SSN in Ukraine
# we get value added tax identification number (VATIN) here.
# It is also called "Ідентифікаційний номер платника податків" (in ukrainian).
# It contains only digits and length if 12.
class Provider(SsnProvider):
ssn_formats = ("############",)
## Instruction:
Make the Ukrainian SSN provider realer
## Code After:
from __future__ import unicode_literals
from datetime import date
from .. import Provider as SsnProvider
from faker.providers.date_time import Provider as DateTimeProvider
class Provider(SsnProvider):
@classmethod
def ssn(cls):
"""
Ukrainian "Реєстраційний номер облікової картки платника податків"
also known as "Ідентифікаційний номер фізичної особи".
"""
digits = []
# Number of days between 1899-12-31 and a birth date
for digit in str((DateTimeProvider.date_object() -
date(1899, 12, 31)).days):
digits.append(int(digit))
# Person's sequence number
for _ in range(4):
digits.append(cls.random_int(0, 9))
checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
digits[8]*7)
# Remainder of a checksum divided by 11 or 1 if it equals to 10
digits.append(checksum % 11 % 10)
return ''.join(str(digit) for digit in digits)
| # ... existing code ...
from __future__ import unicode_literals
from datetime import date
from .. import Provider as SsnProvider
from faker.providers.date_time import Provider as DateTimeProvider
# ... modified code ...
class Provider(SsnProvider):
@classmethod
def ssn(cls):
"""
Ukrainian "Реєстраційний номер облікової картки платника податків"
also known as "Ідентифікаційний номер фізичної особи".
"""
digits = []
# Number of days between 1899-12-31 and a birth date
for digit in str((DateTimeProvider.date_object() -
date(1899, 12, 31)).days):
digits.append(int(digit))
# Person's sequence number
for _ in range(4):
digits.append(cls.random_int(0, 9))
checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
digits[8]*7)
# Remainder of a checksum divided by 11 or 1 if it equals to 10
digits.append(checksum % 11 % 10)
return ''.join(str(digit) for digit in digits)
# ... rest of the code ... |
3b146038ca6aebfdc11920cc688903124ccc2b3a | src/ggrc/converters/handlers/document.py | src/ggrc/converters/handlers/document.py |
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
pass
def set_obj_attr(self):
pass
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
|
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
def set_obj_attr(self):
self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
| Add import parser for url and evidence | Add import parser for url and evidence
| Python | apache-2.0 | selahssea/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core |
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
- pass
+ documents = []
+ for line in self.raw_value.splitlines():
+ link, title = line.split(None, 1) if " " in line else (line, line)
+ documents.append(models.Document(
+ link=link,
+ title=title,
+ modified_by_id=get_current_user_id(),
+ context=self.row_converter.obj.context,
+ ))
+
+ return documents
def set_obj_attr(self):
- pass
+ self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
| Add import parser for url and evidence | ## Code Before:
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
pass
def set_obj_attr(self):
pass
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
## Instruction:
Add import parser for url and evidence
## Code After:
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
def set_obj_attr(self):
self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
| ...
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
...
def set_obj_attr(self):
self.value = self.parse_item()
... |
1f5d52f18df2fba70b53acd681ebb381f532adff | tests/conftest.py | tests/conftest.py | import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
# XXX is this the right scope for this? This will remove log/ at the end of
# the test session.
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
| import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
Testing log directory is removed at the end of the test run!
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
| Document expected behaviour instead of leaving XXX comment | Document expected behaviour instead of leaving XXX comment
| Python | agpl-3.0 | wakermahmud/sync-engine,ErinCall/sync-engine,nylas/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,nylas/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,rmasters/inbox,rmasters/inbox,Eagles2F/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,ErinCall/sync-engine,closeio/nylas,jobscore/sync-engine,gale320/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,wakermahmud/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,nylas/sync-engine,rmasters/inbox,wakermahmud/sync-engine,jobscore/sync-engine,gale320/sync-engine,rmasters/inbox | import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
- # XXX is this the right scope for this? This will remove log/ at the end of
- # the test session.
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
+
+ Testing log directory is removed at the end of the test run!
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
| Document expected behaviour instead of leaving XXX comment | ## Code Before:
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
# XXX is this the right scope for this? This will remove log/ at the end of
# the test session.
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
## Instruction:
Document expected behaviour instead of leaving XXX comment
## Code After:
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
Testing log directory is removed at the end of the test run!
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
| # ... existing code ...
@pytest.fixture(scope='session')
# ... modified code ...
for setup but then call inbox.server.log.get_logger().
Testing log directory is removed at the end of the test run!
"""
# ... rest of the code ... |
540c5f2969e75a0f461e9d46090cfe8d92c53b00 | Simulator/plot.py | Simulator/plot.py | from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
y = xmlFileName[:-3]
return 'history_' + y + 'txt'
def plotFromXML(fileName,simulationTime,chemicalList):
historyFile = getHistoryFileName(fileName)
sim = XMLParser.getSimulator(fileName)
sim.simulate(int(simulationTime),historyFile)
sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
xmlFile = textToXML.getXMLFromTxt(fileName)
plotFromXML(xmlFile,simulationTime,chemicalList)
| from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
y = xmlFileName[:-3]
y = y + 'txt'
i = len(y) - 1
while i>=0 :
if y[i]=='\\' or y[i]=='/' :
break
i-=1
if i>=0 :
return y[:i+1] + 'history_' + y[i+1:]
else:
return 'history_' + y
def plotFromXML(fileName,simulationTime,chemicalList):
historyFile = getHistoryFileName(fileName)
sim = XMLParser.getSimulator(fileName)
sim.simulate(int(simulationTime),historyFile)
sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
xmlFile = textToXML.getXMLFromTxt(fileName)
plotFromXML(xmlFile,simulationTime,chemicalList)
| Remove history name error for absolute paths | Remove history name error for absolute paths
| Python | mit | aayushkapadia/chemical_reaction_simulator | from Simulator import *
import XMLParser
import textToXML
+
def getHistoryFileName(xmlFileName):
y = xmlFileName[:-3]
+ y = y + 'txt'
+
+ i = len(y) - 1
+ while i>=0 :
+ if y[i]=='\\' or y[i]=='/' :
+ break
+ i-=1
+
+ if i>=0 :
+ return y[:i+1] + 'history_' + y[i+1:]
+ else:
- return 'history_' + y + 'txt'
+ return 'history_' + y
+
def plotFromXML(fileName,simulationTime,chemicalList):
historyFile = getHistoryFileName(fileName)
sim = XMLParser.getSimulator(fileName)
sim.simulate(int(simulationTime),historyFile)
sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
xmlFile = textToXML.getXMLFromTxt(fileName)
plotFromXML(xmlFile,simulationTime,chemicalList)
| Remove history name error for absolute paths | ## Code Before:
from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
y = xmlFileName[:-3]
return 'history_' + y + 'txt'
def plotFromXML(fileName,simulationTime,chemicalList):
historyFile = getHistoryFileName(fileName)
sim = XMLParser.getSimulator(fileName)
sim.simulate(int(simulationTime),historyFile)
sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
xmlFile = textToXML.getXMLFromTxt(fileName)
plotFromXML(xmlFile,simulationTime,chemicalList)
## Instruction:
Remove history name error for absolute paths
## Code After:
from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
y = xmlFileName[:-3]
y = y + 'txt'
i = len(y) - 1
while i>=0 :
if y[i]=='\\' or y[i]=='/' :
break
i-=1
if i>=0 :
return y[:i+1] + 'history_' + y[i+1:]
else:
return 'history_' + y
def plotFromXML(fileName,simulationTime,chemicalList):
historyFile = getHistoryFileName(fileName)
sim = XMLParser.getSimulator(fileName)
sim.simulate(int(simulationTime),historyFile)
sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
xmlFile = textToXML.getXMLFromTxt(fileName)
plotFromXML(xmlFile,simulationTime,chemicalList)
| # ... existing code ...
def getHistoryFileName(xmlFileName):
# ... modified code ...
y = xmlFileName[:-3]
y = y + 'txt'
i = len(y) - 1
while i>=0 :
if y[i]=='\\' or y[i]=='/' :
break
i-=1
if i>=0 :
return y[:i+1] + 'history_' + y[i+1:]
else:
return 'history_' + y
# ... rest of the code ... |
4271d2ce0fc1cd2db4dab30aa59fece48c83f0bf | go/base/models.py | go/base/models.py | from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.conf import settings
from vumi.persist.riak_manager import RiakManager
from go.vumitools.account import AccountStore
from go.base.utils import vumi_api_for_user
def get_account_store():
return AccountStore(RiakManager.from_config(
settings.VUMI_API_CONFIG['riak_manager']))
def create_user_profile(sender, instance, created, **kwargs):
if created:
account = get_account_store().new_user(unicode(instance.username))
UserProfile.objects.create(user=instance, user_account=account.key)
user_api = vumi_api_for_user(instance)
# Enable search for the contact & group stores
user_api.contact_store.contacts.enable_search()
user_api.contact_store.groups.enable_search()
post_save.connect(create_user_profile, sender=User,
dispatch_uid='go.base.models.create_user_profile')
class UserProfile(models.Model):
"""A profile for a user"""
user = models.OneToOneField('auth.User')
user_account = models.CharField(max_length=100)
def __unicode__(self):
return u' '.join([self.user.first_name, self.user.last_name])
def get_user_account(self):
return get_account_store().get_user(self.user_account)
| from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.conf import settings
from vumi.persist.riak_manager import RiakManager
from go.vumitools.account import AccountStore
from go.base.utils import vumi_api_for_user
def get_account_store():
return AccountStore(RiakManager.from_config(
settings.VUMI_API_CONFIG['riak_manager']))
def create_user_profile(sender, instance, created, **kwargs):
if created:
account = get_account_store().new_user(unicode(instance.username))
UserProfile.objects.create(user=instance, user_account=account.key)
user_api = vumi_api_for_user(instance)
# Enable search for the contact & group stores
user_api.contact_store.contacts.enable_search()
user_api.contact_store.groups.enable_search()
post_save.connect(create_user_profile, sender=User,
dispatch_uid='go.base.models.create_user_profile')
class UserProfile(models.Model):
"""A profile for a user"""
user = models.OneToOneField('auth.User')
user_account = models.CharField(max_length=100)
def __unicode__(self):
return u' '.join([self.user.first_name, self.user.last_name])
def get_user_account(self):
return get_account_store().get_user(self.user_account)
| Enable search whenever a user profile is saved (to allow easier recovery from accounts created incorrectly). | Enable search whenever a user profile is saved (to allow easier recovery from accounts created incorrectly).
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.conf import settings
from vumi.persist.riak_manager import RiakManager
from go.vumitools.account import AccountStore
from go.base.utils import vumi_api_for_user
def get_account_store():
return AccountStore(RiakManager.from_config(
settings.VUMI_API_CONFIG['riak_manager']))
def create_user_profile(sender, instance, created, **kwargs):
if created:
account = get_account_store().new_user(unicode(instance.username))
UserProfile.objects.create(user=instance, user_account=account.key)
- user_api = vumi_api_for_user(instance)
+ user_api = vumi_api_for_user(instance)
- # Enable search for the contact & group stores
+ # Enable search for the contact & group stores
- user_api.contact_store.contacts.enable_search()
+ user_api.contact_store.contacts.enable_search()
- user_api.contact_store.groups.enable_search()
+ user_api.contact_store.groups.enable_search()
post_save.connect(create_user_profile, sender=User,
dispatch_uid='go.base.models.create_user_profile')
class UserProfile(models.Model):
"""A profile for a user"""
user = models.OneToOneField('auth.User')
user_account = models.CharField(max_length=100)
def __unicode__(self):
return u' '.join([self.user.first_name, self.user.last_name])
def get_user_account(self):
return get_account_store().get_user(self.user_account)
| Enable search whenever a user profile is saved (to allow easier recovery from accounts created incorrectly). | ## Code Before:
from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.conf import settings
from vumi.persist.riak_manager import RiakManager
from go.vumitools.account import AccountStore
from go.base.utils import vumi_api_for_user
def get_account_store():
return AccountStore(RiakManager.from_config(
settings.VUMI_API_CONFIG['riak_manager']))
def create_user_profile(sender, instance, created, **kwargs):
if created:
account = get_account_store().new_user(unicode(instance.username))
UserProfile.objects.create(user=instance, user_account=account.key)
user_api = vumi_api_for_user(instance)
# Enable search for the contact & group stores
user_api.contact_store.contacts.enable_search()
user_api.contact_store.groups.enable_search()
post_save.connect(create_user_profile, sender=User,
dispatch_uid='go.base.models.create_user_profile')
class UserProfile(models.Model):
"""A profile for a user"""
user = models.OneToOneField('auth.User')
user_account = models.CharField(max_length=100)
def __unicode__(self):
return u' '.join([self.user.first_name, self.user.last_name])
def get_user_account(self):
return get_account_store().get_user(self.user_account)
## Instruction:
Enable search whenever a user profile is saved (to allow easier recovery from accounts created incorrectly).
## Code After:
from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.conf import settings
from vumi.persist.riak_manager import RiakManager
from go.vumitools.account import AccountStore
from go.base.utils import vumi_api_for_user
def get_account_store():
return AccountStore(RiakManager.from_config(
settings.VUMI_API_CONFIG['riak_manager']))
def create_user_profile(sender, instance, created, **kwargs):
if created:
account = get_account_store().new_user(unicode(instance.username))
UserProfile.objects.create(user=instance, user_account=account.key)
user_api = vumi_api_for_user(instance)
# Enable search for the contact & group stores
user_api.contact_store.contacts.enable_search()
user_api.contact_store.groups.enable_search()
post_save.connect(create_user_profile, sender=User,
dispatch_uid='go.base.models.create_user_profile')
class UserProfile(models.Model):
"""A profile for a user"""
user = models.OneToOneField('auth.User')
user_account = models.CharField(max_length=100)
def __unicode__(self):
return u' '.join([self.user.first_name, self.user.last_name])
def get_user_account(self):
return get_account_store().get_user(self.user_account)
| ...
UserProfile.objects.create(user=instance, user_account=account.key)
user_api = vumi_api_for_user(instance)
# Enable search for the contact & group stores
user_api.contact_store.contacts.enable_search()
user_api.contact_store.groups.enable_search()
... |
d028f66964249bab928a29d92ab4cff075352546 | integration/main.py | integration/main.py | from spec import Spec, skip
class Tessera(Spec):
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
| from contextlib import contextmanager
import os
from shutil import rmtree
from tempfile import mkdtemp
from spec import Spec, skip
@contextmanager
def _tmp():
try:
tempdir = mkdtemp()
yield tempdir
finally:
rmtree(tempdir)
@contextmanager
def _db():
with _tmp() as tempdir:
from tessera import app, db
# Temp db location
path = os.path.join(tempdir, 'tessera.db')
dbfile = 'sqlite:///{0}'.format(path)
# Inform app of that location & setup
app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
db.create_all()
# Let test have its way with that temp db
yield db
class Config(object):
pass
def _config(**options):
config = Config()
for key, value in options.iteritems():
setattr(config, key, value)
class Tessera(Spec):
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def creates_a_nonempty_database_schema(self):
with _db() as db:
meta = db.MetaData()
meta.reflect(db.engine)
assert len(meta.tables) > 0
| Add temp DB test harness + basic test | Add temp DB test harness + basic test
| Python | apache-2.0 | tessera-metrics/tessera,jmptrader/tessera,aalpern/tessera,Slach/tessera,filippog/tessera,aalpern/tessera,aalpern/tessera,section-io/tessera,urbanairship/tessera,aalpern/tessera,urbanairship/tessera,Slach/tessera,jmptrader/tessera,urbanairship/tessera,Slach/tessera,urbanairship/tessera,urbanairship/tessera,tessera-metrics/tessera,section-io/tessera,aalpern/tessera,tessera-metrics/tessera,filippog/tessera,tessera-metrics/tessera,section-io/tessera,jmptrader/tessera,jmptrader/tessera,filippog/tessera,section-io/tessera,Slach/tessera,jmptrader/tessera,tessera-metrics/tessera | + from contextlib import contextmanager
+ import os
+ from shutil import rmtree
+ from tempfile import mkdtemp
+
from spec import Spec, skip
+
+
+ @contextmanager
+ def _tmp():
+ try:
+ tempdir = mkdtemp()
+ yield tempdir
+ finally:
+ rmtree(tempdir)
+
+ @contextmanager
+ def _db():
+ with _tmp() as tempdir:
+ from tessera import app, db
+ # Temp db location
+ path = os.path.join(tempdir, 'tessera.db')
+ dbfile = 'sqlite:///{0}'.format(path)
+ # Inform app of that location & setup
+ app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
+ db.create_all()
+ # Let test have its way with that temp db
+ yield db
+
+
+ class Config(object):
+ pass
+
+ def _config(**options):
+ config = Config()
+ for key, value in options.iteritems():
+ setattr(config, key, value)
class Tessera(Spec):
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
+ def creates_a_nonempty_database_schema(self):
+ with _db() as db:
+ meta = db.MetaData()
+ meta.reflect(db.engine)
+ assert len(meta.tables) > 0
+ | Add temp DB test harness + basic test | ## Code Before:
from spec import Spec, skip
class Tessera(Spec):
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
## Instruction:
Add temp DB test harness + basic test
## Code After:
from contextlib import contextmanager
import os
from shutil import rmtree
from tempfile import mkdtemp
from spec import Spec, skip
@contextmanager
def _tmp():
try:
tempdir = mkdtemp()
yield tempdir
finally:
rmtree(tempdir)
@contextmanager
def _db():
with _tmp() as tempdir:
from tessera import app, db
# Temp db location
path = os.path.join(tempdir, 'tessera.db')
dbfile = 'sqlite:///{0}'.format(path)
# Inform app of that location & setup
app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
db.create_all()
# Let test have its way with that temp db
yield db
class Config(object):
pass
def _config(**options):
config = Config()
for key, value in options.iteritems():
setattr(config, key, value)
class Tessera(Spec):
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def creates_a_nonempty_database_schema(self):
with _db() as db:
meta = db.MetaData()
meta.reflect(db.engine)
assert len(meta.tables) > 0
| ...
from contextlib import contextmanager
import os
from shutil import rmtree
from tempfile import mkdtemp
from spec import Spec, skip
@contextmanager
def _tmp():
try:
tempdir = mkdtemp()
yield tempdir
finally:
rmtree(tempdir)
@contextmanager
def _db():
with _tmp() as tempdir:
from tessera import app, db
# Temp db location
path = os.path.join(tempdir, 'tessera.db')
dbfile = 'sqlite:///{0}'.format(path)
# Inform app of that location & setup
app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
db.create_all()
# Let test have its way with that temp db
yield db
class Config(object):
pass
def _config(**options):
config = Config()
for key, value in options.iteritems():
setattr(config, key, value)
...
assert tessera.db
def creates_a_nonempty_database_schema(self):
with _db() as db:
meta = db.MetaData()
meta.reflect(db.engine)
assert len(meta.tables) > 0
... |
f264a06db669df1017df60d932b301dac7208233 | sqk/datasets/models.py | sqk/datasets/models.py | from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
| from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
| Add is_label_name field to Feature model | Add is_label_name field to Feature model
| Python | bsd-3-clause | sloria/sepal,sloria/sepal,sloria/sepal,sloria/sepal,sloria/sepal | from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
+ is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
| Add is_label_name field to Feature model | ## Code Before:
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
## Instruction:
Add is_label_name field to Feature model
## Code After:
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
| ...
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
... |
25351cd6b9119ea27123a2fddbbcc274c3620886 | examples/examples.py | examples/examples.py | from __future__ import print_function, division
import numpy as np
import matplotlib.pylab as plt
import seaborn as sns
from multidensity import MultiDensity
from skewstudent import SkewStudent
def estimate_bivariate_mle():
ndim = 2
size = (1000, ndim)
data = np.random.normal(size=size)
eta, lam = 4, -.9
skst = SkewStudent(eta=eta, lam=lam)
data = skst.rvs(size=size)
out = MultiDensity.fit_mle(data=data)
print(out)
mdens = MultiDensity()
mdens.from_theta(out.x)
fig, axes = plt.subplots(nrows=size[1], ncols=1)
for innov, ax in zip(data.T, axes):
sns.kdeplot(innov, ax=ax)
lines = [ax.get_lines()[0].get_xdata() for ax in axes]
lines = np.vstack(lines).T
marginals = mdens.marginals(lines)
for line, margin, ax in zip(lines.T, marginals.T, axes):
ax.plot(line, margin)
plt.show()
if __name__ == '__main__':
estimate_bivariate_mle()
| from __future__ import print_function, division
import numpy as np
import matplotlib.pylab as plt
import seaborn as sns
from multidensity import MultiDensity
from skewstudent import SkewStudent
def estimate_bivariate_mle():
ndim = 2
size = (1000, ndim)
data = np.random.normal(size=size)
eta, lam = 4, -.9
skst = SkewStudent(eta=eta, lam=lam)
data = skst.rvs(size=size)
out = MultiDensity.fit_mle(data=data)
print(out)
mdens = MultiDensity()
mdens.from_theta(out.x)
fig, axes = plt.subplots(nrows=size[1], ncols=1)
for innov, ax in zip(data.T, axes):
sns.kdeplot(innov, ax=ax, label='data')
lines = [ax.get_lines()[0].get_xdata() for ax in axes]
lines = np.vstack(lines).T
marginals = mdens.marginals(lines)
for line, margin, ax in zip(lines.T, marginals.T, axes):
ax.plot(line, margin, label='fitted')
ax.legend()
plt.show()
if __name__ == '__main__':
estimate_bivariate_mle()
| Add plot legend in the example | Add plot legend in the example
| Python | mit | khrapovs/multidensity | from __future__ import print_function, division
import numpy as np
import matplotlib.pylab as plt
import seaborn as sns
from multidensity import MultiDensity
from skewstudent import SkewStudent
def estimate_bivariate_mle():
ndim = 2
size = (1000, ndim)
data = np.random.normal(size=size)
eta, lam = 4, -.9
skst = SkewStudent(eta=eta, lam=lam)
data = skst.rvs(size=size)
out = MultiDensity.fit_mle(data=data)
print(out)
mdens = MultiDensity()
mdens.from_theta(out.x)
fig, axes = plt.subplots(nrows=size[1], ncols=1)
for innov, ax in zip(data.T, axes):
- sns.kdeplot(innov, ax=ax)
+ sns.kdeplot(innov, ax=ax, label='data')
lines = [ax.get_lines()[0].get_xdata() for ax in axes]
lines = np.vstack(lines).T
marginals = mdens.marginals(lines)
for line, margin, ax in zip(lines.T, marginals.T, axes):
- ax.plot(line, margin)
+ ax.plot(line, margin, label='fitted')
+ ax.legend()
plt.show()
if __name__ == '__main__':
estimate_bivariate_mle()
| Add plot legend in the example | ## Code Before:
from __future__ import print_function, division
import numpy as np
import matplotlib.pylab as plt
import seaborn as sns
from multidensity import MultiDensity
from skewstudent import SkewStudent
def estimate_bivariate_mle():
ndim = 2
size = (1000, ndim)
data = np.random.normal(size=size)
eta, lam = 4, -.9
skst = SkewStudent(eta=eta, lam=lam)
data = skst.rvs(size=size)
out = MultiDensity.fit_mle(data=data)
print(out)
mdens = MultiDensity()
mdens.from_theta(out.x)
fig, axes = plt.subplots(nrows=size[1], ncols=1)
for innov, ax in zip(data.T, axes):
sns.kdeplot(innov, ax=ax)
lines = [ax.get_lines()[0].get_xdata() for ax in axes]
lines = np.vstack(lines).T
marginals = mdens.marginals(lines)
for line, margin, ax in zip(lines.T, marginals.T, axes):
ax.plot(line, margin)
plt.show()
if __name__ == '__main__':
estimate_bivariate_mle()
## Instruction:
Add plot legend in the example
## Code After:
from __future__ import print_function, division
import numpy as np
import matplotlib.pylab as plt
import seaborn as sns
from multidensity import MultiDensity
from skewstudent import SkewStudent
def estimate_bivariate_mle():
ndim = 2
size = (1000, ndim)
data = np.random.normal(size=size)
eta, lam = 4, -.9
skst = SkewStudent(eta=eta, lam=lam)
data = skst.rvs(size=size)
out = MultiDensity.fit_mle(data=data)
print(out)
mdens = MultiDensity()
mdens.from_theta(out.x)
fig, axes = plt.subplots(nrows=size[1], ncols=1)
for innov, ax in zip(data.T, axes):
sns.kdeplot(innov, ax=ax, label='data')
lines = [ax.get_lines()[0].get_xdata() for ax in axes]
lines = np.vstack(lines).T
marginals = mdens.marginals(lines)
for line, margin, ax in zip(lines.T, marginals.T, axes):
ax.plot(line, margin, label='fitted')
ax.legend()
plt.show()
if __name__ == '__main__':
estimate_bivariate_mle()
| // ... existing code ...
for innov, ax in zip(data.T, axes):
sns.kdeplot(innov, ax=ax, label='data')
// ... modified code ...
for line, margin, ax in zip(lines.T, marginals.T, axes):
ax.plot(line, margin, label='fitted')
ax.legend()
// ... rest of the code ... |
bca3c8f7b2c12b86e0d200009d23201bdc05d716 | make_spectra.py | make_spectra.py | import randspectra as rs
import sys
import os.path as path
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
| import randspectra as rs
import sys
import os.path as path
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
halo.save_file()
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
| Handle the case where the savefile already exists by moving it out of the way | Handle the case where the savefile already exists by moving it out of the way
| Python | mit | sbird/vw_spectra | import randspectra as rs
import sys
import os.path as path
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
+ halo.save_file()
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
| Handle the case where the savefile already exists by moving it out of the way | ## Code Before:
import randspectra as rs
import sys
import os.path as path
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
## Instruction:
Handle the case where the savefile already exists by moving it out of the way
## Code After:
import randspectra as rs
import sys
import os.path as path
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
halo.save_file()
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
| ...
halo = rs.RandSpectra(snapnum, base)
halo.save_file()
halo.get_observer_tau("Si",2)
... |
2094f2ef5a47703a881643b8ca25a632fe54e892 | under_overfitting.py | under_overfitting.py | import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([("polynomial_features", poly_features),
("linear_regression", model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring="mean_squared_error", cv=10)
print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
degrees[i], -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([('polynomial_features', poly_features),
('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring='mean_squared_error', cv=10)
print('Degree {:>2}: mse = {}, std = {}'.format(
d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| Complete walk of polynomial degrees to find most balance between under and overfitting | Complete walk of polynomial degrees to find most balance between under and overfitting
| Python | mit | noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit | import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
- pipeline = Pipeline([("polynomial_features", poly_features),
+ pipeline = Pipeline([('polynomial_features', poly_features),
- ("linear_regression", model)])
+ ('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
- scoring="mean_squared_error", cv=10)
+ scoring='mean_squared_error', cv=10)
- print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
+ print('Degree {:>2}: mse = {}, std = {}'.format(
- degrees[i], -scores.mean(), scores.std()))
+ d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| Complete walk of polynomial degrees to find most balance between under and overfitting | ## Code Before:
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([("polynomial_features", poly_features),
("linear_regression", model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring="mean_squared_error", cv=10)
print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
degrees[i], -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
## Instruction:
Complete walk of polynomial degrees to find most balance between under and overfitting
## Code After:
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([('polynomial_features', poly_features),
('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring='mean_squared_error', cv=10)
print('Degree {:>2}: mse = {}, std = {}'.format(
d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| # ... existing code ...
model = LinearRegression()
pipeline = Pipeline([('polynomial_features', poly_features),
('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
# ... modified code ...
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring='mean_squared_error', cv=10)
print('Degree {:>2}: mse = {}, std = {}'.format(
d, -scores.mean(), scores.std()))
# ... rest of the code ... |
8b07dde78e753f6dce663481a68856024ed2fc49 | plutokore/__init__.py | plutokore/__init__.py | from .environments.makino import MakinoProfile
from .environments.king import KingProfile
from .jet import AstroJet
from . import luminosity
from . import plotting
from . import simulations
from . import helpers
from . import io
__all__ = [
'environments',
'luminosity',
'plotting',
'simulations',
'jet',
'helpers',
'io',
]
| from .environments.makino import MakinoProfile
from .environments.king import KingProfile
from .jet import AstroJet
from . import luminosity
from . import plotting
from . import simulations
from . import helpers
from . import io
from . import configuration
__all__ = [
'environments',
'luminosity',
'plotting',
'simulations',
'jet',
'helpers',
'io',
'configuration',
]
| Add configuration module to package exports | Add configuration module to package exports
| Python | mit | opcon/plutokore,opcon/plutokore | from .environments.makino import MakinoProfile
from .environments.king import KingProfile
from .jet import AstroJet
from . import luminosity
from . import plotting
from . import simulations
from . import helpers
from . import io
+ from . import configuration
__all__ = [
'environments',
'luminosity',
'plotting',
'simulations',
'jet',
'helpers',
'io',
+ 'configuration',
]
| Add configuration module to package exports | ## Code Before:
from .environments.makino import MakinoProfile
from .environments.king import KingProfile
from .jet import AstroJet
from . import luminosity
from . import plotting
from . import simulations
from . import helpers
from . import io
__all__ = [
'environments',
'luminosity',
'plotting',
'simulations',
'jet',
'helpers',
'io',
]
## Instruction:
Add configuration module to package exports
## Code After:
from .environments.makino import MakinoProfile
from .environments.king import KingProfile
from .jet import AstroJet
from . import luminosity
from . import plotting
from . import simulations
from . import helpers
from . import io
from . import configuration
__all__ = [
'environments',
'luminosity',
'plotting',
'simulations',
'jet',
'helpers',
'io',
'configuration',
]
| // ... existing code ...
from . import io
from . import configuration
// ... modified code ...
'io',
'configuration',
]
// ... rest of the code ... |
b7c52258d39e5c0ee8fba2be87e8e671e0c583c3 | xclib/postfix_io.py | xclib/postfix_io.py | import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
| import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
elif line == 'quit':
yield ('quit',)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
| Add quit command to postfix | Add quit command to postfix
| Python | mit | jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth | import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
+ elif line == 'quit':
+ yield ('quit',)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
| Add quit command to postfix | ## Code Before:
import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
## Instruction:
Add quit command to postfix
## Code After:
import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
elif line == 'quit':
yield ('quit',)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
| # ... existing code ...
yield ('isuser',) + match.group(1,2)
elif line == 'quit':
yield ('quit',)
else:
# ... rest of the code ... |
6fe588ea915d65fdab00b53f883b0a72ef6cf564 | tests/test_apd.py | tests/test_apd.py | import json
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
def test_output_strips_email_spaces():
json_str = scraper(open(INPUT_FILE))
data = json.loads(json_str)
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"[email protected]",
"[email protected]",
"[email protected]",
]
assert offensive_field == expected
| import json
import os
import pytest
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
@pytest.fixture
def data():
json_str = scraper(open(INPUT_FILE))
artifact_dir = os.getenv('CIRCLE_ARTIFACTS')
if artifact_dir:
artifact_file = os.path.join(artifact_dir, 'apd.json')
open(artifact_file, 'w').write(json_str)
return json.loads(json_str)
def test_output_strips_email_spaces(data):
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"[email protected]",
"[email protected]",
"[email protected]",
]
assert offensive_field == expected
| Switch to pytest fixture and generate artifact for circle ci | Switch to pytest fixture and generate artifact for circle ci
| Python | mit | sfbrigade/sf-openreferral-datalib | import json
+ import os
+
+ import pytest
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
- def test_output_strips_email_spaces():
+ @pytest.fixture
+ def data():
json_str = scraper(open(INPUT_FILE))
+ artifact_dir = os.getenv('CIRCLE_ARTIFACTS')
+ if artifact_dir:
+ artifact_file = os.path.join(artifact_dir, 'apd.json')
+ open(artifact_file, 'w').write(json_str)
+
- data = json.loads(json_str)
+ return json.loads(json_str)
+
+
+ def test_output_strips_email_spaces(data):
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"[email protected]",
"[email protected]",
"[email protected]",
]
assert offensive_field == expected
| Switch to pytest fixture and generate artifact for circle ci | ## Code Before:
import json
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
def test_output_strips_email_spaces():
json_str = scraper(open(INPUT_FILE))
data = json.loads(json_str)
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"[email protected]",
"[email protected]",
"[email protected]",
]
assert offensive_field == expected
## Instruction:
Switch to pytest fixture and generate artifact for circle ci
## Code After:
import json
import os
import pytest
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
@pytest.fixture
def data():
json_str = scraper(open(INPUT_FILE))
artifact_dir = os.getenv('CIRCLE_ARTIFACTS')
if artifact_dir:
artifact_file = os.path.join(artifact_dir, 'apd.json')
open(artifact_file, 'w').write(json_str)
return json.loads(json_str)
def test_output_strips_email_spaces(data):
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"[email protected]",
"[email protected]",
"[email protected]",
]
assert offensive_field == expected
| # ... existing code ...
import json
import os
import pytest
# ... modified code ...
@pytest.fixture
def data():
json_str = scraper(open(INPUT_FILE))
...
artifact_dir = os.getenv('CIRCLE_ARTIFACTS')
if artifact_dir:
artifact_file = os.path.join(artifact_dir, 'apd.json')
open(artifact_file, 'w').write(json_str)
return json.loads(json_str)
def test_output_strips_email_spaces(data):
offensive_field = data[70]["locations"][0]["emails"]
# ... rest of the code ... |
3220b356297ec5fe61888a906543d0ee993f9f31 | website/tests/test_database.py | website/tests/test_database.py | import database
def test_encode_csv():
attributes = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
'+', 'R', 'H', 204, 'exon1', 123, False
)
result = database.encode_csv(*attributes)
assert result == '+RH0cc:exon1:7b'
def test_decode_csv():
encoded_csv = '+RH0cc:exon1:7b'
result = database.decode_csv(encoded_csv)
assert result == dict(zip(
('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
('+', 'R', 'H', 68, 204, 'exon1', 123, False)
))
| import database
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
| Add more tests to database | Add more tests to database
| Python | lgpl-2.1 | reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB | import database
def test_encode_csv():
- attributes = (
+ test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
- '+', 'R', 'H', 204, 'exon1', 123, False
+ (('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
+ (('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
+ for attributes, correct_result in test_data:
- result = database.encode_csv(*attributes)
+ result = database.encode_csv(*attributes)
- assert result == '+RH0cc:exon1:7b'
+ assert result == correct_result
def test_decode_csv():
- encoded_csv = '+RH0cc:exon1:7b'
+ keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
+ test_data = (
+ ('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
+ ('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
+ )
+ for encoded_csv, correct_result in test_data:
- result = database.decode_csv(encoded_csv)
+ result = database.decode_csv(encoded_csv)
+ assert result == dict(zip(keys, correct_result))
- assert result == dict(zip(
- ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
- ('+', 'R', 'H', 68, 204, 'exon1', 123, False)
- ))
| Add more tests to database | ## Code Before:
import database
def test_encode_csv():
attributes = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
'+', 'R', 'H', 204, 'exon1', 123, False
)
result = database.encode_csv(*attributes)
assert result == '+RH0cc:exon1:7b'
def test_decode_csv():
encoded_csv = '+RH0cc:exon1:7b'
result = database.decode_csv(encoded_csv)
assert result == dict(zip(
('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
('+', 'R', 'H', 68, 204, 'exon1', 123, False)
))
## Instruction:
Add more tests to database
## Code After:
import database
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
| # ... existing code ...
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
# ... modified code ...
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
# ... rest of the code ... |
47bb8e983dad168451d65c0032f5568357a8d359 | battlesnake/plugins/imc2/triggers.py | battlesnake/plugins/imc2/triggers.py | import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
| import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
| Adjust IMC2 trigger regex to handle multiple colons correctly. | Adjust IMC2 trigger regex to handle multiple colons correctly.
| Python | bsd-3-clause | gtaylor/btmux_battlesnake | import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
- line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
+ line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
| Adjust IMC2 trigger regex to handle multiple colons correctly. | ## Code Before:
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
## Instruction:
Adjust IMC2 trigger regex to handle multiple colons correctly.
## Code After:
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
| # ... existing code ...
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
# ... rest of the code ... |
7c68e3b00e7c66c0223617447e16a7159118d284 | goldstone/addons/utils.py | goldstone/addons/utils.py | """Addon utilities."""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def update_addon_node():
"""Update the persistent resource graph's Addon node.
This is much simpler than the update_xxxxx_nodes functions that update
nodes for cloud entities. There will be only one Addon node in the table,
and all add-ons will be owned by it. If we're running for the first time,
the Addon node needs to be created. If it's already there, we leave it
alone.
"""
from goldstone.core.models import Addon
Addon.objects.get_or_create(native_id="Add-on", native_name="Add-on")
| """Addon utilities."""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def update_addon_node():
"""Update the persistent resource graph's Addon node.
This is much simpler than the update_xxxxx_nodes functions that update
nodes for cloud entities. There will be only one Addon node in the table,
and all add-ons will be owned by it. If we're running for the first time,
the Addon node needs to be created. If it's already there, we leave it
alone.
This also differs from update_xxxxx_nodes by returning the Addon node that
is found or created.
"""
from goldstone.core.models import Addon
result, _ = Addon.objects.get_or_create(native_id="Add-on",
native_name="Add-on")
return result
| Change update_addon_node() to return the Addon node, whether created or found. | Change update_addon_node() to return the Addon node, whether created or found.
| Python | apache-2.0 | slashk/goldstone-server,slashk/goldstone-server,Solinea/goldstone-server,slashk/goldstone-server,slashk/goldstone-server,Solinea/goldstone-server,Solinea/goldstone-server,Solinea/goldstone-server,Solinea/goldstone-server,slashk/goldstone-server | """Addon utilities."""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def update_addon_node():
"""Update the persistent resource graph's Addon node.
This is much simpler than the update_xxxxx_nodes functions that update
nodes for cloud entities. There will be only one Addon node in the table,
and all add-ons will be owned by it. If we're running for the first time,
the Addon node needs to be created. If it's already there, we leave it
alone.
+ This also differs from update_xxxxx_nodes by returning the Addon node that
+ is found or created.
+
"""
from goldstone.core.models import Addon
- Addon.objects.get_or_create(native_id="Add-on", native_name="Add-on")
+ result, _ = Addon.objects.get_or_create(native_id="Add-on",
+ native_name="Add-on")
+ return result
+ | Change update_addon_node() to return the Addon node, whether created or found. | ## Code Before:
"""Addon utilities."""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def update_addon_node():
"""Update the persistent resource graph's Addon node.
This is much simpler than the update_xxxxx_nodes functions that update
nodes for cloud entities. There will be only one Addon node in the table,
and all add-ons will be owned by it. If we're running for the first time,
the Addon node needs to be created. If it's already there, we leave it
alone.
"""
from goldstone.core.models import Addon
Addon.objects.get_or_create(native_id="Add-on", native_name="Add-on")
## Instruction:
Change update_addon_node() to return the Addon node, whether created or found.
## Code After:
"""Addon utilities."""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def update_addon_node():
"""Update the persistent resource graph's Addon node.
This is much simpler than the update_xxxxx_nodes functions that update
nodes for cloud entities. There will be only one Addon node in the table,
and all add-ons will be owned by it. If we're running for the first time,
the Addon node needs to be created. If it's already there, we leave it
alone.
This also differs from update_xxxxx_nodes by returning the Addon node that
is found or created.
"""
from goldstone.core.models import Addon
result, _ = Addon.objects.get_or_create(native_id="Add-on",
native_name="Add-on")
return result
| ...
This also differs from update_xxxxx_nodes by returning the Addon node that
is found or created.
"""
...
result, _ = Addon.objects.get_or_create(native_id="Add-on",
native_name="Add-on")
return result
... |
6bcd2ffc67dfbb1265d4df1f69de8e8b45376889 | src/foremast/slacknotify/slack_notification.py | src/foremast/slacknotify/slack_notification.py | """Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| """Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| Resolve `message` variable missing error | fix: Resolve `message` variable missing error
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
+ message = get_template(
+ template_file='slack-templates/pipeline-prepare-ran.j2',
+ info=self.info)
+
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| Resolve `message` variable missing error | ## Code Before:
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
## Instruction:
Resolve `message` variable missing error
## Code After:
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| ...
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
... |
ea3660bcc1a9f7be619def8e26dd7b0ab4a873cf | estmator_project/est_client/forms.py | estmator_project/est_client/forms.py | from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
'first_name': TextInput(attrs={'required': True}),
'last_name': TextInput(attrs={'required': True}),
'title': TextInput(attrs={'required': True}),
'cell': TextInput(attrs={'required': True}),
'email': TextInput(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
'phone': TextInput(attrs={'required': True}),
'address': TextInput(attrs={'required': True}),
'city': TextInput(attrs={'required': True}),
'postal': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| Make fields required on new client and company | Make fields required on new client and company
| Python | mit | Estmator/EstmatorApp,Estmator/EstmatorApp,Estmator/EstmatorApp | from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
+ 'first_name': TextInput(attrs={'required': True}),
+ 'last_name': TextInput(attrs={'required': True}),
+ 'title': TextInput(attrs={'required': True}),
+ 'cell': TextInput(attrs={'required': True}),
+ 'email': TextInput(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
+ 'phone': TextInput(attrs={'required': True}),
+ 'address': TextInput(attrs={'required': True}),
+ 'city': TextInput(attrs={'required': True}),
+ 'postal': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| Make fields required on new client and company | ## Code Before:
from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
## Instruction:
Make fields required on new client and company
## Code After:
from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
'first_name': TextInput(attrs={'required': True}),
'last_name': TextInput(attrs={'required': True}),
'title': TextInput(attrs={'required': True}),
'cell': TextInput(attrs={'required': True}),
'email': TextInput(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
'phone': TextInput(attrs={'required': True}),
'address': TextInput(attrs={'required': True}),
'city': TextInput(attrs={'required': True}),
'postal': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| ...
'company': Select(attrs={'required': True}),
'first_name': TextInput(attrs={'required': True}),
'last_name': TextInput(attrs={'required': True}),
'title': TextInput(attrs={'required': True}),
'cell': TextInput(attrs={'required': True}),
'email': TextInput(attrs={'required': True}),
}
...
'company_name': TextInput(attrs={'required': True}),
'phone': TextInput(attrs={'required': True}),
'address': TextInput(attrs={'required': True}),
'city': TextInput(attrs={'required': True}),
'postal': TextInput(attrs={'required': True}),
}
... |
b6ee793158d549f3d04d42ecbeb1c63605d6258f | src/setup.py | src/setup.py | import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
| import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
| Add C++ compilation flag to ensure deterministic behavior | Add C++ compilation flag to ensure deterministic behavior
More information: https://github.com/spotify/annoy/pull/205
| Python | apache-2.0 | bittremieux/ANN-SoLo,bittremieux/ANN-SoLo | import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
- compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
+ compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
| Add C++ compilation flag to ensure deterministic behavior | ## Code Before:
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
## Instruction:
Add C++ compilation flag to ensure deterministic behavior
## Code After:
import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
| ...
compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
... |
8562a58501aaa3f53a6aef5a0c1fab60aafb7c61 | scuole/states/models.py | scuole/states/models.py | from __future__ import absolute_import, unicode_literals
from localflavor.us.models import USStateField
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
from scuole.core.models import PersonnelBase
from scuole.stats.models import SchoolYear, StatsBase
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class State(models.Model):
name = USStateField(_('State name'))
slug = models.SlugField()
shape = models.MultiPolygonField(_('State shape'), srid=4326, null=True)
objects = models.GeoManager()
def __str__(self):
return self.name
@python_2_unicode_compatible
class StateStats(StatsBase):
state = models.ForeignKey(State, related_name='stats')
year = models.ForeignKey(SchoolYear, related_name='state_stats')
class Meta:
unique_together = ('state', 'year',)
verbose_name_plural = _('State stats')
def __str__(self):
return '{0} {1}'.format(self.year.name, self.state.name)
@python_2_unicode_compatible
class Commissioner(PersonnelBase):
state = models.OneToOneField(State, related_name='commissioner_of')
def __str__(self):
return 'Texas Education Commissioner'
| from __future__ import absolute_import, unicode_literals
from localflavor.us.models import USStateField
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
from scuole.core.models import PersonnelBase
from scuole.stats.models import SchoolYear, StatsBase
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class State(models.Model):
name = USStateField(_('State name'))
slug = models.SlugField()
shape = models.MultiPolygonField(_('State shape'), srid=4326, null=True)
objects = models.GeoManager()
def __str__(self):
return self.name
def get_absolute_url(self):
from django.core.urlresolvers import reverse
return reverse('states:detail', kwargs={
'slug': self.slug,
})
@python_2_unicode_compatible
class StateStats(StatsBase):
state = models.ForeignKey(State, related_name='stats')
year = models.ForeignKey(SchoolYear, related_name='state_stats')
class Meta:
unique_together = ('state', 'year',)
verbose_name_plural = _('State stats')
def __str__(self):
return '{0} {1}'.format(self.year.name, self.state.name)
@python_2_unicode_compatible
class Commissioner(PersonnelBase):
state = models.OneToOneField(State, related_name='commissioner_of')
def __str__(self):
return 'Texas Education Commissioner'
| Add get_absolute_url to State model | Add get_absolute_url to State model
| Python | mit | texastribune/scuole,texastribune/scuole,texastribune/scuole,texastribune/scuole | from __future__ import absolute_import, unicode_literals
from localflavor.us.models import USStateField
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
from scuole.core.models import PersonnelBase
from scuole.stats.models import SchoolYear, StatsBase
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class State(models.Model):
name = USStateField(_('State name'))
slug = models.SlugField()
shape = models.MultiPolygonField(_('State shape'), srid=4326, null=True)
objects = models.GeoManager()
def __str__(self):
return self.name
+ def get_absolute_url(self):
+ from django.core.urlresolvers import reverse
+ return reverse('states:detail', kwargs={
+ 'slug': self.slug,
+ })
+
@python_2_unicode_compatible
class StateStats(StatsBase):
state = models.ForeignKey(State, related_name='stats')
year = models.ForeignKey(SchoolYear, related_name='state_stats')
class Meta:
unique_together = ('state', 'year',)
verbose_name_plural = _('State stats')
def __str__(self):
return '{0} {1}'.format(self.year.name, self.state.name)
@python_2_unicode_compatible
class Commissioner(PersonnelBase):
state = models.OneToOneField(State, related_name='commissioner_of')
def __str__(self):
return 'Texas Education Commissioner'
| Add get_absolute_url to State model | ## Code Before:
from __future__ import absolute_import, unicode_literals
from localflavor.us.models import USStateField
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
from scuole.core.models import PersonnelBase
from scuole.stats.models import SchoolYear, StatsBase
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class State(models.Model):
name = USStateField(_('State name'))
slug = models.SlugField()
shape = models.MultiPolygonField(_('State shape'), srid=4326, null=True)
objects = models.GeoManager()
def __str__(self):
return self.name
@python_2_unicode_compatible
class StateStats(StatsBase):
state = models.ForeignKey(State, related_name='stats')
year = models.ForeignKey(SchoolYear, related_name='state_stats')
class Meta:
unique_together = ('state', 'year',)
verbose_name_plural = _('State stats')
def __str__(self):
return '{0} {1}'.format(self.year.name, self.state.name)
@python_2_unicode_compatible
class Commissioner(PersonnelBase):
state = models.OneToOneField(State, related_name='commissioner_of')
def __str__(self):
return 'Texas Education Commissioner'
## Instruction:
Add get_absolute_url to State model
## Code After:
from __future__ import absolute_import, unicode_literals
from localflavor.us.models import USStateField
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
from scuole.core.models import PersonnelBase
from scuole.stats.models import SchoolYear, StatsBase
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class State(models.Model):
name = USStateField(_('State name'))
slug = models.SlugField()
shape = models.MultiPolygonField(_('State shape'), srid=4326, null=True)
objects = models.GeoManager()
def __str__(self):
return self.name
def get_absolute_url(self):
from django.core.urlresolvers import reverse
return reverse('states:detail', kwargs={
'slug': self.slug,
})
@python_2_unicode_compatible
class StateStats(StatsBase):
state = models.ForeignKey(State, related_name='stats')
year = models.ForeignKey(SchoolYear, related_name='state_stats')
class Meta:
unique_together = ('state', 'year',)
verbose_name_plural = _('State stats')
def __str__(self):
return '{0} {1}'.format(self.year.name, self.state.name)
@python_2_unicode_compatible
class Commissioner(PersonnelBase):
state = models.OneToOneField(State, related_name='commissioner_of')
def __str__(self):
return 'Texas Education Commissioner'
| # ... existing code ...
def get_absolute_url(self):
from django.core.urlresolvers import reverse
return reverse('states:detail', kwargs={
'slug': self.slug,
})
# ... rest of the code ... |
b0202e8882f792feb041070baff7370cacf73751 | tests/test_api.py | tests/test_api.py |
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
|
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
class TestNewApi(TestCase):
def setUp(self):
self.process = subprocess.Popen(['openfisca', 'serve'])
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
| Test france compatibility with the new API | Test france compatibility with the new API
| Python | agpl-3.0 | antoinearnoud/openfisca-france,sgmap/openfisca-france,sgmap/openfisca-france,antoinearnoud/openfisca-france |
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
+
+ class TestNewApi(TestCase):
+
+ def setUp(self):
+ self.process = subprocess.Popen(['openfisca', 'serve'])
+
+ def tearDown(self):
+ self.process.terminate()
+
+ def test_response(self):
+ try:
+ subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
+ except subprocess.CalledProcessError:
+ raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
+ | Test france compatibility with the new API | ## Code Before:
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
## Instruction:
Test france compatibility with the new API
## Code After:
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
class TestNewApi(TestCase):
def setUp(self):
self.process = subprocess.Popen(['openfisca', 'serve'])
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
| ...
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
class TestNewApi(TestCase):
def setUp(self):
self.process = subprocess.Popen(['openfisca', 'serve'])
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
... |
106868c0c4b3bb947d251a8416bbd3698af5948b | backend/session/permissions.py | backend/session/permissions.py |
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
return view.action == 'retrieve' or request.user.is_staff
def has_object_permission(self, request, view, obj):
return request.user.is_staff or obj == request.user
| from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
| Fix IsStaffOrTargetUser permission when no user in request. | Fix IsStaffOrTargetUser permission when no user in request.
| Python | mit | ThreeDRadio/playlists,ThreeDRadio/playlists,ThreeDRadio/playlists | -
-
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
- return view.action == 'retrieve' or request.user.is_staff
+ if view.action == 'retrieve':
+ return True
+ else:
+ return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
+ if hasattr(request, 'user'):
- return request.user.is_staff or obj == request.user
+ return request.user.is_staff or obj == request.user
+ return False
| Fix IsStaffOrTargetUser permission when no user in request. | ## Code Before:
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
return view.action == 'retrieve' or request.user.is_staff
def has_object_permission(self, request, view, obj):
return request.user.is_staff or obj == request.user
## Instruction:
Fix IsStaffOrTargetUser permission when no user in request.
## Code After:
from rest_framework import permissions
class IsStaffOrTargetUser(permissions.BasePermission):
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
| ...
from rest_framework import permissions
...
def has_permission(self, request, view):
if view.action == 'retrieve':
return True
else:
return hasattr(request, 'user') and request.user.is_staff
...
def has_object_permission(self, request, view, obj):
if hasattr(request, 'user'):
return request.user.is_staff or obj == request.user
return False
... |
57560385ef05ba6a2234e43795a037a487f26cfd | djaml/utils.py | djaml/utils.py | import imp
from os import listdir
from os.path import dirname, splitext
from django.template import loaders
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.',1)[1], loader)
for loader in get_submodules(loaders)
if hasattr(loader, 'load_template_source')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module)
for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]])
for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0]
for module in listdir(package_path)
if module.endswith(MODULE_EXTENSIONS)])
return contents
| import imp
from os import listdir
from os.path import dirname, splitext
from django.template import loaders
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.',1)[1], loader)
for loader in get_submodules(loaders)
if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module)
for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]])
for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0]
for module in listdir(package_path)
if module.endswith(MODULE_EXTENSIONS)])
return contents
| Fix submodule attribute check for Django 1.4 compatibility | Fix submodule attribute check for Django 1.4 compatibility
| Python | mit | chartjes/djaml | import imp
from os import listdir
from os.path import dirname, splitext
from django.template import loaders
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.',1)[1], loader)
for loader in get_submodules(loaders)
- if hasattr(loader, 'load_template_source')]
+ if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module)
for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]])
for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0]
for module in listdir(package_path)
if module.endswith(MODULE_EXTENSIONS)])
return contents
| Fix submodule attribute check for Django 1.4 compatibility | ## Code Before:
import imp
from os import listdir
from os.path import dirname, splitext
from django.template import loaders
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.',1)[1], loader)
for loader in get_submodules(loaders)
if hasattr(loader, 'load_template_source')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module)
for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]])
for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0]
for module in listdir(package_path)
if module.endswith(MODULE_EXTENSIONS)])
return contents
## Instruction:
Fix submodule attribute check for Django 1.4 compatibility
## Code After:
import imp
from os import listdir
from os.path import dirname, splitext
from django.template import loaders
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.',1)[1], loader)
for loader in get_submodules(loaders)
if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module)
for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]])
for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0]
for module in listdir(package_path)
if module.endswith(MODULE_EXTENSIONS)])
return contents
| // ... existing code ...
for loader in get_submodules(loaders)
if hasattr(loader, 'Loader')]
// ... rest of the code ... |
2e0286632b9120fe6a788db4483911513a39fe04 | fabfile.py | fabfile.py | from fabric.api import * # noqa
env.hosts = [
'104.131.30.135',
]
env.user = "root"
env.directory = "/home/django/api.freemusic.ninja"
env.deploy_path = "/home/django/django_project"
def deploy():
with cd(env.directory):
run("git pull --rebase")
sudo("pip3 install -r requirements.txt")
sudo("python3 manage.py collectstatic --noinput", user='django')
sudo("python3 manage.py migrate --noinput", user='django')
run("rm -f {deploy_path}".format(deploy_path=env.deploy_path))
run("ln -s {project_path} {deploy_path}".format(
project_path=env.directory, deploy_path=env.deploy_path))
run("service gunicorn restart")
def dbshell():
with cd(env.directory):
sudo("python3 manage.py dbshell", user='django')
def shell():
with cd(env.directory):
sudo("python3 manage.py shell", user='django')
def migrate():
with cd(env.directory):
sudo("python3 manage.py migrate", user='django')
def gunicorn_restart():
run("service gunicorn restart")
| from fabric.api import * # noqa
env.hosts = [
'104.131.30.135',
]
env.user = "root"
env.directory = "/home/django/api.freemusic.ninja"
env.deploy_path = "/home/django/django_project"
def deploy():
with cd(env.directory):
run("git reset --hard origin/master")
sudo("pip3 install -r requirements.txt")
sudo("python3 manage.py collectstatic --noinput", user='django')
sudo("python3 manage.py migrate --noinput", user='django')
run("rm -f {deploy_path}".format(deploy_path=env.deploy_path))
run("ln -s {project_path} {deploy_path}".format(
project_path=env.directory, deploy_path=env.deploy_path))
run("service gunicorn restart")
def dbshell():
with cd(env.directory):
sudo("python3 manage.py dbshell", user='django')
def shell():
with cd(env.directory):
sudo("python3 manage.py shell", user='django')
def migrate():
with cd(env.directory):
sudo("python3 manage.py migrate", user='django')
def gunicorn_restart():
run("service gunicorn restart")
| Reset to upstream master instead of rebasing during deployment | Reset to upstream master instead of rebasing during deployment
| Python | bsd-3-clause | FreeMusicNinja/api.freemusic.ninja | from fabric.api import * # noqa
env.hosts = [
'104.131.30.135',
]
env.user = "root"
env.directory = "/home/django/api.freemusic.ninja"
env.deploy_path = "/home/django/django_project"
def deploy():
with cd(env.directory):
- run("git pull --rebase")
+ run("git reset --hard origin/master")
sudo("pip3 install -r requirements.txt")
sudo("python3 manage.py collectstatic --noinput", user='django')
sudo("python3 manage.py migrate --noinput", user='django')
run("rm -f {deploy_path}".format(deploy_path=env.deploy_path))
run("ln -s {project_path} {deploy_path}".format(
project_path=env.directory, deploy_path=env.deploy_path))
run("service gunicorn restart")
def dbshell():
with cd(env.directory):
sudo("python3 manage.py dbshell", user='django')
def shell():
with cd(env.directory):
sudo("python3 manage.py shell", user='django')
def migrate():
with cd(env.directory):
sudo("python3 manage.py migrate", user='django')
def gunicorn_restart():
run("service gunicorn restart")
| Reset to upstream master instead of rebasing during deployment | ## Code Before:
from fabric.api import * # noqa
env.hosts = [
'104.131.30.135',
]
env.user = "root"
env.directory = "/home/django/api.freemusic.ninja"
env.deploy_path = "/home/django/django_project"
def deploy():
with cd(env.directory):
run("git pull --rebase")
sudo("pip3 install -r requirements.txt")
sudo("python3 manage.py collectstatic --noinput", user='django')
sudo("python3 manage.py migrate --noinput", user='django')
run("rm -f {deploy_path}".format(deploy_path=env.deploy_path))
run("ln -s {project_path} {deploy_path}".format(
project_path=env.directory, deploy_path=env.deploy_path))
run("service gunicorn restart")
def dbshell():
with cd(env.directory):
sudo("python3 manage.py dbshell", user='django')
def shell():
with cd(env.directory):
sudo("python3 manage.py shell", user='django')
def migrate():
with cd(env.directory):
sudo("python3 manage.py migrate", user='django')
def gunicorn_restart():
run("service gunicorn restart")
## Instruction:
Reset to upstream master instead of rebasing during deployment
## Code After:
from fabric.api import * # noqa
env.hosts = [
'104.131.30.135',
]
env.user = "root"
env.directory = "/home/django/api.freemusic.ninja"
env.deploy_path = "/home/django/django_project"
def deploy():
with cd(env.directory):
run("git reset --hard origin/master")
sudo("pip3 install -r requirements.txt")
sudo("python3 manage.py collectstatic --noinput", user='django')
sudo("python3 manage.py migrate --noinput", user='django')
run("rm -f {deploy_path}".format(deploy_path=env.deploy_path))
run("ln -s {project_path} {deploy_path}".format(
project_path=env.directory, deploy_path=env.deploy_path))
run("service gunicorn restart")
def dbshell():
with cd(env.directory):
sudo("python3 manage.py dbshell", user='django')
def shell():
with cd(env.directory):
sudo("python3 manage.py shell", user='django')
def migrate():
with cd(env.directory):
sudo("python3 manage.py migrate", user='django')
def gunicorn_restart():
run("service gunicorn restart")
| ...
with cd(env.directory):
run("git reset --hard origin/master")
sudo("pip3 install -r requirements.txt")
... |
21a4c6c5cdf3461ef2bd6048a7399044e8b1a0e8 | spyder_unittest/backend/pytestworker.py | spyder_unittest/backend/pytestworker.py |
# Standard library imports
import sys
# Third party imports
import pytest
pytest.main(sys.argv[1:])
|
# Standard library imports
import sys
# Third party imports
import pytest
class SpyderPlugin():
"""Pytest plugin which reports in format suitable for Spyder."""
def pytest_itemcollected(self, item):
"""Called by py.test when a test item is collected."""
name = item.name
module = item.parent.name
module = module.replace('/', '.') # convert path to dotted path
if module.endswith('.py'):
module = module[:-3]
print('pytest_item_collected(name={}, module={})'.format(name, module))
pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
| Add py.test plugin which prints out test names as they are collected | Add py.test plugin which prints out test names as they are collected
| Python | mit | jitseniesen/spyder-unittest |
# Standard library imports
import sys
# Third party imports
import pytest
- pytest.main(sys.argv[1:])
+ class SpyderPlugin():
+ """Pytest plugin which reports in format suitable for Spyder."""
+
+ def pytest_itemcollected(self, item):
+ """Called by py.test when a test item is collected."""
+ name = item.name
+ module = item.parent.name
+ module = module.replace('/', '.') # convert path to dotted path
+ if module.endswith('.py'):
+ module = module[:-3]
+ print('pytest_item_collected(name={}, module={})'.format(name, module))
+
+
+ pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
+ | Add py.test plugin which prints out test names as they are collected | ## Code Before:
# Standard library imports
import sys
# Third party imports
import pytest
pytest.main(sys.argv[1:])
## Instruction:
Add py.test plugin which prints out test names as they are collected
## Code After:
# Standard library imports
import sys
# Third party imports
import pytest
class SpyderPlugin():
"""Pytest plugin which reports in format suitable for Spyder."""
def pytest_itemcollected(self, item):
"""Called by py.test when a test item is collected."""
name = item.name
module = item.parent.name
module = module.replace('/', '.') # convert path to dotted path
if module.endswith('.py'):
module = module[:-3]
print('pytest_item_collected(name={}, module={})'.format(name, module))
pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
| # ... existing code ...
class SpyderPlugin():
"""Pytest plugin which reports in format suitable for Spyder."""
def pytest_itemcollected(self, item):
"""Called by py.test when a test item is collected."""
name = item.name
module = item.parent.name
module = module.replace('/', '.') # convert path to dotted path
if module.endswith('.py'):
module = module[:-3]
print('pytest_item_collected(name={}, module={})'.format(name, module))
pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
# ... rest of the code ... |
543c7307f26553d78bf3f18b5f93a2bc23cfb875 | reports/admin.py | reports/admin.py | from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at']
search_fields = ['addressed_to', 'reported_from', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
| from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at', 'content']
search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
| Fix some issues, because of models change | Fix some issues, because of models change
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
- list_filter = ['created_at']
+ list_filter = ['created_at', 'content']
- search_fields = ['addressed_to', 'reported_from', 'content', 'signed_from']
+ search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
| Fix some issues, because of models change | ## Code Before:
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at']
search_fields = ['addressed_to', 'reported_from', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
## Instruction:
Fix some issues, because of models change
## Code After:
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at', 'content']
search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
| ...
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at', 'content']
search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
... |
e44eb0bd99b4dec1b78707c7343fc6d9b647c7bb | scripts/write_antenna_location_file.py | scripts/write_antenna_location_file.py | import pandas as pd
from hera_mc import mc, geo_handling
import datetime
parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
default='hera_ant_locs_' + datetime.date.today().strftime("%m_%d_%Y") + '.csv')
args = parser.parse_args()
filename = args.file
db = mc.connect_to_mc_db(args)
locations = geo_handling.get_all_locations(args)
df = pd.DataFrame(locations)
df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation',
'antenna_number', 'start_date', 'stop_date']]
df.to_csv(filename, index=False)
| import pandas as pd
from hera_mc import mc, geo_handling
import datetime
parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
default='hera_ant_locs_' + datetime.date.today().strftime("%m_%d_%Y") + '.csv')
args = parser.parse_args()
filename = args.file
db = mc.connect_to_mc_db(args)
locations = geo_handling.get_all_locations(args)
cofa_loc = geo_handling.cofa()
locations.append({'station_name': cofa_loc.station_name,
'station_type': cofa_loc.station_type_name,
'longitude': cofa_loc.lon,
'latitude': cofa_loc.lat,
'elevation': cofa_loc.elevation,
'antenna_number': None,
'start_date': cofa_loc.created_date,
'stop_date': None})
df = pd.DataFrame(locations)
df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation',
'antenna_number', 'start_date', 'stop_date']]
df.to_csv(filename, index=False)
| Add cofa information to antenna location files | Add cofa information to antenna location files
| Python | bsd-2-clause | HERA-Team/hera_mc,HERA-Team/Monitor_and_Control,HERA-Team/hera_mc | import pandas as pd
from hera_mc import mc, geo_handling
import datetime
parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
default='hera_ant_locs_' + datetime.date.today().strftime("%m_%d_%Y") + '.csv')
args = parser.parse_args()
filename = args.file
db = mc.connect_to_mc_db(args)
locations = geo_handling.get_all_locations(args)
+ cofa_loc = geo_handling.cofa()
+ locations.append({'station_name': cofa_loc.station_name,
+ 'station_type': cofa_loc.station_type_name,
+ 'longitude': cofa_loc.lon,
+ 'latitude': cofa_loc.lat,
+ 'elevation': cofa_loc.elevation,
+ 'antenna_number': None,
+ 'start_date': cofa_loc.created_date,
+ 'stop_date': None})
df = pd.DataFrame(locations)
df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation',
'antenna_number', 'start_date', 'stop_date']]
df.to_csv(filename, index=False)
| Add cofa information to antenna location files | ## Code Before:
import pandas as pd
from hera_mc import mc, geo_handling
import datetime
parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
default='hera_ant_locs_' + datetime.date.today().strftime("%m_%d_%Y") + '.csv')
args = parser.parse_args()
filename = args.file
db = mc.connect_to_mc_db(args)
locations = geo_handling.get_all_locations(args)
df = pd.DataFrame(locations)
df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation',
'antenna_number', 'start_date', 'stop_date']]
df.to_csv(filename, index=False)
## Instruction:
Add cofa information to antenna location files
## Code After:
import pandas as pd
from hera_mc import mc, geo_handling
import datetime
parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
default='hera_ant_locs_' + datetime.date.today().strftime("%m_%d_%Y") + '.csv')
args = parser.parse_args()
filename = args.file
db = mc.connect_to_mc_db(args)
locations = geo_handling.get_all_locations(args)
cofa_loc = geo_handling.cofa()
locations.append({'station_name': cofa_loc.station_name,
'station_type': cofa_loc.station_type_name,
'longitude': cofa_loc.lon,
'latitude': cofa_loc.lat,
'elevation': cofa_loc.elevation,
'antenna_number': None,
'start_date': cofa_loc.created_date,
'stop_date': None})
df = pd.DataFrame(locations)
df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation',
'antenna_number', 'start_date', 'stop_date']]
df.to_csv(filename, index=False)
| // ... existing code ...
locations = geo_handling.get_all_locations(args)
cofa_loc = geo_handling.cofa()
locations.append({'station_name': cofa_loc.station_name,
'station_type': cofa_loc.station_type_name,
'longitude': cofa_loc.lon,
'latitude': cofa_loc.lat,
'elevation': cofa_loc.elevation,
'antenna_number': None,
'start_date': cofa_loc.created_date,
'stop_date': None})
df = pd.DataFrame(locations)
// ... rest of the code ... |
fb9ca96431a4f72135245705359eb1f6d340a536 | moksha/api/hub/__init__.py | moksha/api/hub/__init__.py |
from consumer import *
from hub import *
|
from consumer import *
from hub import *
from moksha.hub.reactor import reactor
from moksha.hub.hub import MokshaHub
| Make the MokshaHub and reactor available in the moksha.api.hub module | Make the MokshaHub and reactor available in the moksha.api.hub module
| Python | apache-2.0 | lmacken/moksha,pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha,ralphbean/moksha,lmacken/moksha,lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,ralphbean/moksha,ralphbean/moksha,pombredanne/moksha,mokshaproject/moksha |
from consumer import *
from hub import *
+ from moksha.hub.reactor import reactor
+ from moksha.hub.hub import MokshaHub
+ | Make the MokshaHub and reactor available in the moksha.api.hub module | ## Code Before:
from consumer import *
from hub import *
## Instruction:
Make the MokshaHub and reactor available in the moksha.api.hub module
## Code After:
from consumer import *
from hub import *
from moksha.hub.reactor import reactor
from moksha.hub.hub import MokshaHub
| ...
from hub import *
from moksha.hub.reactor import reactor
from moksha.hub.hub import MokshaHub
... |
3b091fba819f1ad69d0ce9e9038ccf5d14fea215 | tests/core/tests/test_mixins.py | tests/core/tests/test_mixins.py | from core.models import Category
from django.test.testcases import TestCase
from django.urls import reverse
class ExportViewMixinTest(TestCase):
def setUp(self):
self.url = reverse('export-category')
self.cat1 = Category.objects.create(name='Cat 1')
self.cat2 = Category.objects.create(name='Cat 2')
def test_get(self):
response = self.client.get(self.url)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertTrue(response['Content-Type'], 'text/html')
def test_post(self):
data = {
'file_format': '0',
}
response = self.client.post(self.url, data)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertTrue(response.has_header("Content-Disposition"))
self.assertTrue(response['Content-Type'], 'text/csv')
| from core.models import Category
from django.test.testcases import TestCase
from django.urls import reverse
class ExportViewMixinTest(TestCase):
def setUp(self):
self.url = reverse('export-category')
self.cat1 = Category.objects.create(name='Cat 1')
self.cat2 = Category.objects.create(name='Cat 2')
def test_get(self):
response = self.client.get(self.url)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertEquals(response['Content-Type'], 'text/html; charset=utf-8')
def test_post(self):
data = {
'file_format': '0',
}
response = self.client.post(self.url, data)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertTrue(response.has_header("Content-Disposition"))
self.assertEquals(response['Content-Type'], 'text/csv')
| Correct mistaken assertTrue() -> assertEquals() | Correct mistaken assertTrue() -> assertEquals()
| Python | bsd-2-clause | bmihelac/django-import-export,bmihelac/django-import-export,bmihelac/django-import-export,jnns/django-import-export,jnns/django-import-export,jnns/django-import-export,django-import-export/django-import-export,django-import-export/django-import-export,django-import-export/django-import-export,django-import-export/django-import-export,jnns/django-import-export,bmihelac/django-import-export | from core.models import Category
from django.test.testcases import TestCase
from django.urls import reverse
class ExportViewMixinTest(TestCase):
def setUp(self):
self.url = reverse('export-category')
self.cat1 = Category.objects.create(name='Cat 1')
self.cat2 = Category.objects.create(name='Cat 2')
def test_get(self):
response = self.client.get(self.url)
self.assertContains(response, self.cat1.name, status_code=200)
- self.assertTrue(response['Content-Type'], 'text/html')
+ self.assertEquals(response['Content-Type'], 'text/html; charset=utf-8')
def test_post(self):
data = {
'file_format': '0',
}
response = self.client.post(self.url, data)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertTrue(response.has_header("Content-Disposition"))
- self.assertTrue(response['Content-Type'], 'text/csv')
+ self.assertEquals(response['Content-Type'], 'text/csv')
| Correct mistaken assertTrue() -> assertEquals() | ## Code Before:
from core.models import Category
from django.test.testcases import TestCase
from django.urls import reverse
class ExportViewMixinTest(TestCase):
def setUp(self):
self.url = reverse('export-category')
self.cat1 = Category.objects.create(name='Cat 1')
self.cat2 = Category.objects.create(name='Cat 2')
def test_get(self):
response = self.client.get(self.url)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertTrue(response['Content-Type'], 'text/html')
def test_post(self):
data = {
'file_format': '0',
}
response = self.client.post(self.url, data)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertTrue(response.has_header("Content-Disposition"))
self.assertTrue(response['Content-Type'], 'text/csv')
## Instruction:
Correct mistaken assertTrue() -> assertEquals()
## Code After:
from core.models import Category
from django.test.testcases import TestCase
from django.urls import reverse
class ExportViewMixinTest(TestCase):
def setUp(self):
self.url = reverse('export-category')
self.cat1 = Category.objects.create(name='Cat 1')
self.cat2 = Category.objects.create(name='Cat 2')
def test_get(self):
response = self.client.get(self.url)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertEquals(response['Content-Type'], 'text/html; charset=utf-8')
def test_post(self):
data = {
'file_format': '0',
}
response = self.client.post(self.url, data)
self.assertContains(response, self.cat1.name, status_code=200)
self.assertTrue(response.has_header("Content-Disposition"))
self.assertEquals(response['Content-Type'], 'text/csv')
| # ... existing code ...
self.assertContains(response, self.cat1.name, status_code=200)
self.assertEquals(response['Content-Type'], 'text/html; charset=utf-8')
# ... modified code ...
self.assertTrue(response.has_header("Content-Disposition"))
self.assertEquals(response['Content-Type'], 'text/csv')
# ... rest of the code ... |
3873fe6b33665267a04f80faec63eaaa19ea00bd | portal/pages/models.py | portal/pages/models.py | from django.db import models
from wagtail.wagtailcore.models import Page as WagtailPage
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel
class Page(WagtailPage):
parent_page_types = ['home.HomePage', 'Page']
body = RichTextField()
indexed_fields = ('body', )
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body', classname="full"),
]
| from django.db import models
from wagtail.wagtailcore.models import Page as WagtailPage
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel
class Page(WagtailPage):
body = RichTextField()
indexed_fields = ('body', )
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body', classname="full"),
]
| Allow pages to appear anywhere they aren't excluded | Allow pages to appear anywhere they aren't excluded
| Python | isc | Ecotrust/marineplanner-core,Ecotrust/marineplanner-core,Ecotrust/marineplanner-core,Ecotrust/marineplanner-core,MidAtlanticPortal/marco-portal2,MidAtlanticPortal/marco-portal2,MidAtlanticPortal/marco-portal2,MidAtlanticPortal/marco-portal2,Ecotrust/marineplanner-core | from django.db import models
from wagtail.wagtailcore.models import Page as WagtailPage
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel
class Page(WagtailPage):
- parent_page_types = ['home.HomePage', 'Page']
body = RichTextField()
indexed_fields = ('body', )
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body', classname="full"),
]
| Allow pages to appear anywhere they aren't excluded | ## Code Before:
from django.db import models
from wagtail.wagtailcore.models import Page as WagtailPage
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel
class Page(WagtailPage):
parent_page_types = ['home.HomePage', 'Page']
body = RichTextField()
indexed_fields = ('body', )
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body', classname="full"),
]
## Instruction:
Allow pages to appear anywhere they aren't excluded
## Code After:
from django.db import models
from wagtail.wagtailcore.models import Page as WagtailPage
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel
class Page(WagtailPage):
body = RichTextField()
indexed_fields = ('body', )
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body', classname="full"),
]
| ...
class Page(WagtailPage):
... |
fa2fb3387912474eff2b6c2a14d6304fcf5cd1f8 | erasmus/cogs/bible/testing_server_preferences_group.py | erasmus/cogs/bible/testing_server_preferences_group.py | from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group(self)
self.daily_bread.initialize_from_parent(self)
| from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group('serverprefs')
self.daily_bread.initialize_from_parent(self)
| Use serverprefs localizer for TestingServerPreferencesGroup | Use serverprefs localizer for TestingServerPreferencesGroup
| Python | bsd-3-clause | bryanforbes/Erasmus | from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
- self.localizer = parent.localizer.for_group(self)
+ self.localizer = parent.localizer.for_group('serverprefs')
self.daily_bread.initialize_from_parent(self)
| Use serverprefs localizer for TestingServerPreferencesGroup | ## Code Before:
from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group(self)
self.daily_bread.initialize_from_parent(self)
## Instruction:
Use serverprefs localizer for TestingServerPreferencesGroup
## Code After:
from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group('serverprefs')
self.daily_bread.initialize_from_parent(self)
| // ... existing code ...
self.bot = parent.bot
self.localizer = parent.localizer.for_group('serverprefs')
// ... rest of the code ... |
31c0863d088488da5dd85e2cbe3c01c6b01aa4a2 | system_tests/test_default.py | system_tests/test_default.py |
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
|
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
| Fix system tests when running on GCE | Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.
| Python | apache-2.0 | googleapis/google-auth-library-python,googleapis/google-auth-library-python |
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
- else:
- assert project_id is None
verify_refresh(credentials)
| Fix system tests when running on GCE | ## Code Before:
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
## Instruction:
Fix system tests when running on GCE
## Code After:
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
| // ... existing code ...
assert project_id is not None
// ... rest of the code ... |
5deb33c244242d36a16a8c08ff816368b345a8f3 | qr_code/qrcode/image.py | qr_code/qrcode/image.py | import logging
from qrcode.image.svg import SvgPathImage as _SvgPathImage
logger = logging.getLogger('django')
try:
from qrcode.image.pil import PilImage as _PilImageOrFallback
except ImportError:
logger.info("Pillow is not installed. No support available for PNG format.")
from qrcode.image.svg import SvgPathImage as _PilImageOrFallback
SVG_FORMAT_NAME = 'svg'
PNG_FORMAT_NAME = 'png'
SvgPathImage = _SvgPathImage
PilImageOrFallback = _PilImageOrFallback
def has_png_support():
return PilImageOrFallback is not SvgPathImage
def get_supported_image_format(image_format):
image_format = image_format.lower()
if image_format not in [SVG_FORMAT_NAME, PNG_FORMAT_NAME]:
logger.warning('Unknown image format: %s' % image_format)
image_format = SVG_FORMAT_NAME
elif image_format == PNG_FORMAT_NAME and not has_png_support():
logger.warning("No support available for PNG format, SVG will be used instead. Please install Pillow for PNG support.")
image_format = SVG_FORMAT_NAME
return image_format
| import logging
from qrcode.image.svg import SvgPathImage as _SvgPathImage
logger = logging.getLogger('django')
try:
from qrcode.image.pil import PilImage as _PilImageOrFallback
except ImportError: # pragma: no cover
logger.info("Pillow is not installed. No support available for PNG format.")
from qrcode.image.svg import SvgPathImage as _PilImageOrFallback
SVG_FORMAT_NAME = 'svg'
PNG_FORMAT_NAME = 'png'
SvgPathImage = _SvgPathImage
PilImageOrFallback = _PilImageOrFallback
def has_png_support():
return PilImageOrFallback is not SvgPathImage
def get_supported_image_format(image_format):
image_format = image_format.lower()
if image_format not in [SVG_FORMAT_NAME, PNG_FORMAT_NAME]:
logger.warning('Unknown image format: %s' % image_format)
image_format = SVG_FORMAT_NAME
elif image_format == PNG_FORMAT_NAME and not has_png_support():
logger.warning(
"No support available for PNG format, SVG will be used instead. Please install Pillow for PNG support.")
image_format = SVG_FORMAT_NAME
return image_format
| Exclude handling of the situation where Pillow is not available from test coverage. | Exclude handling of the situation where Pillow is not available from test coverage.
| Python | bsd-3-clause | dprog-philippe-docourt/django-qr-code,dprog-philippe-docourt/django-qr-code,dprog-philippe-docourt/django-qr-code | import logging
from qrcode.image.svg import SvgPathImage as _SvgPathImage
+
logger = logging.getLogger('django')
try:
from qrcode.image.pil import PilImage as _PilImageOrFallback
- except ImportError:
+ except ImportError: # pragma: no cover
logger.info("Pillow is not installed. No support available for PNG format.")
from qrcode.image.svg import SvgPathImage as _PilImageOrFallback
SVG_FORMAT_NAME = 'svg'
PNG_FORMAT_NAME = 'png'
SvgPathImage = _SvgPathImage
PilImageOrFallback = _PilImageOrFallback
def has_png_support():
return PilImageOrFallback is not SvgPathImage
def get_supported_image_format(image_format):
image_format = image_format.lower()
if image_format not in [SVG_FORMAT_NAME, PNG_FORMAT_NAME]:
logger.warning('Unknown image format: %s' % image_format)
image_format = SVG_FORMAT_NAME
elif image_format == PNG_FORMAT_NAME and not has_png_support():
+ logger.warning(
- logger.warning("No support available for PNG format, SVG will be used instead. Please install Pillow for PNG support.")
+ "No support available for PNG format, SVG will be used instead. Please install Pillow for PNG support.")
image_format = SVG_FORMAT_NAME
return image_format
| Exclude handling of the situation where Pillow is not available from test coverage. | ## Code Before:
import logging
from qrcode.image.svg import SvgPathImage as _SvgPathImage
logger = logging.getLogger('django')
try:
from qrcode.image.pil import PilImage as _PilImageOrFallback
except ImportError:
logger.info("Pillow is not installed. No support available for PNG format.")
from qrcode.image.svg import SvgPathImage as _PilImageOrFallback
SVG_FORMAT_NAME = 'svg'
PNG_FORMAT_NAME = 'png'
SvgPathImage = _SvgPathImage
PilImageOrFallback = _PilImageOrFallback
def has_png_support():
return PilImageOrFallback is not SvgPathImage
def get_supported_image_format(image_format):
image_format = image_format.lower()
if image_format not in [SVG_FORMAT_NAME, PNG_FORMAT_NAME]:
logger.warning('Unknown image format: %s' % image_format)
image_format = SVG_FORMAT_NAME
elif image_format == PNG_FORMAT_NAME and not has_png_support():
logger.warning("No support available for PNG format, SVG will be used instead. Please install Pillow for PNG support.")
image_format = SVG_FORMAT_NAME
return image_format
## Instruction:
Exclude handling of the situation where Pillow is not available from test coverage.
## Code After:
import logging
from qrcode.image.svg import SvgPathImage as _SvgPathImage
logger = logging.getLogger('django')
try:
from qrcode.image.pil import PilImage as _PilImageOrFallback
except ImportError: # pragma: no cover
logger.info("Pillow is not installed. No support available for PNG format.")
from qrcode.image.svg import SvgPathImage as _PilImageOrFallback
SVG_FORMAT_NAME = 'svg'
PNG_FORMAT_NAME = 'png'
SvgPathImage = _SvgPathImage
PilImageOrFallback = _PilImageOrFallback
def has_png_support():
return PilImageOrFallback is not SvgPathImage
def get_supported_image_format(image_format):
image_format = image_format.lower()
if image_format not in [SVG_FORMAT_NAME, PNG_FORMAT_NAME]:
logger.warning('Unknown image format: %s' % image_format)
image_format = SVG_FORMAT_NAME
elif image_format == PNG_FORMAT_NAME and not has_png_support():
logger.warning(
"No support available for PNG format, SVG will be used instead. Please install Pillow for PNG support.")
image_format = SVG_FORMAT_NAME
return image_format
| // ... existing code ...
from qrcode.image.svg import SvgPathImage as _SvgPathImage
logger = logging.getLogger('django')
// ... modified code ...
from qrcode.image.pil import PilImage as _PilImageOrFallback
except ImportError: # pragma: no cover
logger.info("Pillow is not installed. No support available for PNG format.")
...
elif image_format == PNG_FORMAT_NAME and not has_png_support():
logger.warning(
"No support available for PNG format, SVG will be used instead. Please install Pillow for PNG support.")
image_format = SVG_FORMAT_NAME
// ... rest of the code ... |
e8935189659e882f534f5605086dc76ce7ce881b | rdrf/rdrf/admin.py | rdrf/rdrf/admin.py | from django.contrib import admin
from models import *
from registry.groups.models import User
class SectionAdmin(admin.ModelAdmin):
list_display = ('code', 'display_name')
class RegistryFormAdmin(admin.ModelAdmin):
list_display = ('registry', 'name', 'sections')
class RegistryAdmin(admin.ModelAdmin):
def queryset(self, request):
if not request.user.is_superuser:
user = User.objects.get(user=request.user)
return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()])
return Registry.objects.all()
admin.site.register(CDEPermittedValue)
admin.site.register(CDEPermittedValueGroup)
admin.site.register(CommonDataElement)
admin.site.register(Wizard)
admin.site.register(RegistryForm, RegistryFormAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Registry, RegistryAdmin) | from django.contrib import admin
from models import *
from registry.groups.models import User
class SectionAdmin(admin.ModelAdmin):
list_display = ('code', 'display_name')
class RegistryFormAdmin(admin.ModelAdmin):
list_display = ('registry', 'name', 'sections')
class RegistryAdmin(admin.ModelAdmin):
def queryset(self, request):
if not request.user.is_superuser:
user = User.objects.get(user=request.user)
return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()])
return Registry.objects.all()
def has_add_permission(self, request):
if request.user.is_superuser:
return True
return False
admin.site.register(CDEPermittedValue)
admin.site.register(CDEPermittedValueGroup)
admin.site.register(CommonDataElement)
admin.site.register(Wizard)
admin.site.register(RegistryForm, RegistryFormAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Registry, RegistryAdmin) | Disable adding registries for non-superusers | Disable adding registries for non-superusers
| Python | agpl-3.0 | muccg/rdrf,muccg/rdrf,muccg/rdrf,muccg/rdrf,muccg/rdrf | from django.contrib import admin
from models import *
from registry.groups.models import User
class SectionAdmin(admin.ModelAdmin):
list_display = ('code', 'display_name')
class RegistryFormAdmin(admin.ModelAdmin):
list_display = ('registry', 'name', 'sections')
class RegistryAdmin(admin.ModelAdmin):
def queryset(self, request):
if not request.user.is_superuser:
user = User.objects.get(user=request.user)
return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()])
return Registry.objects.all()
+ def has_add_permission(self, request):
+ if request.user.is_superuser:
+ return True
+ return False
+
admin.site.register(CDEPermittedValue)
admin.site.register(CDEPermittedValueGroup)
admin.site.register(CommonDataElement)
admin.site.register(Wizard)
admin.site.register(RegistryForm, RegistryFormAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Registry, RegistryAdmin) | Disable adding registries for non-superusers | ## Code Before:
from django.contrib import admin
from models import *
from registry.groups.models import User
class SectionAdmin(admin.ModelAdmin):
list_display = ('code', 'display_name')
class RegistryFormAdmin(admin.ModelAdmin):
list_display = ('registry', 'name', 'sections')
class RegistryAdmin(admin.ModelAdmin):
def queryset(self, request):
if not request.user.is_superuser:
user = User.objects.get(user=request.user)
return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()])
return Registry.objects.all()
admin.site.register(CDEPermittedValue)
admin.site.register(CDEPermittedValueGroup)
admin.site.register(CommonDataElement)
admin.site.register(Wizard)
admin.site.register(RegistryForm, RegistryFormAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Registry, RegistryAdmin)
## Instruction:
Disable adding registries for non-superusers
## Code After:
from django.contrib import admin
from models import *
from registry.groups.models import User
class SectionAdmin(admin.ModelAdmin):
list_display = ('code', 'display_name')
class RegistryFormAdmin(admin.ModelAdmin):
list_display = ('registry', 'name', 'sections')
class RegistryAdmin(admin.ModelAdmin):
def queryset(self, request):
if not request.user.is_superuser:
user = User.objects.get(user=request.user)
return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()])
return Registry.objects.all()
def has_add_permission(self, request):
if request.user.is_superuser:
return True
return False
admin.site.register(CDEPermittedValue)
admin.site.register(CDEPermittedValueGroup)
admin.site.register(CommonDataElement)
admin.site.register(Wizard)
admin.site.register(RegistryForm, RegistryFormAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Registry, RegistryAdmin) | ...
def has_add_permission(self, request):
if request.user.is_superuser:
return True
return False
... |
857124a12f10e3954c114c2b6b688857b80a77a5 | Spectrum.py | Spectrum.py | from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength) | from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
| Remove simple testing from inside class module | Remove simple testing from inside class module
| Python | mit | jason-neal/spectrum_overload,jason-neal/spectrum_overload,jason-neal/spectrum_overload | from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
- x = [1,2,3,4,5,6]
- y = [1,1,0.9,0.95,1,1]
- test = Spectrum()
- print(test)
- print(test.pixel, test.flux)
- test.pixel = x
- test.flux = y
- print(test)
- print(test.pixel, test.flux, test.wavelength)
-
- test2 = Spectrum(x, flux=y)
- print(test2)
- print(test2.pixel, test.flux)
- print(test2.wavelength) | Remove simple testing from inside class module | ## Code Before:
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength)
## Instruction:
Remove simple testing from inside class module
## Code After:
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
| # ... existing code ...
# ... rest of the code ... |
3f48d0fb0e44d35f29990c0d32c032ecee8fbe65 | conftest.py | conftest.py | import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| Read our .env when we test. | Read our .env when we test.
| Python | apache-2.0 | hello-base/web,hello-base/web,hello-base/web,hello-base/web | import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
+ import dotenv
+ dotenv.read_dotenv()
+
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| Read our .env when we test. | ## Code Before:
import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
## Instruction:
Read our .env when we test.
## Code After:
import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| // ... existing code ...
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
// ... rest of the code ... |
58b46dbc62c98372ed300eeb20b5ecb80a11ddb3 | test/test-mime.py | test/test-mime.py | from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
| from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
| Test getting comment for Mime type | Test getting comment for Mime type
| Python | lgpl-2.1 | 0312birdzhang/pyxdg | from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
+
+ pdf1.get_comment()
| Test getting comment for Mime type | ## Code Before:
from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
## Instruction:
Test getting comment for Mime type
## Code After:
from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
| // ... existing code ...
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
// ... rest of the code ... |
22d72a2daf1cd7ba46ded5f75a2322357762a86c | fireplace/cards/gvg/druid.py | fireplace/cards/gvg/druid.py | from ..utils import *
##
# Minions
# Druid of the Fang
class GVG_080:
def action(self):
if self.poweredUp:
self.morph("GVG_080t")
| from ..utils import *
##
# Minions
# Attack Mode (Anodized Robo Cub)
class GVG_030a:
action = buffSelf("GVG_030ae")
# Tank Mode (Anodized Robo Cub)
class GVG_030b:
action = buffSelf("GVG_030be")
# Gift of Mana (Grove Tender)
class GVG_032a:
def action(self):
for player in self.game.players:
player.maxMana += 1
player.usedMana -= 1
# Gift of Cards (Grove Tender)
class GVG_032b:
def action(self):
for player in self.game.players:
player.draw()
# Druid of the Fang
class GVG_080:
def action(self):
if self.poweredUp:
self.morph("GVG_080t")
| Implement Anodized Robo Cub, Grove Tender | Implement Anodized Robo Cub, Grove Tender
| Python | agpl-3.0 | Ragowit/fireplace,liujimj/fireplace,Ragowit/fireplace,Meerkov/fireplace,beheh/fireplace,butozerca/fireplace,oftc-ftw/fireplace,jleclanche/fireplace,amw2104/fireplace,liujimj/fireplace,NightKev/fireplace,amw2104/fireplace,butozerca/fireplace,smallnamespace/fireplace,Meerkov/fireplace,oftc-ftw/fireplace,smallnamespace/fireplace | from ..utils import *
##
# Minions
+
+ # Attack Mode (Anodized Robo Cub)
+ class GVG_030a:
+ action = buffSelf("GVG_030ae")
+
+ # Tank Mode (Anodized Robo Cub)
+ class GVG_030b:
+ action = buffSelf("GVG_030be")
+
+
+ # Gift of Mana (Grove Tender)
+ class GVG_032a:
+ def action(self):
+ for player in self.game.players:
+ player.maxMana += 1
+ player.usedMana -= 1
+
+ # Gift of Cards (Grove Tender)
+ class GVG_032b:
+ def action(self):
+ for player in self.game.players:
+ player.draw()
+
# Druid of the Fang
class GVG_080:
def action(self):
if self.poweredUp:
self.morph("GVG_080t")
| Implement Anodized Robo Cub, Grove Tender | ## Code Before:
from ..utils import *
##
# Minions
# Druid of the Fang
class GVG_080:
def action(self):
if self.poweredUp:
self.morph("GVG_080t")
## Instruction:
Implement Anodized Robo Cub, Grove Tender
## Code After:
from ..utils import *
##
# Minions
# Attack Mode (Anodized Robo Cub)
class GVG_030a:
action = buffSelf("GVG_030ae")
# Tank Mode (Anodized Robo Cub)
class GVG_030b:
action = buffSelf("GVG_030be")
# Gift of Mana (Grove Tender)
class GVG_032a:
def action(self):
for player in self.game.players:
player.maxMana += 1
player.usedMana -= 1
# Gift of Cards (Grove Tender)
class GVG_032b:
def action(self):
for player in self.game.players:
player.draw()
# Druid of the Fang
class GVG_080:
def action(self):
if self.poweredUp:
self.morph("GVG_080t")
| ...
# Attack Mode (Anodized Robo Cub)
class GVG_030a:
action = buffSelf("GVG_030ae")
# Tank Mode (Anodized Robo Cub)
class GVG_030b:
action = buffSelf("GVG_030be")
# Gift of Mana (Grove Tender)
class GVG_032a:
def action(self):
for player in self.game.players:
player.maxMana += 1
player.usedMana -= 1
# Gift of Cards (Grove Tender)
class GVG_032b:
def action(self):
for player in self.game.players:
player.draw()
# Druid of the Fang
... |
469c1dc9de1c986beda853b13909bdc5d3ff2b92 | stagecraft/urls.py | stagecraft/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
| Make redirect view pass the GET query string to the new location | Make redirect view pass the GET query string to the new location
| Python | mit | alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
- permanent=True)),
+ permanent=True,
+ query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
| Make redirect view pass the GET query string to the new location | ## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
## Instruction:
Make redirect view pass the GET query string to the new location
## Code After:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
| # ... existing code ...
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
# ... rest of the code ... |
1cab84d3f3726df2a7cfe4e5ad8efee81051c73e | tests/test_patched_stream.py | tests/test_patched_stream.py | import nose
import StringIO
import cle
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
| import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
| Add tests for loading binaries with malformed sections | Add tests for loading binaries with malformed sections
| Python | bsd-2-clause | angr/cle | import nose
import StringIO
+ import os
import cle
+
+ tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
+ def test_malformed_sections():
+ ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
+ nose.tools.assert_equal(len(ld.main_object.segments), 1)
+ nose.tools.assert_equal(len(ld.main_object.sections), 0)
+ | Add tests for loading binaries with malformed sections | ## Code Before:
import nose
import StringIO
import cle
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
## Instruction:
Add tests for loading binaries with malformed sections
## Code After:
import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
| ...
import StringIO
import os
...
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
...
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
... |
058882a1d0e4ac458fe8cab972010e17c248ee81 | wate/views.py | wate/views.py | from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = ""
# First, print the header
for item in header:
retval += ( item + ", " )
retval += ( "<br>"*2 )
# Now print each user
for user in users:
for item in user:
retval += ( str(item) + " " )
retval += "<br>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
| from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = '<table border="1">'
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
# Now print each user
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
| Make a table for the front page | Make a table for the front page
| Python | mit | jamesmunns/wate,jamesmunns/wate,jamesmunns/wate | from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
- retval = ""
+ retval = '<table border="1">'
+
# First, print the header
+ retval += '<tr>'
for item in header:
- retval += ( item + ", " )
- retval += ( "<br>"*2 )
+ retval += "<th>{}</th>".format(item)
+ retval += '</tr>'
# Now print each user
for user in users:
+ retval += "<tr>"
for item in user:
- retval += ( str(item) + " " )
+ retval += "<td>{}</td>".format(item)
- retval += "<br>"
+ retval += "</tr>"
+
+ retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
| Make a table for the front page | ## Code Before:
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = ""
# First, print the header
for item in header:
retval += ( item + ", " )
retval += ( "<br>"*2 )
# Now print each user
for user in users:
for item in user:
retval += ( str(item) + " " )
retval += "<br>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
## Instruction:
Make a table for the front page
## Code After:
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = '<table border="1">'
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
# Now print each user
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
| # ... existing code ...
retval = '<table border="1">'
# ... modified code ...
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
...
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
# ... rest of the code ... |
a4f41648cd0318694d551b067309539df475c2d7 | tests/test_function_calls.py | tests/test_function_calls.py | from thinglang.runner import run
def test_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip() | from thinglang.runner import run
def test_zero_arg_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip() | Test for method argument calls | Test for method argument calls
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | from thinglang.runner import run
- def test_function_calls():
+ def test_zero_arg_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
+
+
+ def test_multi_arg_function_calls():
+ assert run("""
+ thing Program
+ does start
+ text arg_val = "some value"
+ self.say_hello(1, "hello", arg_val)
+
+ does say_hello with arg1, arg2, arg3
+ Output.write("in say_hello", arg1, arg2, arg3)
+ """).output == """
+ in say_hello 1 hello some value
+ """.strip() | Test for method argument calls | ## Code Before:
from thinglang.runner import run
def test_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
## Instruction:
Test for method argument calls
## Code After:
from thinglang.runner import run
def test_zero_arg_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip() | // ... existing code ...
def test_zero_arg_function_calls():
assert run("""
// ... modified code ...
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip()
// ... rest of the code ... |
50c44a5708d1c054207eba264e1cdf9d1f6718da | deployer/logger.py | deployer/logger.py | from __future__ import absolute_import
import logging
from logging.handlers import SysLogHandler
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL, TOTEM_ENV, \
LOG_IDENTIFIER
def init_logging(name=None):
app_logger = logging.getLogger(name)
app_logger.setLevel(LOG_ROOT_LEVEL)
app_logger.propagate = False
if TOTEM_ENV == 'local':
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
app_logger.addHandler(handler)
else:
formatter = logging.Formatter(
'{0}[%(process)d]: %(name)s: %(message)s'
.format(LOG_IDENTIFIER))
handler = logging.handlers.SysLogHandler(
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger
def init_celery_logging(*args, **kwargs):
init_logging('celery')
| from __future__ import absolute_import
import logging
from logging.handlers import SysLogHandler
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL, TOTEM_ENV, \
LOG_IDENTIFIER
def init_logging(name=None):
app_logger = logging.getLogger(name)
app_logger.setLevel(LOG_ROOT_LEVEL)
app_logger.propagate = False
if TOTEM_ENV == 'local':
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
app_logger.addHandler(handler)
else:
formatter = logging.Formatter(
'{0}[%(process)d]: %(name)s: %(message)s'
.format(LOG_IDENTIFIER))
handler = logging.handlers.SysLogHandler(
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
handler.setLevel(LOG_ROOT_LEVEL)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger
def init_celery_logging(*args, **kwargs):
init_logging('celery')
| Set log level for handler | Set log level for handler
| Python | mit | totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer | from __future__ import absolute_import
import logging
from logging.handlers import SysLogHandler
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL, TOTEM_ENV, \
LOG_IDENTIFIER
def init_logging(name=None):
app_logger = logging.getLogger(name)
app_logger.setLevel(LOG_ROOT_LEVEL)
app_logger.propagate = False
if TOTEM_ENV == 'local':
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
app_logger.addHandler(handler)
else:
formatter = logging.Formatter(
'{0}[%(process)d]: %(name)s: %(message)s'
.format(LOG_IDENTIFIER))
handler = logging.handlers.SysLogHandler(
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
- handler.setLevel(logging.INFO)
+ handler.setLevel(LOG_ROOT_LEVEL)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger
def init_celery_logging(*args, **kwargs):
init_logging('celery')
| Set log level for handler | ## Code Before:
from __future__ import absolute_import
import logging
from logging.handlers import SysLogHandler
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL, TOTEM_ENV, \
LOG_IDENTIFIER
def init_logging(name=None):
app_logger = logging.getLogger(name)
app_logger.setLevel(LOG_ROOT_LEVEL)
app_logger.propagate = False
if TOTEM_ENV == 'local':
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
app_logger.addHandler(handler)
else:
formatter = logging.Formatter(
'{0}[%(process)d]: %(name)s: %(message)s'
.format(LOG_IDENTIFIER))
handler = logging.handlers.SysLogHandler(
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger
def init_celery_logging(*args, **kwargs):
init_logging('celery')
## Instruction:
Set log level for handler
## Code After:
from __future__ import absolute_import
import logging
from logging.handlers import SysLogHandler
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL, TOTEM_ENV, \
LOG_IDENTIFIER
def init_logging(name=None):
app_logger = logging.getLogger(name)
app_logger.setLevel(LOG_ROOT_LEVEL)
app_logger.propagate = False
if TOTEM_ENV == 'local':
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
app_logger.addHandler(handler)
else:
formatter = logging.Formatter(
'{0}[%(process)d]: %(name)s: %(message)s'
.format(LOG_IDENTIFIER))
handler = logging.handlers.SysLogHandler(
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
handler.setLevel(LOG_ROOT_LEVEL)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger
def init_celery_logging(*args, **kwargs):
init_logging('celery')
| // ... existing code ...
handler.setFormatter(formatter)
handler.setLevel(LOG_ROOT_LEVEL)
app_logger.addHandler(handler)
// ... rest of the code ... |
d3a0c400e50d34b9829b05d26eef5eac878aa091 | enhanced_cbv/views/list.py | enhanced_cbv/views/list.py | from django.core.exceptions import ImproperlyConfigured
from django.views.generic import ListView
class ListFilteredMixin(object):
"""
Mixin that adds support for django-filter
"""
filter_set = None
def get_filter_set(self):
if self.filter_set:
return self.filter_set
else:
raise ImproperlyConfigured(
"ListFilterMixin requires either a definition of "
"'filter_set' or an implementation of 'get_filter()'")
def get_base_queryset(self):
"""
We can decided to either alter the queryset before or after applying the
FilterSet
"""
return super(ListFilteredMixin, self).get_queryset()
def get_constructed_filter(self):
# We need to store the instantiated FilterSet cause we use it in
# get_queryset and in get_context_data
if getattr(self, 'constructed_filter', None):
return self.constructed_filter
else:
f = self.get_filter_set()(self.request.GET,
queryset=self.get_base_queryset())
self.constructed_filter = f
return f
def get_queryset(self):
return self.get_constructed_filter().qs
def get_context_data(self, **kwargs):
kwargs.update({'filter': self.get_constructed_filter()})
return super(ListFilteredMixin, self).get_context_data(**kwargs)
class ListFilteredView(ListFilteredMixin, ListView):
"""
A list view that can be filtered by django-filter
"""
| from django.core.exceptions import ImproperlyConfigured
from django.views.generic import ListView
class ListFilteredMixin(object):
"""
Mixin that adds support for django-filter
"""
filter_set = None
def get_filter_set(self):
if self.filter_set:
return self.filter_set
else:
raise ImproperlyConfigured(
"ListFilterMixin requires either a definition of "
"'filter_set' or an implementation of 'get_filter()'")
def get_filter_set_kwargs(self):
"""
Returns the keyword arguments for instanciating the filterset.
"""
return {
'data': self.request.GET,
'queryset': self.get_base_queryset(),
}
def get_base_queryset(self):
"""
We can decided to either alter the queryset before or after applying the
FilterSet
"""
return super(ListFilteredMixin, self).get_queryset()
def get_constructed_filter(self):
# We need to store the instantiated FilterSet cause we use it in
# get_queryset and in get_context_data
if getattr(self, 'constructed_filter', None):
return self.constructed_filter
else:
f = self.get_filter_set()(**self.get_filter_set_kwargs())
self.constructed_filter = f
return f
def get_queryset(self):
return self.get_constructed_filter().qs
def get_context_data(self, **kwargs):
kwargs.update({'filter': self.get_constructed_filter()})
return super(ListFilteredMixin, self).get_context_data(**kwargs)
class ListFilteredView(ListFilteredMixin, ListView):
"""
A list view that can be filtered by django-filter
"""
| Add get_filter_set_kwargs for instanciating FilterSet with additional arguments | Add get_filter_set_kwargs for instanciating FilterSet with additional arguments | Python | bsd-3-clause | rasca/django-enhanced-cbv,matuu/django-enhanced-cbv,matuu/django-enhanced-cbv,rasca/django-enhanced-cbv | from django.core.exceptions import ImproperlyConfigured
from django.views.generic import ListView
class ListFilteredMixin(object):
"""
Mixin that adds support for django-filter
"""
filter_set = None
def get_filter_set(self):
if self.filter_set:
return self.filter_set
else:
raise ImproperlyConfigured(
"ListFilterMixin requires either a definition of "
"'filter_set' or an implementation of 'get_filter()'")
+ def get_filter_set_kwargs(self):
+ """
+ Returns the keyword arguments for instanciating the filterset.
+ """
+ return {
+ 'data': self.request.GET,
+ 'queryset': self.get_base_queryset(),
+ }
+
def get_base_queryset(self):
"""
We can decided to either alter the queryset before or after applying the
FilterSet
"""
return super(ListFilteredMixin, self).get_queryset()
def get_constructed_filter(self):
# We need to store the instantiated FilterSet cause we use it in
# get_queryset and in get_context_data
if getattr(self, 'constructed_filter', None):
return self.constructed_filter
else:
+ f = self.get_filter_set()(**self.get_filter_set_kwargs())
- f = self.get_filter_set()(self.request.GET,
- queryset=self.get_base_queryset())
self.constructed_filter = f
return f
def get_queryset(self):
return self.get_constructed_filter().qs
def get_context_data(self, **kwargs):
kwargs.update({'filter': self.get_constructed_filter()})
return super(ListFilteredMixin, self).get_context_data(**kwargs)
class ListFilteredView(ListFilteredMixin, ListView):
"""
A list view that can be filtered by django-filter
"""
| Add get_filter_set_kwargs for instanciating FilterSet with additional arguments | ## Code Before:
from django.core.exceptions import ImproperlyConfigured
from django.views.generic import ListView
class ListFilteredMixin(object):
"""
Mixin that adds support for django-filter
"""
filter_set = None
def get_filter_set(self):
if self.filter_set:
return self.filter_set
else:
raise ImproperlyConfigured(
"ListFilterMixin requires either a definition of "
"'filter_set' or an implementation of 'get_filter()'")
def get_base_queryset(self):
"""
We can decided to either alter the queryset before or after applying the
FilterSet
"""
return super(ListFilteredMixin, self).get_queryset()
def get_constructed_filter(self):
# We need to store the instantiated FilterSet cause we use it in
# get_queryset and in get_context_data
if getattr(self, 'constructed_filter', None):
return self.constructed_filter
else:
f = self.get_filter_set()(self.request.GET,
queryset=self.get_base_queryset())
self.constructed_filter = f
return f
def get_queryset(self):
return self.get_constructed_filter().qs
def get_context_data(self, **kwargs):
kwargs.update({'filter': self.get_constructed_filter()})
return super(ListFilteredMixin, self).get_context_data(**kwargs)
class ListFilteredView(ListFilteredMixin, ListView):
"""
A list view that can be filtered by django-filter
"""
## Instruction:
Add get_filter_set_kwargs for instanciating FilterSet with additional arguments
## Code After:
from django.core.exceptions import ImproperlyConfigured
from django.views.generic import ListView
class ListFilteredMixin(object):
"""
Mixin that adds support for django-filter
"""
filter_set = None
def get_filter_set(self):
if self.filter_set:
return self.filter_set
else:
raise ImproperlyConfigured(
"ListFilterMixin requires either a definition of "
"'filter_set' or an implementation of 'get_filter()'")
def get_filter_set_kwargs(self):
"""
Returns the keyword arguments for instanciating the filterset.
"""
return {
'data': self.request.GET,
'queryset': self.get_base_queryset(),
}
def get_base_queryset(self):
"""
We can decided to either alter the queryset before or after applying the
FilterSet
"""
return super(ListFilteredMixin, self).get_queryset()
def get_constructed_filter(self):
# We need to store the instantiated FilterSet cause we use it in
# get_queryset and in get_context_data
if getattr(self, 'constructed_filter', None):
return self.constructed_filter
else:
f = self.get_filter_set()(**self.get_filter_set_kwargs())
self.constructed_filter = f
return f
def get_queryset(self):
return self.get_constructed_filter().qs
def get_context_data(self, **kwargs):
kwargs.update({'filter': self.get_constructed_filter()})
return super(ListFilteredMixin, self).get_context_data(**kwargs)
class ListFilteredView(ListFilteredMixin, ListView):
"""
A list view that can be filtered by django-filter
"""
| ...
def get_filter_set_kwargs(self):
"""
Returns the keyword arguments for instanciating the filterset.
"""
return {
'data': self.request.GET,
'queryset': self.get_base_queryset(),
}
def get_base_queryset(self):
...
else:
f = self.get_filter_set()(**self.get_filter_set_kwargs())
self.constructed_filter = f
... |
eb2b91d30244fd44b45ffc21b963256150b59152 | frappe/patches/v11_0/reload_and_rename_view_log.py | frappe/patches/v11_0/reload_and_rename_view_log.py | import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
frappe.reload_doc('core', 'doctype', 'view_log', force=True)
frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`")
frappe.delete_doc('DocType', 'View log')
frappe.reload_doc('core', 'doctype', 'view_log', force=True)
else:
frappe.reload_doc('core', 'doctype', 'view_log')
| import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
# for mac users direct renaming would not work since mysql for mac saves table name in lower case
# so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589 ,
# https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
# here we are creating a temp table to store view log data
frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`")
# deleting old View log table
frappe.db.sql("DROP table `tabView log`")
frappe.delete_doc('DocType', 'View log')
# reloading view log doctype to create `tabView Log` table
frappe.reload_doc('core', 'doctype', 'view_log')
frappe.db.commit()
# Move the data to newly created `tabView Log` table
frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`")
# Delete temporary table
frappe.db.sql("DROP table `ViewLogTemp`")
else:
frappe.reload_doc('core', 'doctype', 'view_log')
| Fix rename view log patch for mac users | Fix rename view log patch for mac users
for mac users direct renaming would not work
since mysql for mac saves table name in lower case,
so while renaming `tabView log` to `tabView Log` we get
"Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589
https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
| Python | mit | mhbu50/frappe,yashodhank/frappe,vjFaLk/frappe,adityahase/frappe,mhbu50/frappe,almeidapaulopt/frappe,saurabh6790/frappe,adityahase/frappe,vjFaLk/frappe,yashodhank/frappe,saurabh6790/frappe,frappe/frappe,frappe/frappe,vjFaLk/frappe,yashodhank/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,StrellaGroup/frappe,yashodhank/frappe,vjFaLk/frappe,mhbu50/frappe,adityahase/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,saurabh6790/frappe,frappe/frappe,saurabh6790/frappe,adityahase/frappe | import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
- frappe.reload_doc('core', 'doctype', 'view_log', force=True)
- frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`")
+ # for mac users direct renaming would not work since mysql for mac saves table name in lower case
+ # so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error
+ # more info https://stackoverflow.com/a/44753093/5955589 ,
+ # https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
+
+ # here we are creating a temp table to store view log data
+ frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`")
+
+ # deleting old View log table
+ frappe.db.sql("DROP table `tabView log`")
frappe.delete_doc('DocType', 'View log')
+
+ # reloading view log doctype to create `tabView Log` table
- frappe.reload_doc('core', 'doctype', 'view_log', force=True)
+ frappe.reload_doc('core', 'doctype', 'view_log')
+ frappe.db.commit()
+
+ # Move the data to newly created `tabView Log` table
+ frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`")
+
+ # Delete temporary table
+ frappe.db.sql("DROP table `ViewLogTemp`")
else:
frappe.reload_doc('core', 'doctype', 'view_log')
| Fix rename view log patch for mac users | ## Code Before:
import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
frappe.reload_doc('core', 'doctype', 'view_log', force=True)
frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`")
frappe.delete_doc('DocType', 'View log')
frappe.reload_doc('core', 'doctype', 'view_log', force=True)
else:
frappe.reload_doc('core', 'doctype', 'view_log')
## Instruction:
Fix rename view log patch for mac users
## Code After:
import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
# for mac users direct renaming would not work since mysql for mac saves table name in lower case
# so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589 ,
# https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
# here we are creating a temp table to store view log data
frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`")
# deleting old View log table
frappe.db.sql("DROP table `tabView log`")
frappe.delete_doc('DocType', 'View log')
# reloading view log doctype to create `tabView Log` table
frappe.reload_doc('core', 'doctype', 'view_log')
frappe.db.commit()
# Move the data to newly created `tabView Log` table
frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`")
# Delete temporary table
frappe.db.sql("DROP table `ViewLogTemp`")
else:
frappe.reload_doc('core', 'doctype', 'view_log')
| # ... existing code ...
if frappe.db.exists('DocType', 'View log'):
# for mac users direct renaming would not work since mysql for mac saves table name in lower case
# so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589 ,
# https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
# here we are creating a temp table to store view log data
frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`")
# deleting old View log table
frappe.db.sql("DROP table `tabView log`")
frappe.delete_doc('DocType', 'View log')
# reloading view log doctype to create `tabView Log` table
frappe.reload_doc('core', 'doctype', 'view_log')
frappe.db.commit()
# Move the data to newly created `tabView Log` table
frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`")
# Delete temporary table
frappe.db.sql("DROP table `ViewLogTemp`")
else:
# ... rest of the code ... |
16d0f3f0ca4ce59f08e598b6f9f25bb6dc8e1713 | benchmark/benchmark.py | benchmark/benchmark.py | import time
import sys
from utils import format_duration
if sys.platform == "win32":
default_timer = time.clock
else:
default_timer = time.time
class Benchmark():
def __init__(self, func, name="", repeat=5):
self.func = func
self.repeat = repeat
self.name = name
self.verbose = False
def run(self, conn):
self.results = []
for x in range(self.repeat):
start = default_timer()
self.func()
end = default_timer()
elapsed = end - start
self.results.append(elapsed)
conn.rollback()
return min(self.results)
def __str__(self):
s = format_duration(min(self.results))
if self.verbose:
s_min = format_duration(min(self.results))
s_avg = format_duration(sum(self.results) / len(self.results))
s_max = format_duration(max(self.results))
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
return " ".join(s)
| import time
import sys
from utils import format_duration
if sys.platform == "win32":
default_timer = time.clock
else:
default_timer = time.time
class Benchmark():
def __init__(self, func, name="", repeat=5):
self.func = func
self.repeat = repeat
self.name = name
self.verbose = False
def run(self, conn):
self.results = []
for x in range(self.repeat):
start = default_timer()
self.func()
end = default_timer()
elapsed = end - start
self.results.append(elapsed)
conn.rollback()
return min(self.results)
def __str__(self):
s = format_duration(min(self.results))
if self.verbose:
s_min = format_duration(min(self.results))
s_avg = format_duration(sum(self.results) / len(self.results))
s_max = format_duration(max(self.results))
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
return s
| Fix bad console output formatting | Fix bad console output formatting
| Python | mit | jameshy/libtree,conceptsandtraining/libtree | import time
import sys
from utils import format_duration
if sys.platform == "win32":
default_timer = time.clock
else:
default_timer = time.time
class Benchmark():
def __init__(self, func, name="", repeat=5):
self.func = func
self.repeat = repeat
self.name = name
self.verbose = False
def run(self, conn):
self.results = []
for x in range(self.repeat):
start = default_timer()
self.func()
end = default_timer()
elapsed = end - start
self.results.append(elapsed)
conn.rollback()
return min(self.results)
def __str__(self):
s = format_duration(min(self.results))
if self.verbose:
s_min = format_duration(min(self.results))
s_avg = format_duration(sum(self.results) / len(self.results))
s_max = format_duration(max(self.results))
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
- return " ".join(s)
+ return s
| Fix bad console output formatting | ## Code Before:
import time
import sys
from utils import format_duration
if sys.platform == "win32":
default_timer = time.clock
else:
default_timer = time.time
class Benchmark():
def __init__(self, func, name="", repeat=5):
self.func = func
self.repeat = repeat
self.name = name
self.verbose = False
def run(self, conn):
self.results = []
for x in range(self.repeat):
start = default_timer()
self.func()
end = default_timer()
elapsed = end - start
self.results.append(elapsed)
conn.rollback()
return min(self.results)
def __str__(self):
s = format_duration(min(self.results))
if self.verbose:
s_min = format_duration(min(self.results))
s_avg = format_duration(sum(self.results) / len(self.results))
s_max = format_duration(max(self.results))
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
return " ".join(s)
## Instruction:
Fix bad console output formatting
## Code After:
import time
import sys
from utils import format_duration
if sys.platform == "win32":
default_timer = time.clock
else:
default_timer = time.time
class Benchmark():
def __init__(self, func, name="", repeat=5):
self.func = func
self.repeat = repeat
self.name = name
self.verbose = False
def run(self, conn):
self.results = []
for x in range(self.repeat):
start = default_timer()
self.func()
end = default_timer()
elapsed = end - start
self.results.append(elapsed)
conn.rollback()
return min(self.results)
def __str__(self):
s = format_duration(min(self.results))
if self.verbose:
s_min = format_duration(min(self.results))
s_avg = format_duration(sum(self.results) / len(self.results))
s_max = format_duration(max(self.results))
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
return s
| // ... existing code ...
s_avg, s_max, s_all)
return s
// ... rest of the code ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.