commit
stringlengths
40
40
old_file
stringlengths
4
118
new_file
stringlengths
4
118
old_contents
stringlengths
10
2.94k
new_contents
stringlengths
21
3.18k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
5
43k
ndiff
stringlengths
52
3.32k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
fuzzy_diff
stringlengths
16
3.18k
0ea1153438c1d98232a921c8d14d401a541e95fd
examples/regex/regex_parser.py
examples/regex/regex_parser.py
from __future__ import print_function, division, absolute_import, unicode_literals from parser_base import RegexParser import model class RegexSemantics(object): def __init__(self): super(RegexSemantics, self).__init__() self._count = 0 def START(self, ast): return model.Regex(ast) def CHOICE(self, ast): return model.Choice(ast.opts) def SEQUENCE(self, ast): if not ast.terms: return model.Empty() elif len(ast.terms) < 2: return ast.terms[0] else: return model.Sequence(ast.terms) def CLOSURE(self, ast): return model.Closure(ast) def SUBEXP(self, ast): return ast def LITERAL(self, ast): return model.Literal(ast) def translate(regex, trace=False): parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() return model.render()
from __future__ import print_function, division, absolute_import, unicode_literals from parser_base import RegexParser import model class RegexSemantics(object): def __init__(self): super(RegexSemantics, self).__init__() self._count = 0 def START(self, ast): return model.Regex(ast) def CHOICE(self, ast): return model.Choice(ast.opts) def SEQUENCE(self, ast): if not ast.terms: return model.Empty() elif len(ast.terms) < 2: return ast.terms[0] else: return model.Sequence(ast.terms) def CLOSURE(self, ast): return model.Closure(ast) def SUBEXP(self, ast): return ast def LITERAL(self, ast): return model.Literal(ast) def translate(regex, trace=False): parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() return model.render().encode("ascii")
Fix regex example, the model must not be a unicode string.
Fix regex example, the model must not be a unicode string.
Python
bsd-2-clause
vmuriart/grako,frnknglrt/grako
from __future__ import print_function, division, absolute_import, unicode_literals from parser_base import RegexParser import model class RegexSemantics(object): def __init__(self): super(RegexSemantics, self).__init__() self._count = 0 def START(self, ast): return model.Regex(ast) def CHOICE(self, ast): return model.Choice(ast.opts) def SEQUENCE(self, ast): if not ast.terms: return model.Empty() elif len(ast.terms) < 2: return ast.terms[0] else: return model.Sequence(ast.terms) def CLOSURE(self, ast): return model.Closure(ast) def SUBEXP(self, ast): return ast def LITERAL(self, ast): return model.Literal(ast) def translate(regex, trace=False): parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() - return model.render() + return model.render().encode("ascii")
Fix regex example, the model must not be a unicode string.
## Code Before: from __future__ import print_function, division, absolute_import, unicode_literals from parser_base import RegexParser import model class RegexSemantics(object): def __init__(self): super(RegexSemantics, self).__init__() self._count = 0 def START(self, ast): return model.Regex(ast) def CHOICE(self, ast): return model.Choice(ast.opts) def SEQUENCE(self, ast): if not ast.terms: return model.Empty() elif len(ast.terms) < 2: return ast.terms[0] else: return model.Sequence(ast.terms) def CLOSURE(self, ast): return model.Closure(ast) def SUBEXP(self, ast): return ast def LITERAL(self, ast): return model.Literal(ast) def translate(regex, trace=False): parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() return model.render() ## Instruction: Fix regex example, the model must not be a unicode string. ## Code After: from __future__ import print_function, division, absolute_import, unicode_literals from parser_base import RegexParser import model class RegexSemantics(object): def __init__(self): super(RegexSemantics, self).__init__() self._count = 0 def START(self, ast): return model.Regex(ast) def CHOICE(self, ast): return model.Choice(ast.opts) def SEQUENCE(self, ast): if not ast.terms: return model.Empty() elif len(ast.terms) < 2: return ast.terms[0] else: return model.Sequence(ast.terms) def CLOSURE(self, ast): return model.Closure(ast) def SUBEXP(self, ast): return ast def LITERAL(self, ast): return model.Literal(ast) def translate(regex, trace=False): parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() return model.render().encode("ascii")
... model.set_rule_numbers() return model.render().encode("ascii") ...
cc3a0f230c2f64fd2e4d974c536e9d2e99d89992
tilezilla/errors.py
tilezilla/errors.py
class ConfigException(Exception): pass class FillValueException(Exception): """ All of a tile is "fill" values """ pass
class ConfigException(Exception): pass class FillValueException(Exception): """ All of a tile is "fill" values """ pass class ProductNotFoundException(Exception): pass
Add exception for empty db search return
Add exception for empty db search return
Python
bsd-3-clause
ceholden/tilezilla,ceholden/landsat_tile,ceholden/landsat_tiles,ceholden/landsat_tile,ceholden/landsat_tiles
class ConfigException(Exception): pass class FillValueException(Exception): """ All of a tile is "fill" values """ pass + + class ProductNotFoundException(Exception): + pass +
Add exception for empty db search return
## Code Before: class ConfigException(Exception): pass class FillValueException(Exception): """ All of a tile is "fill" values """ pass ## Instruction: Add exception for empty db search return ## Code After: class ConfigException(Exception): pass class FillValueException(Exception): """ All of a tile is "fill" values """ pass class ProductNotFoundException(Exception): pass
// ... existing code ... pass class ProductNotFoundException(Exception): pass // ... rest of the code ...
19af4b5c8c849750dd0885ea4fcfb651545b7985
migrations/002_add_month_start.py
migrations/002_add_month_start.py
from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') record = Record(document) collection.save(record.to_mongo())
from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') if '_updated_at' in document: document.pop('_updated_at') record = Record(document) collection.save(record.to_mongo())
Remove disallowed fields before resaving on migrations.
Remove disallowed fields before resaving on migrations. - TODO: fix this properly.
Python
mit
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') + if '_updated_at' in document: + document.pop('_updated_at') record = Record(document) collection.save(record.to_mongo())
Remove disallowed fields before resaving on migrations.
## Code Before: from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') record = Record(document) collection.save(record.to_mongo()) ## Instruction: Remove disallowed fields before resaving on migrations. ## Code After: from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') if '_updated_at' in document: document.pop('_updated_at') record = Record(document) collection.save(record.to_mongo())
... document.pop('_week_start_at') if '_updated_at' in document: document.pop('_updated_at') record = Record(document) ...
35bb090dd926d4327fa046ee2da64c4cb5b38a47
app/notify_client/email_branding_client.py
app/notify_client/email_branding_client.py
from app.notify_client import NotifyAdminAPIClient, cache class EmailBrandingClient(NotifyAdminAPIClient): @cache.set("email_branding-{branding_id}") def get_email_branding(self, branding_id): return self.get(url="/email-branding/{}".format(branding_id)) @cache.set("email_branding") def get_all_email_branding(self, sort_key=None): brandings = self.get(url="/email-branding")["email_branding"] if sort_key and sort_key in brandings[0]: brandings.sort(key=lambda branding: branding[sort_key].lower()) return brandings @cache.delete("email_branding") def create_email_branding(self, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding", data=data) @cache.delete("email_branding") @cache.delete("email_branding-{branding_id}") def update_email_branding(self, branding_id, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding/{}".format(branding_id), data=data) email_branding_client = EmailBrandingClient()
from app.notify_client import NotifyAdminAPIClient, cache class EmailBrandingClient(NotifyAdminAPIClient): @cache.set("email_branding-{branding_id}") def get_email_branding(self, branding_id): return self.get(url="/email-branding/{}".format(branding_id)) @cache.set("email_branding") def get_all_email_branding(self): return self.get(url="/email-branding")["email_branding"] @cache.delete("email_branding") def create_email_branding(self, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding", data=data) @cache.delete("email_branding") @cache.delete("email_branding-{branding_id}") def update_email_branding(self, branding_id, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding/{}".format(branding_id), data=data) email_branding_client = EmailBrandingClient()
Remove old way of sorting
Remove old way of sorting This is redundant since the model layer has built-in sorting. It’s also not a good separation of concerns for something presentational (sort order) to be in the API client layer.
Python
mit
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
from app.notify_client import NotifyAdminAPIClient, cache class EmailBrandingClient(NotifyAdminAPIClient): @cache.set("email_branding-{branding_id}") def get_email_branding(self, branding_id): return self.get(url="/email-branding/{}".format(branding_id)) @cache.set("email_branding") - def get_all_email_branding(self, sort_key=None): + def get_all_email_branding(self): - brandings = self.get(url="/email-branding")["email_branding"] + return self.get(url="/email-branding")["email_branding"] - if sort_key and sort_key in brandings[0]: - brandings.sort(key=lambda branding: branding[sort_key].lower()) - return brandings @cache.delete("email_branding") def create_email_branding(self, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding", data=data) @cache.delete("email_branding") @cache.delete("email_branding-{branding_id}") def update_email_branding(self, branding_id, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding/{}".format(branding_id), data=data) email_branding_client = EmailBrandingClient()
Remove old way of sorting
## Code Before: from app.notify_client import NotifyAdminAPIClient, cache class EmailBrandingClient(NotifyAdminAPIClient): @cache.set("email_branding-{branding_id}") def get_email_branding(self, branding_id): return self.get(url="/email-branding/{}".format(branding_id)) @cache.set("email_branding") def get_all_email_branding(self, sort_key=None): brandings = self.get(url="/email-branding")["email_branding"] if sort_key and sort_key in brandings[0]: brandings.sort(key=lambda branding: branding[sort_key].lower()) return brandings @cache.delete("email_branding") def create_email_branding(self, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding", data=data) @cache.delete("email_branding") @cache.delete("email_branding-{branding_id}") def update_email_branding(self, branding_id, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding/{}".format(branding_id), data=data) email_branding_client = EmailBrandingClient() ## Instruction: Remove old way of sorting ## Code After: from app.notify_client import NotifyAdminAPIClient, cache class EmailBrandingClient(NotifyAdminAPIClient): @cache.set("email_branding-{branding_id}") def get_email_branding(self, branding_id): return self.get(url="/email-branding/{}".format(branding_id)) @cache.set("email_branding") def get_all_email_branding(self): return self.get(url="/email-branding")["email_branding"] @cache.delete("email_branding") def create_email_branding(self, logo, name, text, colour, brand_type): data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding", data=data) @cache.delete("email_branding") @cache.delete("email_branding-{branding_id}") def update_email_branding(self, branding_id, logo, name, text, colour, brand_type): 
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type} return self.post(url="/email-branding/{}".format(branding_id), data=data) email_branding_client = EmailBrandingClient()
# ... existing code ... @cache.set("email_branding") def get_all_email_branding(self): return self.get(url="/email-branding")["email_branding"] # ... rest of the code ...
ee4d08b4795ed0818a48d97f5635c7ec2ba163fb
shopify_auth/backends.py
shopify_auth/backends.py
from django.contrib.auth.backends import RemoteUserBackend class ShopUserBackend(RemoteUserBackend): def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs): if not myshopify_domain or not token or not request: return user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain) if not user: return user.token = token user.save(update_fields=['token']) return user
from django.contrib.auth.backends import RemoteUserBackend class ShopUserBackend(RemoteUserBackend): def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs): if not myshopify_domain or not token or not request: return try: user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain) except TypeError: # Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain) if not user: return user.token = token user.save(update_fields=['token']) return user
Add regression fix for Django < 1.11
Add regression fix for Django < 1.11
Python
mit
discolabs/django-shopify-auth,discolabs/django-shopify-auth
from django.contrib.auth.backends import RemoteUserBackend class ShopUserBackend(RemoteUserBackend): - def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs): if not myshopify_domain or not token or not request: return + try: - user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain) + user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain) + except TypeError: + # Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend + user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain) + if not user: return user.token = token user.save(update_fields=['token']) return user
Add regression fix for Django < 1.11
## Code Before: from django.contrib.auth.backends import RemoteUserBackend class ShopUserBackend(RemoteUserBackend): def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs): if not myshopify_domain or not token or not request: return user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain) if not user: return user.token = token user.save(update_fields=['token']) return user ## Instruction: Add regression fix for Django < 1.11 ## Code After: from django.contrib.auth.backends import RemoteUserBackend class ShopUserBackend(RemoteUserBackend): def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs): if not myshopify_domain or not token or not request: return try: user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain) except TypeError: # Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain) if not user: return user.token = token user.save(update_fields=['token']) return user
# ... existing code ... class ShopUserBackend(RemoteUserBackend): def authenticate(self, request=None, myshopify_domain=None, token=None, **kwargs): # ... modified code ... try: user = super(ShopUserBackend, self).authenticate(request=request, remote_user=myshopify_domain) except TypeError: # Django < 1.11 does not have request as a mandatory parameter for RemoteUserBackend user = super(ShopUserBackend, self).authenticate(remote_user=myshopify_domain) if not user: # ... rest of the code ...
cc06a15f734a6ed46561a99d1040a08582833a09
src/puzzle/heuristics/acrostic.py
src/puzzle/heuristics/acrostic.py
from puzzle.heuristics.acrostics import _acrostic_iter class Acrostic(_acrostic_iter.AcrosticIter): """Best available Acrostic solver.""" pass
from puzzle.heuristics.acrostics import _acrostic_search class Acrostic(_acrostic_search.AcrosticSearch): """Best available Acrostic solver.""" pass
Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).
Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).
Python
mit
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
- from puzzle.heuristics.acrostics import _acrostic_iter + from puzzle.heuristics.acrostics import _acrostic_search - class Acrostic(_acrostic_iter.AcrosticIter): + class Acrostic(_acrostic_search.AcrosticSearch): """Best available Acrostic solver.""" pass
Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).
## Code Before: from puzzle.heuristics.acrostics import _acrostic_iter class Acrostic(_acrostic_iter.AcrosticIter): """Best available Acrostic solver.""" pass ## Instruction: Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS). ## Code After: from puzzle.heuristics.acrostics import _acrostic_search class Acrostic(_acrostic_search.AcrosticSearch): """Best available Acrostic solver.""" pass
# ... existing code ... from puzzle.heuristics.acrostics import _acrostic_search # ... modified code ... class Acrostic(_acrostic_search.AcrosticSearch): """Best available Acrostic solver.""" # ... rest of the code ...
c383e06d51d4e59d400ab6fd62eff2359ab4e728
python/the_birthday_bar.py
python/the_birthday_bar.py
import itertools import collections def sliding_window(n, seq): """ Copied from toolz https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window A sequence of overlapping subsequences >>> list(sliding_window(2, [1, 2, 3, 4])) [(1, 2), (2, 3), (3, 4)] This function creates a sliding window suitable for transformations like sliding means / smoothing >>> mean = lambda seq: float(sum(seq)) / len(seq) >>> list(map(mean, sliding_window(2, [1, 2, 3, 4]))) [1.5, 2.5, 3.5] """ return zip(*(collections.deque(itertools.islice(it, i), 0) or it for i, it in enumerate(itertools.tee(seq, n)))) def birthday_chocolate(squares, day, month): birthday_chocolates = 0 for piece in sliding_window(month, squares): if sum(piece) == day: birthday_chocolates += 1 return birthday_chocolates _ = int(input().strip()) SQUARES = list(map(int, input().strip().split(' '))) DAY, MONTH = map(int, input().strip().split(' ')) print(birthday_chocolate(SQUARES, DAY, MONTH))
import itertools import collections def sliding_window(n, seq): """ Copied from toolz https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window A sequence of overlapping subsequences >>> list(sliding_window(2, [1, 2, 3, 4])) [(1, 2), (2, 3), (3, 4)] This function creates a sliding window suitable for transformations like sliding means / smoothing >>> mean = lambda seq: float(sum(seq)) / len(seq) >>> list(map(mean, sliding_window(2, [1, 2, 3, 4]))) [1.5, 2.5, 3.5] """ return zip(*(collections.deque(itertools.islice(it, i), 0) or it for i, it in enumerate(itertools.tee(seq, n)))) def birthday_chocolate(squares, day, month): consecutive_sums = map(lambda piece: sum(piece), sliding_window(month, squares)) birthday_bars = list(filter(lambda consecutive_sum: day == consecutive_sum, consecutive_sums)) return len(birthday_bars) _ = int(input().strip()) SQUARES = list(map(int, input().strip().split(' '))) DAY, MONTH = map(int, input().strip().split(' ')) print(birthday_chocolate(SQUARES, DAY, MONTH))
Refactor to use map and filter
Refactor to use map and filter
Python
mit
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
import itertools import collections def sliding_window(n, seq): """ Copied from toolz https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window A sequence of overlapping subsequences >>> list(sliding_window(2, [1, 2, 3, 4])) [(1, 2), (2, 3), (3, 4)] This function creates a sliding window suitable for transformations like sliding means / smoothing >>> mean = lambda seq: float(sum(seq)) / len(seq) >>> list(map(mean, sliding_window(2, [1, 2, 3, 4]))) [1.5, 2.5, 3.5] """ return zip(*(collections.deque(itertools.islice(it, i), 0) or it for i, it in enumerate(itertools.tee(seq, n)))) def birthday_chocolate(squares, day, month): + consecutive_sums = map(lambda piece: sum(piece), sliding_window(month, squares)) + birthday_bars = list(filter(lambda consecutive_sum: day == consecutive_sum, + consecutive_sums)) + return len(birthday_bars) - birthday_chocolates = 0 - for piece in sliding_window(month, squares): - if sum(piece) == day: - birthday_chocolates += 1 - return birthday_chocolates _ = int(input().strip()) SQUARES = list(map(int, input().strip().split(' '))) DAY, MONTH = map(int, input().strip().split(' ')) print(birthday_chocolate(SQUARES, DAY, MONTH))
Refactor to use map and filter
## Code Before: import itertools import collections def sliding_window(n, seq): """ Copied from toolz https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window A sequence of overlapping subsequences >>> list(sliding_window(2, [1, 2, 3, 4])) [(1, 2), (2, 3), (3, 4)] This function creates a sliding window suitable for transformations like sliding means / smoothing >>> mean = lambda seq: float(sum(seq)) / len(seq) >>> list(map(mean, sliding_window(2, [1, 2, 3, 4]))) [1.5, 2.5, 3.5] """ return zip(*(collections.deque(itertools.islice(it, i), 0) or it for i, it in enumerate(itertools.tee(seq, n)))) def birthday_chocolate(squares, day, month): birthday_chocolates = 0 for piece in sliding_window(month, squares): if sum(piece) == day: birthday_chocolates += 1 return birthday_chocolates _ = int(input().strip()) SQUARES = list(map(int, input().strip().split(' '))) DAY, MONTH = map(int, input().strip().split(' ')) print(birthday_chocolate(SQUARES, DAY, MONTH)) ## Instruction: Refactor to use map and filter ## Code After: import itertools import collections def sliding_window(n, seq): """ Copied from toolz https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window A sequence of overlapping subsequences >>> list(sliding_window(2, [1, 2, 3, 4])) [(1, 2), (2, 3), (3, 4)] This function creates a sliding window suitable for transformations like sliding means / smoothing >>> mean = lambda seq: float(sum(seq)) / len(seq) >>> list(map(mean, sliding_window(2, [1, 2, 3, 4]))) [1.5, 2.5, 3.5] """ return zip(*(collections.deque(itertools.islice(it, i), 0) or it for i, it in enumerate(itertools.tee(seq, n)))) def birthday_chocolate(squares, day, month): consecutive_sums = map(lambda piece: sum(piece), sliding_window(month, squares)) birthday_bars = list(filter(lambda consecutive_sum: day == consecutive_sum, consecutive_sums)) return len(birthday_bars) _ = int(input().strip()) SQUARES = list(map(int, input().strip().split(' '))) DAY, MONTH 
= map(int, input().strip().split(' ')) print(birthday_chocolate(SQUARES, DAY, MONTH))
// ... existing code ... def birthday_chocolate(squares, day, month): consecutive_sums = map(lambda piece: sum(piece), sliding_window(month, squares)) birthday_bars = list(filter(lambda consecutive_sum: day == consecutive_sum, consecutive_sums)) return len(birthday_bars) // ... rest of the code ...
fca148d85b0deb16c988473ddab651529653e9de
cheroot/__init__.py
cheroot/__init__.py
"""High-performance, pure-Python HTTP server used by CherryPy.""" from __future__ import absolute_import, division, print_function __metaclass__ = type try: import pkg_resources except ImportError: pass try: __version__ = pkg_resources.get_distribution('cheroot').version except Exception: __version__ = 'unknown'
"""High-performance, pure-Python HTTP server used by CherryPy.""" try: import pkg_resources except ImportError: pass try: __version__ = pkg_resources.get_distribution('cheroot').version except Exception: __version__ = 'unknown'
Remove compatibility code from cheroot
Remove compatibility code from cheroot
Python
bsd-3-clause
cherrypy/cheroot
"""High-performance, pure-Python HTTP server used by CherryPy.""" - - from __future__ import absolute_import, division, print_function - __metaclass__ = type try: import pkg_resources except ImportError: pass try: __version__ = pkg_resources.get_distribution('cheroot').version except Exception: __version__ = 'unknown'
Remove compatibility code from cheroot
## Code Before: """High-performance, pure-Python HTTP server used by CherryPy.""" from __future__ import absolute_import, division, print_function __metaclass__ = type try: import pkg_resources except ImportError: pass try: __version__ = pkg_resources.get_distribution('cheroot').version except Exception: __version__ = 'unknown' ## Instruction: Remove compatibility code from cheroot ## Code After: """High-performance, pure-Python HTTP server used by CherryPy.""" try: import pkg_resources except ImportError: pass try: __version__ = pkg_resources.get_distribution('cheroot').version except Exception: __version__ = 'unknown'
... """High-performance, pure-Python HTTP server used by CherryPy.""" ...
4f72617702881bde979648a8ddf240b0d721cf4e
girder/app/app/__init__.py
girder/app/app/__init__.py
from .configuration import Configuration from girder.utility import setting_utilities from .constants import Features from girder.plugin import GirderPlugin @setting_utilities.validator({ Features.NOTEBOOKS }) class AppPlugin(GirderPlugin): DISPLAY_NAME = 'OpenChemistry App' def validateSettings(self, event): pass def load(self, info): info['apiRoot'].configuration = Configuration()
from .configuration import Configuration from girder.utility import setting_utilities from .constants import Features from girder.plugin import GirderPlugin @setting_utilities.validator({ Features.NOTEBOOKS }) def validateSettings(event): pass class AppPlugin(GirderPlugin): DISPLAY_NAME = 'OpenChemistry App' def load(self, info): info['apiRoot'].configuration = Configuration()
Put validateSettings() after girder decorator
Put validateSettings() after girder decorator This was mistakenly put inside the class body. It needs to be after the girder decorator instead. Signed-off-by: Patrick Avery <[email protected]>
Python
bsd-3-clause
OpenChemistry/mongochemserver
from .configuration import Configuration from girder.utility import setting_utilities from .constants import Features from girder.plugin import GirderPlugin @setting_utilities.validator({ Features.NOTEBOOKS }) + def validateSettings(event): + pass class AppPlugin(GirderPlugin): DISPLAY_NAME = 'OpenChemistry App' - def validateSettings(self, event): - pass - def load(self, info): info['apiRoot'].configuration = Configuration()
Put validateSettings() after girder decorator
## Code Before: from .configuration import Configuration from girder.utility import setting_utilities from .constants import Features from girder.plugin import GirderPlugin @setting_utilities.validator({ Features.NOTEBOOKS }) class AppPlugin(GirderPlugin): DISPLAY_NAME = 'OpenChemistry App' def validateSettings(self, event): pass def load(self, info): info['apiRoot'].configuration = Configuration() ## Instruction: Put validateSettings() after girder decorator ## Code After: from .configuration import Configuration from girder.utility import setting_utilities from .constants import Features from girder.plugin import GirderPlugin @setting_utilities.validator({ Features.NOTEBOOKS }) def validateSettings(event): pass class AppPlugin(GirderPlugin): DISPLAY_NAME = 'OpenChemistry App' def load(self, info): info['apiRoot'].configuration = Configuration()
... }) def validateSettings(event): pass ... def load(self, info): ...
075d6f1b8f232c1ae7cb7d288da8f8d1040f49c9
hooks/pre_gen_project.py
hooks/pre_gen_project.py
repo_name = '{{ cookiecutter.repo_name }}' assert_msg = 'Repo name should be valid Python identifier!' if hasattr(repo_name, 'isidentifier'): assert repo_name.isidentifier(), assert_msg else: import re identifier_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") assert bool(identifier_re.match(repo_name)), assert_msg
import sys import cookiecutter # Ensure cookiecutter is recent enough cookiecutter_min_version = '1.3.0' if cookiecutter.__version__ < cookiecutter_min_version: print("--------------------------------------------------------------") print("!! Your cookiecutter is too old, at least %s is required !!" % cookiecutter_min_version) print("--------------------------------------------------------------") sys.exit(1) # Ensure the selected repo name is usable repo_name = '{{ cookiecutter.repo_name }}' assert_msg = 'Repo name should be valid Python identifier!' if hasattr(repo_name, 'isidentifier'): assert repo_name.isidentifier(), assert_msg else: import re identifier_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") assert bool(identifier_re.match(repo_name)), assert_msg
Add check for cookiecutter version - at least 1.3.0 is required now
Add check for cookiecutter version - at least 1.3.0 is required now
Python
isc
thorgate/django-project-template,thorgate/django-project-template,thorgate/django-project-template,thorgate/django-project-template,thorgate/django-project-template
+ import sys + + import cookiecutter + + + # Ensure cookiecutter is recent enough + cookiecutter_min_version = '1.3.0' + if cookiecutter.__version__ < cookiecutter_min_version: + print("--------------------------------------------------------------") + print("!! Your cookiecutter is too old, at least %s is required !!" % cookiecutter_min_version) + print("--------------------------------------------------------------") + sys.exit(1) + + + # Ensure the selected repo name is usable repo_name = '{{ cookiecutter.repo_name }}' assert_msg = 'Repo name should be valid Python identifier!' if hasattr(repo_name, 'isidentifier'): assert repo_name.isidentifier(), assert_msg else: import re identifier_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") assert bool(identifier_re.match(repo_name)), assert_msg
Add check for cookiecutter version - at least 1.3.0 is required now
## Code Before: repo_name = '{{ cookiecutter.repo_name }}' assert_msg = 'Repo name should be valid Python identifier!' if hasattr(repo_name, 'isidentifier'): assert repo_name.isidentifier(), assert_msg else: import re identifier_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") assert bool(identifier_re.match(repo_name)), assert_msg ## Instruction: Add check for cookiecutter version - at least 1.3.0 is required now ## Code After: import sys import cookiecutter # Ensure cookiecutter is recent enough cookiecutter_min_version = '1.3.0' if cookiecutter.__version__ < cookiecutter_min_version: print("--------------------------------------------------------------") print("!! Your cookiecutter is too old, at least %s is required !!" % cookiecutter_min_version) print("--------------------------------------------------------------") sys.exit(1) # Ensure the selected repo name is usable repo_name = '{{ cookiecutter.repo_name }}' assert_msg = 'Repo name should be valid Python identifier!' if hasattr(repo_name, 'isidentifier'): assert repo_name.isidentifier(), assert_msg else: import re identifier_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") assert bool(identifier_re.match(repo_name)), assert_msg
# ... existing code ... import sys import cookiecutter # Ensure cookiecutter is recent enough cookiecutter_min_version = '1.3.0' if cookiecutter.__version__ < cookiecutter_min_version: print("--------------------------------------------------------------") print("!! Your cookiecutter is too old, at least %s is required !!" % cookiecutter_min_version) print("--------------------------------------------------------------") sys.exit(1) # Ensure the selected repo name is usable repo_name = '{{ cookiecutter.repo_name }}' # ... rest of the code ...
52d9ed9c08ef0686a891e3428349b70d74a7ecf8
scripts/munge_fah_data.py
scripts/munge_fah_data.py
import numpy as np import os import glob import mdtraj as md import fahmunge import pandas as pd projects = pd.read_csv("./projects.csv", index_col=0) output_path = "/data/choderalab/fah/munged/" for (project, location, pdb) in projects.itertuples(): print(project, location, pdb) allatom_output_path = os.path.join(output_path, str(project), "allatoms/") protein_output_path = os.path.join(output_path, str(project), "protein/") fahmunge.automation.make_path(allatom_output_path) fahmunge.automation.make_path(protein_output_path) fahmunge.automation.merge_fah_trajectories(location, allatom_output_path, pdb) trj0 = md.load(pdb) # Hacky temporary solution. top, bonds = trj0.top.to_dataframe() protein_atom_indices = top.index[top.chainID == 0].values fahmunge.automation.strip_water(allatom_output_path, protein_output_path, protein_atom_indices)
import numpy as np import os import glob import mdtraj as md import fahmunge import pandas as pd projects = pd.read_csv("./projects.csv", index_col=0) output_path = "/data/choderalab/fah/munged/" for (project, location, pdb) in projects.itertuples(): print(project, location, pdb) allatom_output_path = os.path.join(output_path, "allatoms/", "%s/" % project) protein_output_path = os.path.join(output_path, "protein/", "%s/" % project) fahmunge.automation.make_path(allatom_output_path) fahmunge.automation.make_path(protein_output_path) fahmunge.automation.merge_fah_trajectories(location, allatom_output_path, pdb) trj0 = md.load(pdb) # Hacky temporary solution. top, bonds = trj0.top.to_dataframe() protein_atom_indices = top.index[top.chainID == 0].values fahmunge.automation.strip_water(allatom_output_path, protein_output_path, protein_atom_indices)
Change output data structure to support faster rsync
Change output data structure to support faster rsync
Python
lgpl-2.1
steven-albanese/FAHMunge,kyleabeauchamp/FAHMunge,choderalab/FAHMunge
import numpy as np import os import glob import mdtraj as md import fahmunge import pandas as pd projects = pd.read_csv("./projects.csv", index_col=0) output_path = "/data/choderalab/fah/munged/" for (project, location, pdb) in projects.itertuples(): print(project, location, pdb) - allatom_output_path = os.path.join(output_path, str(project), "allatoms/") + allatom_output_path = os.path.join(output_path, "allatoms/", "%s/" % project) - protein_output_path = os.path.join(output_path, str(project), "protein/") + protein_output_path = os.path.join(output_path, "protein/", "%s/" % project) fahmunge.automation.make_path(allatom_output_path) fahmunge.automation.make_path(protein_output_path) fahmunge.automation.merge_fah_trajectories(location, allatom_output_path, pdb) trj0 = md.load(pdb) # Hacky temporary solution. top, bonds = trj0.top.to_dataframe() protein_atom_indices = top.index[top.chainID == 0].values fahmunge.automation.strip_water(allatom_output_path, protein_output_path, protein_atom_indices)
Change output data structure to support faster rsync
## Code Before: import numpy as np import os import glob import mdtraj as md import fahmunge import pandas as pd projects = pd.read_csv("./projects.csv", index_col=0) output_path = "/data/choderalab/fah/munged/" for (project, location, pdb) in projects.itertuples(): print(project, location, pdb) allatom_output_path = os.path.join(output_path, str(project), "allatoms/") protein_output_path = os.path.join(output_path, str(project), "protein/") fahmunge.automation.make_path(allatom_output_path) fahmunge.automation.make_path(protein_output_path) fahmunge.automation.merge_fah_trajectories(location, allatom_output_path, pdb) trj0 = md.load(pdb) # Hacky temporary solution. top, bonds = trj0.top.to_dataframe() protein_atom_indices = top.index[top.chainID == 0].values fahmunge.automation.strip_water(allatom_output_path, protein_output_path, protein_atom_indices) ## Instruction: Change output data structure to support faster rsync ## Code After: import numpy as np import os import glob import mdtraj as md import fahmunge import pandas as pd projects = pd.read_csv("./projects.csv", index_col=0) output_path = "/data/choderalab/fah/munged/" for (project, location, pdb) in projects.itertuples(): print(project, location, pdb) allatom_output_path = os.path.join(output_path, "allatoms/", "%s/" % project) protein_output_path = os.path.join(output_path, "protein/", "%s/" % project) fahmunge.automation.make_path(allatom_output_path) fahmunge.automation.make_path(protein_output_path) fahmunge.automation.merge_fah_trajectories(location, allatom_output_path, pdb) trj0 = md.load(pdb) # Hacky temporary solution. top, bonds = trj0.top.to_dataframe() protein_atom_indices = top.index[top.chainID == 0].values fahmunge.automation.strip_water(allatom_output_path, protein_output_path, protein_atom_indices)
# ... existing code ... print(project, location, pdb) allatom_output_path = os.path.join(output_path, "allatoms/", "%s/" % project) protein_output_path = os.path.join(output_path, "protein/", "%s/" % project) fahmunge.automation.make_path(allatom_output_path) # ... rest of the code ...
98dfc5569fb1ae58905f8b6a36deeda324dcdd7b
cronos/teilar/models.py
cronos/teilar/models.py
from django.db import models class Departments(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Department name", max_length = 200) class Teachers(models.Model): urlid = models.CharField("URL ID", max_length = 30, unique = True) name = models.CharField("Teacher name", max_length = 100) email = models.EmailField("Teacher's mail", null = True) department = models.CharField("Teacher's department", max_length = 100, null = True) def __unicode__(self): return self.name
from django.db import models class Departments(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Department name", max_length = 200) deprecated = models.BooleanField(default = False) def __unicode__(self): return self.name class Teachers(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Teacher name", max_length = 100) email = models.EmailField("Teacher's mail", null = True) department = models.CharField("Teacher's department", max_length = 100, null = True) deprecated = models.BooleanField(default = False) def __unicode__(self): return self.name
Add deprecated flag for teachers and departments
Add deprecated flag for teachers and departments
Python
agpl-3.0
LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr
from django.db import models class Departments(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Department name", max_length = 200) + deprecated = models.BooleanField(default = False) - - class Teachers(models.Model): - urlid = models.CharField("URL ID", max_length = 30, unique = True) - name = models.CharField("Teacher name", max_length = 100) - email = models.EmailField("Teacher's mail", null = True) - department = models.CharField("Teacher's department", max_length = 100, null = True) def __unicode__(self): return self.name + class Teachers(models.Model): + urlid = models.IntegerField(unique = True) + name = models.CharField("Teacher name", max_length = 100) + email = models.EmailField("Teacher's mail", null = True) + department = models.CharField("Teacher's department", max_length = 100, null = True) + deprecated = models.BooleanField(default = False) + + def __unicode__(self): + return self.name +
Add deprecated flag for teachers and departments
## Code Before: from django.db import models class Departments(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Department name", max_length = 200) class Teachers(models.Model): urlid = models.CharField("URL ID", max_length = 30, unique = True) name = models.CharField("Teacher name", max_length = 100) email = models.EmailField("Teacher's mail", null = True) department = models.CharField("Teacher's department", max_length = 100, null = True) def __unicode__(self): return self.name ## Instruction: Add deprecated flag for teachers and departments ## Code After: from django.db import models class Departments(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Department name", max_length = 200) deprecated = models.BooleanField(default = False) def __unicode__(self): return self.name class Teachers(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Teacher name", max_length = 100) email = models.EmailField("Teacher's mail", null = True) department = models.CharField("Teacher's department", max_length = 100, null = True) deprecated = models.BooleanField(default = False) def __unicode__(self): return self.name
... name = models.CharField("Department name", max_length = 200) deprecated = models.BooleanField(default = False) def __unicode__(self): return self.name ... class Teachers(models.Model): urlid = models.IntegerField(unique = True) name = models.CharField("Teacher name", max_length = 100) ... department = models.CharField("Teacher's department", max_length = 100, null = True) deprecated = models.BooleanField(default = False) ...
d0367aacfea7c238c476772a2c83f7826b1e9de5
corehq/apps/export/tasks.py
corehq/apps/export/tasks.py
from celery.task import task from corehq.apps.export.export import get_export_file, rebuild_export from couchexport.models import Format from couchexport.tasks import escape_quotes from soil.util import expose_cached_download @task def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60): export_file = get_export_file(export_instances, filters) file_format = Format.from_format(export_file.format) filename = filename or export_instances[0].name escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension)) payload = export_file.file.payload expose_cached_download( payload, expiry, ".{}".format(file_format.extension), mimetype=file_format.mimetype, content_disposition='attachment; filename="%s"' % escaped_filename, download_id=download_id, ) export_file.file.delete() @task(queue='background_queue', ignore_result=True, last_access_cutoff=None, filter=None) def rebuild_export_task(export_instance): rebuild_export(export_instance)
from celery.task import task from corehq.apps.export.export import get_export_file, rebuild_export from couchexport.models import Format from couchexport.tasks import escape_quotes from soil.util import expose_cached_download @task def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60): export_file = get_export_file(export_instances, filters) file_format = Format.from_format(export_file.format) filename = filename or export_instances[0].name escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension)) payload = export_file.file.payload expose_cached_download( payload, expiry, ".{}".format(file_format.extension), mimetype=file_format.mimetype, content_disposition='attachment; filename="%s"' % escaped_filename, download_id=download_id, ) export_file.file.delete() @task(queue='background_queue', ignore_result=True) def rebuild_export_task(export_instance, last_access_cutoff=None, filter=None): rebuild_export(export_instance, last_access_cutoff, filter)
Fix botched keyword args in rebuild_export_task()
Fix botched keyword args in rebuild_export_task()
Python
bsd-3-clause
dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
from celery.task import task from corehq.apps.export.export import get_export_file, rebuild_export from couchexport.models import Format from couchexport.tasks import escape_quotes from soil.util import expose_cached_download @task def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60): export_file = get_export_file(export_instances, filters) file_format = Format.from_format(export_file.format) filename = filename or export_instances[0].name escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension)) payload = export_file.file.payload expose_cached_download( payload, expiry, ".{}".format(file_format.extension), mimetype=file_format.mimetype, content_disposition='attachment; filename="%s"' % escaped_filename, download_id=download_id, ) export_file.file.delete() - @task(queue='background_queue', ignore_result=True, last_access_cutoff=None, filter=None) - def rebuild_export_task(export_instance): - rebuild_export(export_instance) + @task(queue='background_queue', ignore_result=True) + def rebuild_export_task(export_instance, last_access_cutoff=None, filter=None): + rebuild_export(export_instance, last_access_cutoff, filter)
Fix botched keyword args in rebuild_export_task()
## Code Before: from celery.task import task from corehq.apps.export.export import get_export_file, rebuild_export from couchexport.models import Format from couchexport.tasks import escape_quotes from soil.util import expose_cached_download @task def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60): export_file = get_export_file(export_instances, filters) file_format = Format.from_format(export_file.format) filename = filename or export_instances[0].name escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension)) payload = export_file.file.payload expose_cached_download( payload, expiry, ".{}".format(file_format.extension), mimetype=file_format.mimetype, content_disposition='attachment; filename="%s"' % escaped_filename, download_id=download_id, ) export_file.file.delete() @task(queue='background_queue', ignore_result=True, last_access_cutoff=None, filter=None) def rebuild_export_task(export_instance): rebuild_export(export_instance) ## Instruction: Fix botched keyword args in rebuild_export_task() ## Code After: from celery.task import task from corehq.apps.export.export import get_export_file, rebuild_export from couchexport.models import Format from couchexport.tasks import escape_quotes from soil.util import expose_cached_download @task def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60): export_file = get_export_file(export_instances, filters) file_format = Format.from_format(export_file.format) filename = filename or export_instances[0].name escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension)) payload = export_file.file.payload expose_cached_download( payload, expiry, ".{}".format(file_format.extension), mimetype=file_format.mimetype, content_disposition='attachment; filename="%s"' % escaped_filename, download_id=download_id, ) export_file.file.delete() @task(queue='background_queue', 
ignore_result=True) def rebuild_export_task(export_instance, last_access_cutoff=None, filter=None): rebuild_export(export_instance, last_access_cutoff, filter)
... @task(queue='background_queue', ignore_result=True) def rebuild_export_task(export_instance, last_access_cutoff=None, filter=None): rebuild_export(export_instance, last_access_cutoff, filter) ...
e2bc8b6010e979a9c00851d21ee783c8e8e27a55
adaptive/typecheck.py
adaptive/typecheck.py
def assertListOf(lst, typ): assert isinstance(lst, list), lst for idx, value in enumerate(lst): #assert isinstance(value, typ), (idx, value) assert value is None or isinstance(value, typ), (idx, value) return True
def assertListOf(lst, typ, orNone=True): assert isinstance(lst, list), lst if orNone: for idx, value in enumerate(lst): assert value is None or isinstance(value, typ), (idx, value) else: for idx, value in enumerate(lst): assert isinstance(value, typ), (idx, value) return True def emitTypeCheck(out, name, typ, orNone=True): d = dict(name=name, typ=typ.py_name) if typ.name == "void": out("assert %(name)s is None, %(name)s" % d) elif typ.parameters: assert len(typ.parameters) == 1, "Unimplemented: %s" % typ assert typ.name == "List", "Unimplemented: %s" % typ d["param"] = typ.parameters[0].py_name if orNone: out("assert %(name)s is None or _assertListOf(%(name)s, %(param)s), %(name)s" % d) else: out("_assertListOf(%(name)s, %(param)s), %(name)s" % d) else: if orNone: out("assert %(name)s is None or isinstance(%(name)s, %(typ)s), %(name)s" % d) else: out("assert isinstance(%(name)s, %(typ)s), %(name)s" % d)
Add type check helper; make typ or None optional for lists
Add type check helper; make typ or None optional for lists
Python
apache-2.0
datawire/adaptive
- def assertListOf(lst, typ): + def assertListOf(lst, typ, orNone=True): assert isinstance(lst, list), lst + if orNone: - for idx, value in enumerate(lst): + for idx, value in enumerate(lst): - #assert isinstance(value, typ), (idx, value) - assert value is None or isinstance(value, typ), (idx, value) + assert value is None or isinstance(value, typ), (idx, value) + else: + for idx, value in enumerate(lst): + assert isinstance(value, typ), (idx, value) return True + def emitTypeCheck(out, name, typ, orNone=True): + d = dict(name=name, typ=typ.py_name) + if typ.name == "void": + out("assert %(name)s is None, %(name)s" % d) + elif typ.parameters: + assert len(typ.parameters) == 1, "Unimplemented: %s" % typ + assert typ.name == "List", "Unimplemented: %s" % typ + d["param"] = typ.parameters[0].py_name + if orNone: + out("assert %(name)s is None or _assertListOf(%(name)s, %(param)s), %(name)s" % d) + else: + out("_assertListOf(%(name)s, %(param)s), %(name)s" % d) + else: + if orNone: + out("assert %(name)s is None or isinstance(%(name)s, %(typ)s), %(name)s" % d) + else: + out("assert isinstance(%(name)s, %(typ)s), %(name)s" % d) +
Add type check helper; make typ or None optional for lists
## Code Before: def assertListOf(lst, typ): assert isinstance(lst, list), lst for idx, value in enumerate(lst): #assert isinstance(value, typ), (idx, value) assert value is None or isinstance(value, typ), (idx, value) return True ## Instruction: Add type check helper; make typ or None optional for lists ## Code After: def assertListOf(lst, typ, orNone=True): assert isinstance(lst, list), lst if orNone: for idx, value in enumerate(lst): assert value is None or isinstance(value, typ), (idx, value) else: for idx, value in enumerate(lst): assert isinstance(value, typ), (idx, value) return True def emitTypeCheck(out, name, typ, orNone=True): d = dict(name=name, typ=typ.py_name) if typ.name == "void": out("assert %(name)s is None, %(name)s" % d) elif typ.parameters: assert len(typ.parameters) == 1, "Unimplemented: %s" % typ assert typ.name == "List", "Unimplemented: %s" % typ d["param"] = typ.parameters[0].py_name if orNone: out("assert %(name)s is None or _assertListOf(%(name)s, %(param)s), %(name)s" % d) else: out("_assertListOf(%(name)s, %(param)s), %(name)s" % d) else: if orNone: out("assert %(name)s is None or isinstance(%(name)s, %(typ)s), %(name)s" % d) else: out("assert isinstance(%(name)s, %(typ)s), %(name)s" % d)
# ... existing code ... def assertListOf(lst, typ, orNone=True): assert isinstance(lst, list), lst if orNone: for idx, value in enumerate(lst): assert value is None or isinstance(value, typ), (idx, value) else: for idx, value in enumerate(lst): assert isinstance(value, typ), (idx, value) return True def emitTypeCheck(out, name, typ, orNone=True): d = dict(name=name, typ=typ.py_name) if typ.name == "void": out("assert %(name)s is None, %(name)s" % d) elif typ.parameters: assert len(typ.parameters) == 1, "Unimplemented: %s" % typ assert typ.name == "List", "Unimplemented: %s" % typ d["param"] = typ.parameters[0].py_name if orNone: out("assert %(name)s is None or _assertListOf(%(name)s, %(param)s), %(name)s" % d) else: out("_assertListOf(%(name)s, %(param)s), %(name)s" % d) else: if orNone: out("assert %(name)s is None or isinstance(%(name)s, %(typ)s), %(name)s" % d) else: out("assert isinstance(%(name)s, %(typ)s), %(name)s" % d) # ... rest of the code ...
1ee2e880872c4744f4159df7fc64bb64b3f35632
pygametemplate/button.py
pygametemplate/button.py
import time class Button(object): """Class representing keyboard keys.""" def __init__(self, game, number): self.game = game self.number = number self.event = None # The last event that caused the button press self.pressed = 0 # If the button was just pressed self.held = 0 # If the button is held self.released = 0 # If the button was just released self.press_time = 0.0 def press(self): self.pressed = 1 self.held = 1 self.press_time = time.time() def release(self): self.held = 0 self.released = 1 def reset(self): self.pressed = 0 self.released = 0 def time_held(self): if self.held: return time.time() - self.press_time else: return 0.0
import time class Button(object): """Class representing keyboard keys.""" def __init__(self, game, number): self.game = game self.number = number self.event = None # The last event that caused the button press self.pressed = 0 # If the button was just pressed self.held = 0 # If the button is held self.released = 0 # If the button was just released self.press_time = 0.0 def press(self): self.pressed = 1 self.held = 1 self.press_time = time.time() def release(self): self.held = 0 self.released = 1 def reset(self): self.pressed = 0 self.released = 0 def time_held(self) -> float: """Return the amount of time this button has been held for in seconds.""" if self.held: return time.time() - self.press_time else: return 0.0
Add docstring to Button.time_held() method
Add docstring to Button.time_held() method
Python
mit
AndyDeany/pygame-template
import time class Button(object): """Class representing keyboard keys.""" def __init__(self, game, number): self.game = game self.number = number self.event = None # The last event that caused the button press self.pressed = 0 # If the button was just pressed self.held = 0 # If the button is held self.released = 0 # If the button was just released self.press_time = 0.0 def press(self): self.pressed = 1 self.held = 1 self.press_time = time.time() def release(self): self.held = 0 self.released = 1 def reset(self): self.pressed = 0 self.released = 0 - def time_held(self): + def time_held(self) -> float: + """Return the amount of time this button has been held for in seconds.""" if self.held: return time.time() - self.press_time else: return 0.0
Add docstring to Button.time_held() method
## Code Before: import time class Button(object): """Class representing keyboard keys.""" def __init__(self, game, number): self.game = game self.number = number self.event = None # The last event that caused the button press self.pressed = 0 # If the button was just pressed self.held = 0 # If the button is held self.released = 0 # If the button was just released self.press_time = 0.0 def press(self): self.pressed = 1 self.held = 1 self.press_time = time.time() def release(self): self.held = 0 self.released = 1 def reset(self): self.pressed = 0 self.released = 0 def time_held(self): if self.held: return time.time() - self.press_time else: return 0.0 ## Instruction: Add docstring to Button.time_held() method ## Code After: import time class Button(object): """Class representing keyboard keys.""" def __init__(self, game, number): self.game = game self.number = number self.event = None # The last event that caused the button press self.pressed = 0 # If the button was just pressed self.held = 0 # If the button is held self.released = 0 # If the button was just released self.press_time = 0.0 def press(self): self.pressed = 1 self.held = 1 self.press_time = time.time() def release(self): self.held = 0 self.released = 1 def reset(self): self.pressed = 0 self.released = 0 def time_held(self) -> float: """Return the amount of time this button has been held for in seconds.""" if self.held: return time.time() - self.press_time else: return 0.0
... def time_held(self) -> float: """Return the amount of time this button has been held for in seconds.""" if self.held: ...
8095c37e0ab99e9827acbe4621f2fcb9334e1426
games/management/commands/autocreate_steamdb_installers.py
games/management/commands/autocreate_steamdb_installers.py
import json from django.core.management.base import BaseCommand from games import models from accounts.models import User class Command(BaseCommand): def handle(self, *args, **options): with open("steamdb.json") as steamdb_file: steamdb = json.loads(steamdb_file.read()) steam_runner = models.Runner.objects.get(slug='steam') user = User.objects.get(username='strider') for steamapp in steamdb: if steamapp['linux_status'] == 'Game Works': appid = steamapp['appid'] name = steamapp['name'] try: game = models.Game.objects.get(steamid=int(appid)) except models.Game.DoesNotExist: continue current_installer = game.installer_set.all() if current_installer: continue self.stdout.write("Creating installer for %s" % name) installer = models.Installer() installer.runner = steam_runner installer.user = user installer.game = game installer.set_default_installer() installer.published = True installer.save()
import json from django.core.management.base import BaseCommand from games import models from accounts.models import User class Command(BaseCommand): def handle(self, *args, **options): with open("steamdb.json") as steamdb_file: steamdb = json.loads(steamdb_file.read()) steam_runner = models.Runner.objects.get(slug='steam') user = User.objects.get(username='strider') for steamapp in steamdb: if steamapp['linux_status'].startswith('Game Works'): appid = steamapp['appid'] name = steamapp['name'] try: game = models.Game.objects.get(steamid=int(appid)) except models.Game.DoesNotExist: continue current_installer = game.installer_set.all() if current_installer: continue self.stdout.write("Creating installer for %s" % name) installer = models.Installer() installer.runner = steam_runner installer.user = user installer.game = game installer.set_default_installer() installer.published = True installer.save()
Update installer autocreate for games with no icon
Update installer autocreate for games with no icon
Python
agpl-3.0
Turupawn/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website,lutris/website,lutris/website,Turupawn/website
import json from django.core.management.base import BaseCommand from games import models from accounts.models import User class Command(BaseCommand): def handle(self, *args, **options): with open("steamdb.json") as steamdb_file: steamdb = json.loads(steamdb_file.read()) steam_runner = models.Runner.objects.get(slug='steam') user = User.objects.get(username='strider') for steamapp in steamdb: - if steamapp['linux_status'] == 'Game Works': + if steamapp['linux_status'].startswith('Game Works'): appid = steamapp['appid'] name = steamapp['name'] try: game = models.Game.objects.get(steamid=int(appid)) except models.Game.DoesNotExist: continue current_installer = game.installer_set.all() if current_installer: continue self.stdout.write("Creating installer for %s" % name) installer = models.Installer() installer.runner = steam_runner installer.user = user installer.game = game installer.set_default_installer() installer.published = True installer.save()
Update installer autocreate for games with no icon
## Code Before: import json from django.core.management.base import BaseCommand from games import models from accounts.models import User class Command(BaseCommand): def handle(self, *args, **options): with open("steamdb.json") as steamdb_file: steamdb = json.loads(steamdb_file.read()) steam_runner = models.Runner.objects.get(slug='steam') user = User.objects.get(username='strider') for steamapp in steamdb: if steamapp['linux_status'] == 'Game Works': appid = steamapp['appid'] name = steamapp['name'] try: game = models.Game.objects.get(steamid=int(appid)) except models.Game.DoesNotExist: continue current_installer = game.installer_set.all() if current_installer: continue self.stdout.write("Creating installer for %s" % name) installer = models.Installer() installer.runner = steam_runner installer.user = user installer.game = game installer.set_default_installer() installer.published = True installer.save() ## Instruction: Update installer autocreate for games with no icon ## Code After: import json from django.core.management.base import BaseCommand from games import models from accounts.models import User class Command(BaseCommand): def handle(self, *args, **options): with open("steamdb.json") as steamdb_file: steamdb = json.loads(steamdb_file.read()) steam_runner = models.Runner.objects.get(slug='steam') user = User.objects.get(username='strider') for steamapp in steamdb: if steamapp['linux_status'].startswith('Game Works'): appid = steamapp['appid'] name = steamapp['name'] try: game = models.Game.objects.get(steamid=int(appid)) except models.Game.DoesNotExist: continue current_installer = game.installer_set.all() if current_installer: continue self.stdout.write("Creating installer for %s" % name) installer = models.Installer() installer.runner = steam_runner installer.user = user installer.game = game installer.set_default_installer() installer.published = True installer.save()
... for steamapp in steamdb: if steamapp['linux_status'].startswith('Game Works'): appid = steamapp['appid'] ...
79a453f503e0f4283700071d415e32e82d35162b
eadred/management/commands/generatedata.py
eadred/management/commands/generatedata.py
import imp from django.conf import settings from django.core.management.base import BaseCommand from django.utils.importlib import import_module from optparse import make_option class Command(BaseCommand): help = 'Generates sample data.' option_list = BaseCommand.option_list + ( make_option('--with', action='append', dest='param', help='Pass key=val style param to generate_sampledata'), ) def handle(self, *args, **options): for item in options.get('param', []): if '=' in item: key, val = item.split('=') else: key, val = item, True options[key] = val # Allows you to specify which apps to generate sampledata for. if not args: args = [] for app in settings.INSTALLED_APPS: if args and app not in args: continue try: app_path = import_module(app).__path__ except AttributeError: continue try: imp.find_module('sampledata', app_path) except ImportError: continue module = import_module('%s.sampledata' % app) if hasattr(module, 'generate_sampledata'): self.stdout.write('Generating sample data from %s...' % app) module.generate_sampledata(options) self.stdout.write('Done!\n')
import imp from django.conf import settings from django.core.management.base import BaseCommand from django.utils.importlib import import_module from optparse import make_option class Command(BaseCommand): help = 'Generates sample data.' option_list = BaseCommand.option_list + ( make_option('--with', action='append', dest='param', help='Pass key=val style param to generate_sampledata'), ) def handle(self, *args, **options): if options.get('param'): for item in options['param']: if '=' in item: key, val = item.split('=') else: key, val = item, True options[key] = val # Allows you to specify which apps to generate sampledata for. if not args: args = [] for app in settings.INSTALLED_APPS: if args and app not in args: continue try: app_path = import_module(app).__path__ except AttributeError: continue try: imp.find_module('sampledata', app_path) except ImportError: continue module = import_module('%s.sampledata' % app) if hasattr(module, 'generate_sampledata'): self.stdout.write('Generating sample data from %s...' % app) module.generate_sampledata(options) self.stdout.write('Done!\n')
Fix issue where options['param'] can be None.
Fix issue where options['param'] can be None.
Python
bsd-3-clause
willkg/django-eadred
import imp from django.conf import settings from django.core.management.base import BaseCommand from django.utils.importlib import import_module from optparse import make_option class Command(BaseCommand): help = 'Generates sample data.' option_list = BaseCommand.option_list + ( make_option('--with', action='append', dest='param', help='Pass key=val style param to generate_sampledata'), ) def handle(self, *args, **options): + if options.get('param'): - for item in options.get('param', []): + for item in options['param']: - if '=' in item: + if '=' in item: - key, val = item.split('=') + key, val = item.split('=') - else: + else: - key, val = item, True + key, val = item, True - options[key] = val + options[key] = val # Allows you to specify which apps to generate sampledata for. if not args: args = [] for app in settings.INSTALLED_APPS: if args and app not in args: continue try: app_path = import_module(app).__path__ except AttributeError: continue try: imp.find_module('sampledata', app_path) except ImportError: continue module = import_module('%s.sampledata' % app) if hasattr(module, 'generate_sampledata'): self.stdout.write('Generating sample data from %s...' % app) module.generate_sampledata(options) self.stdout.write('Done!\n')
Fix issue where options['param'] can be None.
## Code Before: import imp from django.conf import settings from django.core.management.base import BaseCommand from django.utils.importlib import import_module from optparse import make_option class Command(BaseCommand): help = 'Generates sample data.' option_list = BaseCommand.option_list + ( make_option('--with', action='append', dest='param', help='Pass key=val style param to generate_sampledata'), ) def handle(self, *args, **options): for item in options.get('param', []): if '=' in item: key, val = item.split('=') else: key, val = item, True options[key] = val # Allows you to specify which apps to generate sampledata for. if not args: args = [] for app in settings.INSTALLED_APPS: if args and app not in args: continue try: app_path = import_module(app).__path__ except AttributeError: continue try: imp.find_module('sampledata', app_path) except ImportError: continue module = import_module('%s.sampledata' % app) if hasattr(module, 'generate_sampledata'): self.stdout.write('Generating sample data from %s...' % app) module.generate_sampledata(options) self.stdout.write('Done!\n') ## Instruction: Fix issue where options['param'] can be None. ## Code After: import imp from django.conf import settings from django.core.management.base import BaseCommand from django.utils.importlib import import_module from optparse import make_option class Command(BaseCommand): help = 'Generates sample data.' option_list = BaseCommand.option_list + ( make_option('--with', action='append', dest='param', help='Pass key=val style param to generate_sampledata'), ) def handle(self, *args, **options): if options.get('param'): for item in options['param']: if '=' in item: key, val = item.split('=') else: key, val = item, True options[key] = val # Allows you to specify which apps to generate sampledata for. 
if not args: args = [] for app in settings.INSTALLED_APPS: if args and app not in args: continue try: app_path = import_module(app).__path__ except AttributeError: continue try: imp.find_module('sampledata', app_path) except ImportError: continue module = import_module('%s.sampledata' % app) if hasattr(module, 'generate_sampledata'): self.stdout.write('Generating sample data from %s...' % app) module.generate_sampledata(options) self.stdout.write('Done!\n')
# ... existing code ... def handle(self, *args, **options): if options.get('param'): for item in options['param']: if '=' in item: key, val = item.split('=') else: key, val = item, True options[key] = val # ... rest of the code ...
c3e2c6f77dffc2ff5874c1bb495e6de119800cf4
rx/core/observable/merge.py
rx/core/observable/merge.py
import rx from rx import operators as ops from rx.core import Observable def _merge(*args) -> Observable: """Merges all the observable sequences into a single observable sequence. 1 - merged = rx.merge(xs, ys, zs) 2 - merged = rx.merge([xs, ys, zs]) Returns: The observable sequence that merges the elements of the observable sequences. """ sources = args[:] if isinstance(sources[0], list): sources = sources[0] return rx.from_iterable(sources).pipe(ops.merge_all())
from typing import Iterable, Union import rx from rx import operators as ops from rx.core import Observable def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable: """Merges all the observable sequences into a single observable sequence. 1 - merged = rx.merge(xs, ys, zs) 2 - merged = rx.merge([xs, ys, zs]) Returns: The observable sequence that merges the elements of the observable sequences. """ sources = args[:] if isinstance(sources[0], Iterable): sources = sources[0] return rx.from_iterable(sources).pipe(ops.merge_all())
Fix typing and accept iterable instead of list
Fix typing and accept iterable instead of list
Python
mit
ReactiveX/RxPY,ReactiveX/RxPY
+ from typing import Iterable, Union + import rx from rx import operators as ops from rx.core import Observable - def _merge(*args) -> Observable: + def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable: """Merges all the observable sequences into a single observable sequence. 1 - merged = rx.merge(xs, ys, zs) 2 - merged = rx.merge([xs, ys, zs]) Returns: The observable sequence that merges the elements of the observable sequences. """ sources = args[:] - if isinstance(sources[0], list): + if isinstance(sources[0], Iterable): sources = sources[0] return rx.from_iterable(sources).pipe(ops.merge_all()) - -
Fix typing and accept iterable instead of list
## Code Before: import rx from rx import operators as ops from rx.core import Observable def _merge(*args) -> Observable: """Merges all the observable sequences into a single observable sequence. 1 - merged = rx.merge(xs, ys, zs) 2 - merged = rx.merge([xs, ys, zs]) Returns: The observable sequence that merges the elements of the observable sequences. """ sources = args[:] if isinstance(sources[0], list): sources = sources[0] return rx.from_iterable(sources).pipe(ops.merge_all()) ## Instruction: Fix typing and accept iterable instead of list ## Code After: from typing import Iterable, Union import rx from rx import operators as ops from rx.core import Observable def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable: """Merges all the observable sequences into a single observable sequence. 1 - merged = rx.merge(xs, ys, zs) 2 - merged = rx.merge([xs, ys, zs]) Returns: The observable sequence that merges the elements of the observable sequences. """ sources = args[:] if isinstance(sources[0], Iterable): sources = sources[0] return rx.from_iterable(sources).pipe(ops.merge_all())
... from typing import Iterable, Union import rx ... def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable: """Merges all the observable sequences into a single observable ... if isinstance(sources[0], Iterable): sources = sources[0] ... return rx.from_iterable(sources).pipe(ops.merge_all()) ...
435e8fc4d9ad8c071a96e37e483fcbc194a94fc6
tests/integration/files/file/base/_modules/runtests_decorators.py
tests/integration/files/file/base/_modules/runtests_decorators.py
from __future__ import absolute_import import time # Import Salt libs import salt.utils.decorators def _fallbackfunc(): return False, 'fallback' def working_function(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(True) def booldependsTrue(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(False) def booldependsFalse(): return True @salt.utils.decorators.depends('time') def depends(): ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123') def missing_depends(): return True @salt.utils.decorators.depends('time', fallback_function=_fallbackfunc) def depends_will_not_fallback(): ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc) def missing_depends_will_fallback(): ret = {'ret': True, 'time': time.time()} return ret
from __future__ import absolute_import import time # Import Salt libs import salt.utils.decorators def _fallbackfunc(): return False, 'fallback' def working_function(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(True) def booldependsTrue(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(False) def booldependsFalse(): return True @salt.utils.decorators.depends('time') def depends(): ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123') def missing_depends(): return True @salt.utils.decorators.depends('time', fallback_function=_fallbackfunc) def depends_will_not_fallback(): ''' CLI Example: .. code-block:: bash ''' ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc) def missing_depends_will_fallback(): ret = {'ret': True, 'time': time.time()} return ret
Fix tests: add module function docstring
Fix tests: add module function docstring
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
from __future__ import absolute_import import time # Import Salt libs import salt.utils.decorators def _fallbackfunc(): return False, 'fallback' def working_function(): ''' CLI Example: .. code-block:: bash ''' return True + @salt.utils.decorators.depends(True) def booldependsTrue(): ''' CLI Example: .. code-block:: bash ''' return True + @salt.utils.decorators.depends(False) def booldependsFalse(): return True + @salt.utils.decorators.depends('time') def depends(): ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123') def missing_depends(): return True @salt.utils.decorators.depends('time', fallback_function=_fallbackfunc) def depends_will_not_fallback(): + ''' + CLI Example: + + .. code-block:: bash + ''' ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc) def missing_depends_will_fallback(): ret = {'ret': True, 'time': time.time()} return ret
Fix tests: add module function docstring
## Code Before: from __future__ import absolute_import import time # Import Salt libs import salt.utils.decorators def _fallbackfunc(): return False, 'fallback' def working_function(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(True) def booldependsTrue(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(False) def booldependsFalse(): return True @salt.utils.decorators.depends('time') def depends(): ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123') def missing_depends(): return True @salt.utils.decorators.depends('time', fallback_function=_fallbackfunc) def depends_will_not_fallback(): ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc) def missing_depends_will_fallback(): ret = {'ret': True, 'time': time.time()} return ret ## Instruction: Fix tests: add module function docstring ## Code After: from __future__ import absolute_import import time # Import Salt libs import salt.utils.decorators def _fallbackfunc(): return False, 'fallback' def working_function(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(True) def booldependsTrue(): ''' CLI Example: .. code-block:: bash ''' return True @salt.utils.decorators.depends(False) def booldependsFalse(): return True @salt.utils.decorators.depends('time') def depends(): ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123') def missing_depends(): return True @salt.utils.decorators.depends('time', fallback_function=_fallbackfunc) def depends_will_not_fallback(): ''' CLI Example: .. code-block:: bash ''' ret = {'ret': True, 'time': time.time()} return ret @salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc) def missing_depends_will_fallback(): ret = {'ret': True, 'time': time.time()} return ret
# ... existing code ... @salt.utils.decorators.depends(True) # ... modified code ... @salt.utils.decorators.depends(False) ... return True ... def depends_will_not_fallback(): ''' CLI Example: .. code-block:: bash ''' ret = {'ret': True, # ... rest of the code ...
5df86afa64aafb4aee1adb066307910e0fb64256
jd2chm_utils.py
jd2chm_utils.py
import os import sys import jd2chm_log import jd2chm_conf logging = None config = None def getAppDir(): if hasattr(sys, "frozen"): # py2exe return os.path.dirname(sys.executable) return os.path.dirname(sys.argv[0]) def getLogging(level=2): global logging if not logging: logging = jd2chm_log.Jd2chmLogging(level) return logging def getLog(): """Faciliate sharing the logger accross the different modules.""" return getLogging().logger def getConf(): global config if not config: config = jd2chm_conf.Jd2chmConfig() config.init() return config
import os import sys import shutil import jd2chm_log as log import jd2chm_conf as conf import jd2chm_const as const logging = None config = None def get_app_dir(): if hasattr(sys, "frozen"): # py2exe return os.path.dirname(sys.executable) return os.path.dirname(sys.argv[0]) def get_logging(level=2): global logging if not logging: logging = log.Jd2chmLogging(level) return logging def get_log(): """Facilitate sharing the logger across the different modules.""" return get_logging().logger def get_conf(): global config if not config: config = conf.Jd2chmConfig() config.init() return config def term_width(): return shutil.get_terminal_size((const.DEFAULT_TERM_WIDTH, const.DEFAULT_TERM_HEIGHT)).columns - const.TERM_MARGIN def center(line, max_line=0): """Center a padded string based on the width of the terminal. If max_line is provided for justified text, line shorter than max_line will only be padded on the left side. """ width = term_width() left_margin = (width - max_line) / 2 if len(line) < max_line: return (' ' * int(left_margin)) + line return line.center(width, ' ') def print_center_block(text, max_line=0): """Print a block of text centered on the terminal.""" for line in text.split('\n'): print(center(line, max_line))
Reformat code. Added methods to pretty print messages.
Reformat code. Added methods to pretty print messages.
Python
mit
andreburgaud/jd2chm,andreburgaud/jd2chm
import os import sys + import shutil - import jd2chm_log + import jd2chm_log as log - import jd2chm_conf + import jd2chm_conf as conf + import jd2chm_const as const logging = None config = None - def getAppDir(): - if hasattr(sys, "frozen"): # py2exe - return os.path.dirname(sys.executable) - return os.path.dirname(sys.argv[0]) + def get_app_dir(): + if hasattr(sys, "frozen"): # py2exe + return os.path.dirname(sys.executable) + return os.path.dirname(sys.argv[0]) - def getLogging(level=2): - global logging - if not logging: - logging = jd2chm_log.Jd2chmLogging(level) - return logging - def getLog(): - """Faciliate sharing the logger accross the different modules.""" - return getLogging().logger + def get_logging(level=2): + global logging + if not logging: + logging = log.Jd2chmLogging(level) + return logging + + + def get_log(): + """Facilitate sharing the logger across the different modules.""" + return get_logging().logger + + - def getConf(): + def get_conf(): - global config + global config - if not config: + if not config: - config = jd2chm_conf.Jd2chmConfig() + config = conf.Jd2chmConfig() - config.init() + config.init() - return config + return config + + + def term_width(): + return shutil.get_terminal_size((const.DEFAULT_TERM_WIDTH, + const.DEFAULT_TERM_HEIGHT)).columns - const.TERM_MARGIN + + + def center(line, max_line=0): + """Center a padded string based on the width of the terminal. + + If max_line is provided for justified text, line shorter than max_line + will only be padded on the left side. + """ + + width = term_width() + left_margin = (width - max_line) / 2 + if len(line) < max_line: + return (' ' * int(left_margin)) + line + return line.center(width, ' ') + + + def print_center_block(text, max_line=0): + """Print a block of text centered on the terminal.""" + + for line in text.split('\n'): + print(center(line, max_line)) +
Reformat code. Added methods to pretty print messages.
## Code Before: import os import sys import jd2chm_log import jd2chm_conf logging = None config = None def getAppDir(): if hasattr(sys, "frozen"): # py2exe return os.path.dirname(sys.executable) return os.path.dirname(sys.argv[0]) def getLogging(level=2): global logging if not logging: logging = jd2chm_log.Jd2chmLogging(level) return logging def getLog(): """Faciliate sharing the logger accross the different modules.""" return getLogging().logger def getConf(): global config if not config: config = jd2chm_conf.Jd2chmConfig() config.init() return config ## Instruction: Reformat code. Added methods to pretty print messages. ## Code After: import os import sys import shutil import jd2chm_log as log import jd2chm_conf as conf import jd2chm_const as const logging = None config = None def get_app_dir(): if hasattr(sys, "frozen"): # py2exe return os.path.dirname(sys.executable) return os.path.dirname(sys.argv[0]) def get_logging(level=2): global logging if not logging: logging = log.Jd2chmLogging(level) return logging def get_log(): """Facilitate sharing the logger across the different modules.""" return get_logging().logger def get_conf(): global config if not config: config = conf.Jd2chmConfig() config.init() return config def term_width(): return shutil.get_terminal_size((const.DEFAULT_TERM_WIDTH, const.DEFAULT_TERM_HEIGHT)).columns - const.TERM_MARGIN def center(line, max_line=0): """Center a padded string based on the width of the terminal. If max_line is provided for justified text, line shorter than max_line will only be padded on the left side. """ width = term_width() left_margin = (width - max_line) / 2 if len(line) < max_line: return (' ' * int(left_margin)) + line return line.center(width, ' ') def print_center_block(text, max_line=0): """Print a block of text centered on the terminal.""" for line in text.split('\n'): print(center(line, max_line))
... import sys import shutil import jd2chm_log as log import jd2chm_conf as conf import jd2chm_const as const ... def get_app_dir(): if hasattr(sys, "frozen"): # py2exe return os.path.dirname(sys.executable) return os.path.dirname(sys.argv[0]) def get_logging(level=2): global logging if not logging: logging = log.Jd2chmLogging(level) return logging def get_log(): """Facilitate sharing the logger across the different modules.""" return get_logging().logger def get_conf(): global config if not config: config = conf.Jd2chmConfig() config.init() return config def term_width(): return shutil.get_terminal_size((const.DEFAULT_TERM_WIDTH, const.DEFAULT_TERM_HEIGHT)).columns - const.TERM_MARGIN def center(line, max_line=0): """Center a padded string based on the width of the terminal. If max_line is provided for justified text, line shorter than max_line will only be padded on the left side. """ width = term_width() left_margin = (width - max_line) / 2 if len(line) < max_line: return (' ' * int(left_margin)) + line return line.center(width, ' ') def print_center_block(text, max_line=0): """Print a block of text centered on the terminal.""" for line in text.split('\n'): print(center(line, max_line)) ...
54a1f1774517faf377ae43f1bad4a4f5c0b0c562
accelerator/tests/contexts/judging_round_context.py
accelerator/tests/contexts/judging_round_context.py
from accelerator.tests.factories import ( JudgingFormFactory, JudgingFormElementFactory, JudgingRoundFactory, ) from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION class JudgingRoundContext: def __init__(self, **kwargs): if kwargs.get("is_active") is True: should_be_active = True kwargs["is_active"] = False else: should_be_active = False self.judging_round = JudgingRoundFactory(**kwargs) if should_be_active: self.activate_judging_round() def activate_judging_round(self): self.judging_form = self.prepare_judging_form() self.judging_round.judging_form = self.judging_form self.judging_round.is_active=True self.judging_round.save() def prepare_judging_form(self): judging_form = JudgingFormFactory() JudgingFormElementFactory( element_name=FORM_ELEM_OVERALL_RECOMMENDATION, form_type=judging_form) return judging_form
from accelerator.tests.factories import ( JudgingFormFactory, JudgingFormElementFactory, JudgingRoundFactory, ) from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION class JudgingRoundContext: def __init__(self, **kwargs): if kwargs.get("is_active") is True: should_be_active = True kwargs["is_active"] = False else: should_be_active = False self.judging_round = JudgingRoundFactory(**kwargs) if should_be_active: self.activate_judging_round() def activate_judging_round(self): self.judging_form = self.prepare_judging_form() self.judging_round.judging_form = self.judging_form self.judging_round.is_active=True self.judging_round.save() def prepare_judging_form(self): judging_form = JudgingFormFactory() JudgingFormElementFactory( element_name=FORM_ELEM_OVERALL_RECOMMENDATION, form_type=judging_form, mandatory=True, element_type="feedback") return judging_form
Add some values to the default judging_form_element
[AC-7310] Add some values to the default judging_form_element
Python
mit
masschallenge/django-accelerator,masschallenge/django-accelerator
from accelerator.tests.factories import ( JudgingFormFactory, JudgingFormElementFactory, JudgingRoundFactory, ) from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION class JudgingRoundContext: def __init__(self, **kwargs): if kwargs.get("is_active") is True: should_be_active = True kwargs["is_active"] = False else: should_be_active = False self.judging_round = JudgingRoundFactory(**kwargs) if should_be_active: self.activate_judging_round() def activate_judging_round(self): self.judging_form = self.prepare_judging_form() self.judging_round.judging_form = self.judging_form self.judging_round.is_active=True self.judging_round.save() def prepare_judging_form(self): judging_form = JudgingFormFactory() JudgingFormElementFactory( element_name=FORM_ELEM_OVERALL_RECOMMENDATION, - form_type=judging_form) + form_type=judging_form, + mandatory=True, + element_type="feedback") return judging_form
Add some values to the default judging_form_element
## Code Before: from accelerator.tests.factories import ( JudgingFormFactory, JudgingFormElementFactory, JudgingRoundFactory, ) from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION class JudgingRoundContext: def __init__(self, **kwargs): if kwargs.get("is_active") is True: should_be_active = True kwargs["is_active"] = False else: should_be_active = False self.judging_round = JudgingRoundFactory(**kwargs) if should_be_active: self.activate_judging_round() def activate_judging_round(self): self.judging_form = self.prepare_judging_form() self.judging_round.judging_form = self.judging_form self.judging_round.is_active=True self.judging_round.save() def prepare_judging_form(self): judging_form = JudgingFormFactory() JudgingFormElementFactory( element_name=FORM_ELEM_OVERALL_RECOMMENDATION, form_type=judging_form) return judging_form ## Instruction: Add some values to the default judging_form_element ## Code After: from accelerator.tests.factories import ( JudgingFormFactory, JudgingFormElementFactory, JudgingRoundFactory, ) from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION class JudgingRoundContext: def __init__(self, **kwargs): if kwargs.get("is_active") is True: should_be_active = True kwargs["is_active"] = False else: should_be_active = False self.judging_round = JudgingRoundFactory(**kwargs) if should_be_active: self.activate_judging_round() def activate_judging_round(self): self.judging_form = self.prepare_judging_form() self.judging_round.judging_form = self.judging_form self.judging_round.is_active=True self.judging_round.save() def prepare_judging_form(self): judging_form = JudgingFormFactory() JudgingFormElementFactory( element_name=FORM_ELEM_OVERALL_RECOMMENDATION, form_type=judging_form, mandatory=True, element_type="feedback") return judging_form
// ... existing code ... element_name=FORM_ELEM_OVERALL_RECOMMENDATION, form_type=judging_form, mandatory=True, element_type="feedback") return judging_form // ... rest of the code ...
faa0e5fd214151e8b0bb8fb18772807aa020c4bf
infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py
infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py
"""Script to print list of all crowdin language codes for project.""" from crowdin_bot import api NS_DICT = { 'ns': "urn:oasis:names:tc:xliff:document:1.2" } def get_project_languages(): """Get list of crowdin language codes. Returns: (list) list of project crowdin language codes """ info_xml = api.api_call_xml("info") languages = info_xml.find('languages') translatable_languages = [] for language in languages: # Check it's not the incontext pseudo language if language.find("can_translate").text == "1": translatable_languages.append(language.find('code').text) return translatable_languages if __name__ == "__main__": print('\n'.join(get_project_languages()))
"""Script to print list of all crowdin language codes for project.""" from crowdin_bot import api NS_DICT = { 'ns': "urn:oasis:names:tc:xliff:document:1.2" } def get_project_languages(): """Get list of crowdin language codes. Returns: (list) list of project crowdin language codes """ active_languages = [] trans_status = api.api_call_json("status") for language in trans_status: # Check language has actually had some translation done if int(language["words_approved"]) > 0: active_languages.append(language["code"]) return active_languages if __name__ == "__main__": for language in get_project_languages(): print(language)
Modify crowdin_bot to only include languages that have >0 translations
Modify crowdin_bot to only include languages that have >0 translations
Python
mit
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
"""Script to print list of all crowdin language codes for project.""" from crowdin_bot import api NS_DICT = { 'ns': "urn:oasis:names:tc:xliff:document:1.2" } def get_project_languages(): """Get list of crowdin language codes. Returns: (list) list of project crowdin language codes """ - info_xml = api.api_call_xml("info") - languages = info_xml.find('languages') - translatable_languages = [] + active_languages = [] + trans_status = api.api_call_json("status") - for language in languages: + for language in trans_status: - # Check it's not the incontext pseudo language - if language.find("can_translate").text == "1": - translatable_languages.append(language.find('code').text) + # Check language has actually had some translation done + if int(language["words_approved"]) > 0: + active_languages.append(language["code"]) - return translatable_languages + return active_languages if __name__ == "__main__": - print('\n'.join(get_project_languages())) + for language in get_project_languages(): + print(language)
Modify crowdin_bot to only include languages that have >0 translations
## Code Before: """Script to print list of all crowdin language codes for project.""" from crowdin_bot import api NS_DICT = { 'ns': "urn:oasis:names:tc:xliff:document:1.2" } def get_project_languages(): """Get list of crowdin language codes. Returns: (list) list of project crowdin language codes """ info_xml = api.api_call_xml("info") languages = info_xml.find('languages') translatable_languages = [] for language in languages: # Check it's not the incontext pseudo language if language.find("can_translate").text == "1": translatable_languages.append(language.find('code').text) return translatable_languages if __name__ == "__main__": print('\n'.join(get_project_languages())) ## Instruction: Modify crowdin_bot to only include languages that have >0 translations ## Code After: """Script to print list of all crowdin language codes for project.""" from crowdin_bot import api NS_DICT = { 'ns': "urn:oasis:names:tc:xliff:document:1.2" } def get_project_languages(): """Get list of crowdin language codes. Returns: (list) list of project crowdin language codes """ active_languages = [] trans_status = api.api_call_json("status") for language in trans_status: # Check language has actually had some translation done if int(language["words_approved"]) > 0: active_languages.append(language["code"]) return active_languages if __name__ == "__main__": for language in get_project_languages(): print(language)
# ... existing code ... """ active_languages = [] trans_status = api.api_call_json("status") for language in trans_status: # Check language has actually had some translation done if int(language["words_approved"]) > 0: active_languages.append(language["code"]) return active_languages # ... modified code ... if __name__ == "__main__": for language in get_project_languages(): print(language) # ... rest of the code ...
6705b4eb603f69681357a5f71f02e81705ea5e17
setup.py
setup.py
from distutils.core import setup try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except(IOError, ImportError): long_description = open('README.md').read() setup( name='pymp4parse', version='0.3.0', packages=[''], url='https://github.com/use-sparingly/pymp4parse', license='The MIT License', author='Alastair Mccormack', author_email='alastair at alu.media', description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser', requires=['bitstring'], install_requires=['bitstring'], long_description=long_description, data_files=[('', ['README.md'])] )
from distutils.core import setup try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except(IOError, ImportError): long_description = open('README.md').read() setup( name='pymp4parse', version='0.3.0', packages=[''], url='https://github.com/use-sparingly/pymp4parse', license='The MIT License', author='Alastair Mccormack', author_email='alastair at alu.media', description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser', requires=['bitstring', 'six'], install_requires=['bitstring', 'six'], long_description=long_description, data_files=[('', ['README.md'])] )
Add six as dependency to fix import issue
Add six as dependency to fix import issue
Python
mit
use-sparingly/pymp4parse
from distutils.core import setup try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except(IOError, ImportError): long_description = open('README.md').read() setup( name='pymp4parse', version='0.3.0', packages=[''], url='https://github.com/use-sparingly/pymp4parse', license='The MIT License', author='Alastair Mccormack', author_email='alastair at alu.media', description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser', - requires=['bitstring'], + requires=['bitstring', 'six'], - install_requires=['bitstring'], + install_requires=['bitstring', 'six'], long_description=long_description, data_files=[('', ['README.md'])] )
Add six as dependency to fix import issue
## Code Before: from distutils.core import setup try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except(IOError, ImportError): long_description = open('README.md').read() setup( name='pymp4parse', version='0.3.0', packages=[''], url='https://github.com/use-sparingly/pymp4parse', license='The MIT License', author='Alastair Mccormack', author_email='alastair at alu.media', description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser', requires=['bitstring'], install_requires=['bitstring'], long_description=long_description, data_files=[('', ['README.md'])] ) ## Instruction: Add six as dependency to fix import issue ## Code After: from distutils.core import setup try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except(IOError, ImportError): long_description = open('README.md').read() setup( name='pymp4parse', version='0.3.0', packages=[''], url='https://github.com/use-sparingly/pymp4parse', license='The MIT License', author='Alastair Mccormack', author_email='alastair at alu.media', description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser', requires=['bitstring', 'six'], install_requires=['bitstring', 'six'], long_description=long_description, data_files=[('', ['README.md'])] )
// ... existing code ... description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser', requires=['bitstring', 'six'], install_requires=['bitstring', 'six'], long_description=long_description, // ... rest of the code ...
4d2ef07c64603e99f05f2233382dc2a7c5bff5ba
website/members/tests.py
website/members/tests.py
from django.contrib.auth.models import User from django.test import TestCase from datetime import datetime from members.models import Member class MemberTest(TestCase): def setUp(self): self.user = User.objects.create(username='test') self.member = Member.objects.create(user=self.user) self.assertEqual(1, Member.objects.count()) def test_delete_cascade(self): self.user.delete() self.assertEqual( 0, Member.objects.count(), 'Deleting a user deletes the member information.' ) def test_user_link(self): self.assertEqual( self.user, self.member.user, 'Members are linked to a user object.' ) def test_print_person_number(self): self.member.birthday = datetime.strptime('09/07/1999', '%d/%m/%Y') self.member.person_number_ext = '1234' self.assertEqual( '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' )
from django.contrib.auth.models import User from django.test import TestCase from datetime import datetime from members.models import Member, StudyProgram class MemberTest(TestCase): def setUp(self): self.user = User.objects.create(username='test') self.member = Member.objects.create(user=self.user) self.assertEqual(1, Member.objects.count()) def test_delete_cascade(self): self.user.delete() self.assertEqual( 0, Member.objects.count(), 'Deleting a user deletes the member information.' ) def test_user_link(self): self.assertEqual( self.user, self.member.user, 'Members are linked to a user object.' ) def test_print_person_number(self): self.member.birthday = datetime.strptime('09/07/1999', '%d/%m/%Y') self.member.person_number_ext = '1234' self.assertEqual( '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' ) def test_study_deletion(self): study = StudyProgram.objects.create(name='subject') self.member.study = study self.member.save() study.delete() self.member.refresh_from_db() self.assertEqual( None, self.member.study, 'Deleting a study program resets the study for the members' )
Add test for StudyProgram deletion
:green_heart: Add test for StudyProgram deletion
Python
agpl-3.0
Dekker1/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore
from django.contrib.auth.models import User from django.test import TestCase from datetime import datetime - from members.models import Member + from members.models import Member, StudyProgram class MemberTest(TestCase): def setUp(self): self.user = User.objects.create(username='test') self.member = Member.objects.create(user=self.user) self.assertEqual(1, Member.objects.count()) def test_delete_cascade(self): self.user.delete() self.assertEqual( 0, Member.objects.count(), 'Deleting a user deletes the member information.' ) def test_user_link(self): self.assertEqual( self.user, self.member.user, 'Members are linked to a user object.' ) def test_print_person_number(self): self.member.birthday = datetime.strptime('09/07/1999', '%d/%m/%Y') self.member.person_number_ext = '1234' self.assertEqual( '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' ) + def test_study_deletion(self): + study = StudyProgram.objects.create(name='subject') + self.member.study = study + self.member.save() + study.delete() + self.member.refresh_from_db() + self.assertEqual( + None, self.member.study, + 'Deleting a study program resets the study for the members' + ) +
Add test for StudyProgram deletion
## Code Before: from django.contrib.auth.models import User from django.test import TestCase from datetime import datetime from members.models import Member class MemberTest(TestCase): def setUp(self): self.user = User.objects.create(username='test') self.member = Member.objects.create(user=self.user) self.assertEqual(1, Member.objects.count()) def test_delete_cascade(self): self.user.delete() self.assertEqual( 0, Member.objects.count(), 'Deleting a user deletes the member information.' ) def test_user_link(self): self.assertEqual( self.user, self.member.user, 'Members are linked to a user object.' ) def test_print_person_number(self): self.member.birthday = datetime.strptime('09/07/1999', '%d/%m/%Y') self.member.person_number_ext = '1234' self.assertEqual( '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' ) ## Instruction: Add test for StudyProgram deletion ## Code After: from django.contrib.auth.models import User from django.test import TestCase from datetime import datetime from members.models import Member, StudyProgram class MemberTest(TestCase): def setUp(self): self.user = User.objects.create(username='test') self.member = Member.objects.create(user=self.user) self.assertEqual(1, Member.objects.count()) def test_delete_cascade(self): self.user.delete() self.assertEqual( 0, Member.objects.count(), 'Deleting a user deletes the member information.' ) def test_user_link(self): self.assertEqual( self.user, self.member.user, 'Members are linked to a user object.' ) def test_print_person_number(self): self.member.birthday = datetime.strptime('09/07/1999', '%d/%m/%Y') self.member.person_number_ext = '1234' self.assertEqual( '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' 
) def test_study_deletion(self): study = StudyProgram.objects.create(name='subject') self.member.study = study self.member.save() study.delete() self.member.refresh_from_db() self.assertEqual( None, self.member.study, 'Deleting a study program resets the study for the members' )
# ... existing code ... from members.models import Member, StudyProgram # ... modified code ... ) def test_study_deletion(self): study = StudyProgram.objects.create(name='subject') self.member.study = study self.member.save() study.delete() self.member.refresh_from_db() self.assertEqual( None, self.member.study, 'Deleting a study program resets the study for the members' ) # ... rest of the code ...
b9c3404550273e4b0af68ebe9da27c4bf405de9b
rohrpost/message.py
rohrpost/message.py
import json def _send_message(message, content: dict, close: bool): message.reply_channel.send({ 'text': json.dumps(content), 'close': close, }) def send_message(message, message_id, handler, close=False, error=None, **additional_data): content = dict() if message_id: content['id'] = message_id if handler: content['type'] = handler if error: content['error'] = error if additional_data: content['data'] = additional_data content.update(**additional_data) if not content: raise Exception('Cannot send an empty message.') _send_message(message, content, close=close) def send_success(message, message_id, handler, close=False, **additional_data): """ This method directly wraps send_message but checks the existence of id and type. """ if not message_id or not handler: raise Exception('You have to provide a message ID and handler on success messages.') send_message(message, message_id, handler, close=close, **additional_data) def send_error(message, message_id, handler, error, close=False, **additional_data): """ This method wraps send_message and makes sure that error is a keyword argument. """ send_message(message, message_id, handler, close=close, error=error, **additional_data)
import json def _send_message(message, content: dict, close: bool): message.reply_channel.send({ 'text': json.dumps(content), 'close': close, }) def send_message(message, message_id, handler, close=False, error=None, **additional_data): content = dict() if message_id: content['id'] = message_id if handler: content['type'] = handler if error: content['error'] = error if additional_data: content['data'] = additional_data if not content: raise Exception('Cannot send an empty message.') _send_message(message, content, close=close) def send_success(message, message_id, handler, close=False, **additional_data): """ This method directly wraps send_message but checks the existence of id and type. """ if not message_id or not handler: raise Exception('You have to provide a message ID and handler on success messages.') send_message(message, message_id, handler, close=close, **additional_data) def send_error(message, message_id, handler, error, close=False, **additional_data): """ This method wraps send_message and makes sure that error is a keyword argument. """ send_message(message, message_id, handler, close=close, error=error, **additional_data)
Remove superflous line, remove duplicate data
Remove superflous line, remove duplicate data
Python
mit
axsemantics/rohrpost,axsemantics/rohrpost
import json def _send_message(message, content: dict, close: bool): message.reply_channel.send({ 'text': json.dumps(content), 'close': close, }) def send_message(message, message_id, handler, close=False, error=None, **additional_data): content = dict() if message_id: content['id'] = message_id if handler: content['type'] = handler if error: content['error'] = error if additional_data: content['data'] = additional_data - content.update(**additional_data) if not content: raise Exception('Cannot send an empty message.') _send_message(message, content, close=close) def send_success(message, message_id, handler, close=False, **additional_data): """ This method directly wraps send_message but checks the existence of id and type. """ if not message_id or not handler: raise Exception('You have to provide a message ID and handler on success messages.') send_message(message, message_id, handler, close=close, **additional_data) def send_error(message, message_id, handler, error, close=False, **additional_data): """ This method wraps send_message and makes sure that error is a keyword argument. """ send_message(message, message_id, handler, close=close, error=error, **additional_data)
Remove superflous line, remove duplicate data
## Code Before: import json def _send_message(message, content: dict, close: bool): message.reply_channel.send({ 'text': json.dumps(content), 'close': close, }) def send_message(message, message_id, handler, close=False, error=None, **additional_data): content = dict() if message_id: content['id'] = message_id if handler: content['type'] = handler if error: content['error'] = error if additional_data: content['data'] = additional_data content.update(**additional_data) if not content: raise Exception('Cannot send an empty message.') _send_message(message, content, close=close) def send_success(message, message_id, handler, close=False, **additional_data): """ This method directly wraps send_message but checks the existence of id and type. """ if not message_id or not handler: raise Exception('You have to provide a message ID and handler on success messages.') send_message(message, message_id, handler, close=close, **additional_data) def send_error(message, message_id, handler, error, close=False, **additional_data): """ This method wraps send_message and makes sure that error is a keyword argument. """ send_message(message, message_id, handler, close=close, error=error, **additional_data) ## Instruction: Remove superflous line, remove duplicate data ## Code After: import json def _send_message(message, content: dict, close: bool): message.reply_channel.send({ 'text': json.dumps(content), 'close': close, }) def send_message(message, message_id, handler, close=False, error=None, **additional_data): content = dict() if message_id: content['id'] = message_id if handler: content['type'] = handler if error: content['error'] = error if additional_data: content['data'] = additional_data if not content: raise Exception('Cannot send an empty message.') _send_message(message, content, close=close) def send_success(message, message_id, handler, close=False, **additional_data): """ This method directly wraps send_message but checks the existence of id and type. 
""" if not message_id or not handler: raise Exception('You have to provide a message ID and handler on success messages.') send_message(message, message_id, handler, close=close, **additional_data) def send_error(message, message_id, handler, error, close=False, **additional_data): """ This method wraps send_message and makes sure that error is a keyword argument. """ send_message(message, message_id, handler, close=close, error=error, **additional_data)
... if not content: ...
6b83217f422b46ef9cc4ef5aa124350eee825fe1
satchless/contrib/tax/flatgroups/models.py
satchless/contrib/tax/flatgroups/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ class TaxGroup(models.Model): name = models.CharField(_("group name"), max_length=100) rate = models.DecimalField(_("rate"), max_digits=4, decimal_places=2, help_text=_("Percentile rate of the tax.")) rate_name = models.CharField(_("name of the rate"), max_length=30, help_text=_("Name of the rate which will be" " displayed to the user.")) def __unicode__(self): return self.name class TaxedProductMixin(models.Model): tax_group = models.ForeignKey(TaxGroup, related_name='products', null=True)
from django.db import models from django.utils.translation import ugettext_lazy as _ class TaxGroup(models.Model): name = models.CharField(_("group name"), max_length=100) rate = models.DecimalField(_("rate"), max_digits=4, decimal_places=2, help_text=_("Percentile rate of the tax.")) rate_name = models.CharField(_("name of the rate"), max_length=30, help_text=_("Name of the rate which will be" " displayed to the user.")) def __unicode__(self): return self.name class TaxedProductMixin(models.Model): tax_group = models.ForeignKey(TaxGroup, related_name='products', null=True) class Meta: abstract = True
Fix - TaxedProductMixin is abstract model
Fix - TaxedProductMixin is abstract model
Python
bsd-3-clause
taedori81/satchless
from django.db import models from django.utils.translation import ugettext_lazy as _ class TaxGroup(models.Model): name = models.CharField(_("group name"), max_length=100) rate = models.DecimalField(_("rate"), max_digits=4, decimal_places=2, help_text=_("Percentile rate of the tax.")) rate_name = models.CharField(_("name of the rate"), max_length=30, help_text=_("Name of the rate which will be" " displayed to the user.")) def __unicode__(self): return self.name class TaxedProductMixin(models.Model): tax_group = models.ForeignKey(TaxGroup, related_name='products', null=True) + class Meta: + abstract = True
Fix - TaxedProductMixin is abstract model
## Code Before: from django.db import models from django.utils.translation import ugettext_lazy as _ class TaxGroup(models.Model): name = models.CharField(_("group name"), max_length=100) rate = models.DecimalField(_("rate"), max_digits=4, decimal_places=2, help_text=_("Percentile rate of the tax.")) rate_name = models.CharField(_("name of the rate"), max_length=30, help_text=_("Name of the rate which will be" " displayed to the user.")) def __unicode__(self): return self.name class TaxedProductMixin(models.Model): tax_group = models.ForeignKey(TaxGroup, related_name='products', null=True) ## Instruction: Fix - TaxedProductMixin is abstract model ## Code After: from django.db import models from django.utils.translation import ugettext_lazy as _ class TaxGroup(models.Model): name = models.CharField(_("group name"), max_length=100) rate = models.DecimalField(_("rate"), max_digits=4, decimal_places=2, help_text=_("Percentile rate of the tax.")) rate_name = models.CharField(_("name of the rate"), max_length=30, help_text=_("Name of the rate which will be" " displayed to the user.")) def __unicode__(self): return self.name class TaxedProductMixin(models.Model): tax_group = models.ForeignKey(TaxGroup, related_name='products', null=True) class Meta: abstract = True
... null=True) class Meta: abstract = True ...
09d78bb23ffba9d1d709a3ba5cbabbe84a9b1978
server/macros/currency_usd_to_cad.py
server/macros/currency_usd_to_cad.py
import os import re import requests USD_TO_CAD = 1.3139 # backup def get_rate(): """Get USD to CAD rate.""" try: r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5) return float(r.text.split(',')[1]) except Exception: return USD_TO_CAD def usd_to_cad(item, **kwargs): """Convert USD to CAD.""" rate = get_rate() if os.environ.get('BEHAVE_TESTING'): rate = USD_TO_CAD def convert(match): usd = float(match.group(1)) cad = rate * usd return 'CAD %d' % cad item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html']) return item name = 'usd_to_cad' label = 'Convert USD to CAD' shortcut = 'd' callback = usd_to_cad desks = ['SPORTS DESK', 'POLITICS']
import os import re import requests USD_TO_CAD = 1.3139 # backup def get_rate(): """Get USD to CAD rate.""" try: r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5) return float(r.text.split(',')[1]) except Exception: return USD_TO_CAD def usd_to_cad(item, **kwargs): """Convert USD to CAD.""" rate = get_rate() if os.environ.get('BEHAVE_TESTING'): rate = USD_TO_CAD def convert(match): usd = float(match.group(1)) cad = rate * usd return 'CAD %d' % cad item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html']) return item name = 'usd_to_cad' label = 'Convert USD to CAD' shortcut = 'd' callback = usd_to_cad
Delete the desks settings for macro
fix(macro): Delete the desks settings for macro
Python
agpl-3.0
pavlovicnemanja/superdesk,amagdas/superdesk,verifiedpixel/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,plamut/superdesk,marwoodandrew/superdesk,petrjasek/superdesk,fritzSF/superdesk,petrjasek/superdesk-ntb,verifiedpixel/superdesk,marwoodandrew/superdesk,superdesk/superdesk-aap,verifiedpixel/superdesk,superdesk/superdesk,akintolga/superdesk,sivakuna-aap/superdesk,sivakuna-aap/superdesk,liveblog/superdesk,pavlovicnemanja92/superdesk,superdesk/superdesk-ntb,ancafarcas/superdesk,hlmnrmr/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,akintolga/superdesk,marwoodandrew/superdesk-aap,ioanpocol/superdesk,liveblog/superdesk,pavlovicnemanja/superdesk,darconny/superdesk,fritzSF/superdesk,pavlovicnemanja/superdesk,pavlovicnemanja92/superdesk,amagdas/superdesk,liveblog/superdesk,thnkloud9/superdesk,superdesk/superdesk,pavlovicnemanja/superdesk,plamut/superdesk,mdhaman/superdesk,darconny/superdesk,akintolga/superdesk-aap,amagdas/superdesk,liveblog/superdesk,gbbr/superdesk,petrjasek/superdesk,superdesk/superdesk-ntb,hlmnrmr/superdesk,verifiedpixel/superdesk,superdesk/superdesk-aap,superdesk/superdesk-aap,marwoodandrew/superdesk-aap,pavlovicnemanja92/superdesk,hlmnrmr/superdesk,ioanpocol/superdesk-ntb,marwoodandrew/superdesk-aap,mdhaman/superdesk-aap,ancafarcas/superdesk,verifiedpixel/superdesk,marwoodandrew/superdesk,petrjasek/superdesk-ntb,mdhaman/superdesk,fritzSF/superdesk,pavlovicnemanja92/superdesk,mdhaman/superdesk,akintolga/superdesk-aap,plamut/superdesk,petrjasek/superdesk-ntb,sjunaid/superdesk,akintolga/superdesk-aap,amagdas/superdesk,mdhaman/superdesk-aap,ioanpocol/superdesk,gbbr/superdesk,Aca-jov/superdesk,akintolga/superdesk,marwoodandrew/superdesk,plamut/superdesk,darconny/superdesk,marwoodandrew/superdesk,amagdas/superdesk,fritzSF/superdesk,ioanpocol/superdesk-ntb,sjunaid/superdesk,ancafarcas/superdesk,marwoodandrew/superdesk-aap,petrjasek/superdesk-ntb,mugurrus/superdesk,superdesk/superdesk-aap,gbbr/superdesk,Aca-jov/superdesk,thnkloud9/superdesk,su
perdesk/superdesk-ntb,superdesk/superdesk-ntb,Aca-jov/superdesk,superdesk/superdesk,thnkloud9/superdesk,akintolga/superdesk,ioanpocol/superdesk,akintolga/superdesk-aap,mugurrus/superdesk,sivakuna-aap/superdesk,mugurrus/superdesk,superdesk/superdesk,liveblog/superdesk,akintolga/superdesk,petrjasek/superdesk,fritzSF/superdesk,sjunaid/superdesk,plamut/superdesk,petrjasek/superdesk,pavlovicnemanja92/superdesk,ioanpocol/superdesk-ntb
import os import re import requests USD_TO_CAD = 1.3139 # backup def get_rate(): """Get USD to CAD rate.""" try: r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5) return float(r.text.split(',')[1]) except Exception: return USD_TO_CAD def usd_to_cad(item, **kwargs): """Convert USD to CAD.""" rate = get_rate() if os.environ.get('BEHAVE_TESTING'): rate = USD_TO_CAD def convert(match): usd = float(match.group(1)) cad = rate * usd return 'CAD %d' % cad item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html']) return item name = 'usd_to_cad' label = 'Convert USD to CAD' shortcut = 'd' callback = usd_to_cad - desks = ['SPORTS DESK', 'POLITICS']
Delete the desks settings for macro
## Code Before: import os import re import requests USD_TO_CAD = 1.3139 # backup def get_rate(): """Get USD to CAD rate.""" try: r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5) return float(r.text.split(',')[1]) except Exception: return USD_TO_CAD def usd_to_cad(item, **kwargs): """Convert USD to CAD.""" rate = get_rate() if os.environ.get('BEHAVE_TESTING'): rate = USD_TO_CAD def convert(match): usd = float(match.group(1)) cad = rate * usd return 'CAD %d' % cad item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html']) return item name = 'usd_to_cad' label = 'Convert USD to CAD' shortcut = 'd' callback = usd_to_cad desks = ['SPORTS DESK', 'POLITICS'] ## Instruction: Delete the desks settings for macro ## Code After: import os import re import requests USD_TO_CAD = 1.3139 # backup def get_rate(): """Get USD to CAD rate.""" try: r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5) return float(r.text.split(',')[1]) except Exception: return USD_TO_CAD def usd_to_cad(item, **kwargs): """Convert USD to CAD.""" rate = get_rate() if os.environ.get('BEHAVE_TESTING'): rate = USD_TO_CAD def convert(match): usd = float(match.group(1)) cad = rate * usd return 'CAD %d' % cad item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html']) return item name = 'usd_to_cad' label = 'Convert USD to CAD' shortcut = 'd' callback = usd_to_cad
// ... existing code ... callback = usd_to_cad // ... rest of the code ...
ad6670874f37c52f4a15f30e1ab2682bd81f40f8
python/helpers/profiler/yappi_profiler.py
python/helpers/profiler/yappi_profiler.py
import yappi class YappiProfile(object): """ Wrapper class that represents Yappi profiling backend with API matching the cProfile. """ def __init__(self): self.stats = None def runcall(self, func, *args, **kw): self.enable() try: return func(*args, **kw) finally: self.disable() def enable(self): yappi.start() def disable(self): yappi.stop() def create_stats(self): self.stats = yappi.convert2pstats(yappi.get_func_stats()).stats def getstats(self): if self.stats is None: self.create_stats() return self.stats def dump_stats(self, file): import marshal f = open(file, 'wb') marshal.dump(self.getstats(), f) f.close()
import yappi class YappiProfile(object): """ Wrapper class that represents Yappi profiling backend with API matching the cProfile. """ def __init__(self): self.stats = None def runcall(self, func, *args, **kw): self.enable() try: return func(*args, **kw) finally: self.disable() def enable(self): yappi.start() def disable(self): yappi.stop() def create_stats(self): self.stats = yappi.convert2pstats(yappi.get_func_stats()).stats def getstats(self): self.create_stats() return self.stats def dump_stats(self, file): import marshal f = open(file, 'wb') marshal.dump(self.getstats(), f) f.close()
Fix 2nd and more capturing snapshot (PY-15823).
Fix 2nd and more capturing snapshot (PY-15823).
Python
apache-2.0
adedayo/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,da1z/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,clumsy/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,fitermay/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,amith01994/intellij-community,diorcety/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,supersven/intellij-community,supersven/intellij-community,ibinti/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,supersven/intellij-community,asedunov/intellij-community,semonte/intellij-community,wreckJ/intellij-community,signed/intellij-community,adedayo/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,l
ucafavatella/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,samthor/intellij-community,jagguli/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,caot/intellij-community,hurricup/intellij-community,retomerz/intellij-community,apixandru/intellij-community,allotria/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,kdwink/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,signed/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,da1z/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,slisson/intellij-community,ryano144/intellij-community,petteyg/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,slisson/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,xfournet/intellij-community,holmes/intellij-community,fnouama/intellij-community,fnouama/intellij-community,signed/
intellij-community,salguarnieri/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,signed/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,kool79/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,diorcety/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,retomerz/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,robovm/robovm-studio,da1z/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,apixandru/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,caot/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,kool79/intellij-community,samthor/intellij-community,ryano144/intellij-community,petteyg/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,da1z/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,samthor/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,petteyg/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-communit
y,fengbaicanhe/intellij-community,amith01994/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,izonder/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,izonder/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,blademainer/intellij-community,semonte/intellij-community,Distrotech/intellij-community,signed/intellij-community,izonder/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,holmes/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,signed/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,izonder/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,samthor/intellij-community,supersven/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,slisson/intellij-community,adedayo/intellij-community,da1z/intellij-community,allotria/intellij-community,adedayo/intellij-community,holmes/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,sam
thor/intellij-community,blademainer/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,signed/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,diorcety/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,semonte/intellij-community,xfournet/intellij-community,robovm/robovm-studio,salguarnieri/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,FHannes/intellij-community,blademainer/intellij-community,signed/intellij-community,caot/intellij-community,slisson/intellij-community,Lekanich/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,da1z/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,holmes
/intellij-community,caot/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,slisson/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,FHannes/intellij-community,amith01994/intellij-community,fnouama/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,izonder/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,asedunov/intellij-community,fnouama/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,holmes/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,caot/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,nicolargo/intellij-community,caot/intellij-community,blademainer/intellij-community,FHannes/intellij-community,
apixandru/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,holmes/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,kdwink/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,Lekanich/intellij-community,kool79/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,vladmm/intellij-community,petteyg/intellij-community,signed/intellij-community,semonte/intellij-community,xfournet/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,fnouama/intellij-community,supersven/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,caot/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,allotria/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,ibinti/intellij-community,amith01994/intellij-community,semonte/intellij-community,retomerz/intellij-community,holmes/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,kdwink/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-communit
y,semonte/intellij-community,xfournet/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,fitermay/intellij-community,ibinti/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,retomerz/intellij-community,clumsy/intellij-community,vladmm/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,retomerz/intellij-community,FHannes/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,kool79/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,supersven/intellij-community,apixandru/intellij-community,slisson/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,Th
iagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,da1z/intellij-community,slisson/intellij-community,hurricup/intellij-community,robovm/robovm-studio,izonder/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,ahb0327/intellij-community,kool79/intellij-community,allotria/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,adedayo/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,clumsy/intellij-community,FHannes/intellij-community,FHannes/intellij-community,vladmm/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,kool79/intellij-community,hurricup/intellij-community,jag
guli/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,jagguli/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,asedunov/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,apixandru/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,slisson/intellij-community,izonder/intellij-community,amith01994
/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,caot/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,samthor/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,holmes/intellij-community,holmes/intellij-community,apixandru/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,semonte/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community
import yappi class YappiProfile(object): """ Wrapper class that represents Yappi profiling backend with API matching the cProfile. """ def __init__(self): self.stats = None def runcall(self, func, *args, **kw): self.enable() try: return func(*args, **kw) finally: self.disable() def enable(self): yappi.start() def disable(self): yappi.stop() def create_stats(self): self.stats = yappi.convert2pstats(yappi.get_func_stats()).stats def getstats(self): - if self.stats is None: - self.create_stats() + self.create_stats() return self.stats def dump_stats(self, file): import marshal f = open(file, 'wb') marshal.dump(self.getstats(), f) f.close()
Fix 2nd and more capturing snapshot (PY-15823).
## Code Before: import yappi class YappiProfile(object): """ Wrapper class that represents Yappi profiling backend with API matching the cProfile. """ def __init__(self): self.stats = None def runcall(self, func, *args, **kw): self.enable() try: return func(*args, **kw) finally: self.disable() def enable(self): yappi.start() def disable(self): yappi.stop() def create_stats(self): self.stats = yappi.convert2pstats(yappi.get_func_stats()).stats def getstats(self): if self.stats is None: self.create_stats() return self.stats def dump_stats(self, file): import marshal f = open(file, 'wb') marshal.dump(self.getstats(), f) f.close() ## Instruction: Fix 2nd and more capturing snapshot (PY-15823). ## Code After: import yappi class YappiProfile(object): """ Wrapper class that represents Yappi profiling backend with API matching the cProfile. """ def __init__(self): self.stats = None def runcall(self, func, *args, **kw): self.enable() try: return func(*args, **kw) finally: self.disable() def enable(self): yappi.start() def disable(self): yappi.stop() def create_stats(self): self.stats = yappi.convert2pstats(yappi.get_func_stats()).stats def getstats(self): self.create_stats() return self.stats def dump_stats(self, file): import marshal f = open(file, 'wb') marshal.dump(self.getstats(), f) f.close()
// ... existing code ... def getstats(self): self.create_stats() // ... rest of the code ...
722e975e8819b59d9d2f53627a5d37550ea09c55
tests/test_clean.py
tests/test_clean.py
from mergepurge import clean import pandas as pd import numpy as np t_data = pd.Series({'name': 'Timothy Testerosa III'}) t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols # Then, iterate over the names def test_clean_contact_name(): assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed
from mergepurge import clean import pandas as pd import numpy as np complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known)
Add tests for most functions in clean module
Add tests for most functions in clean module Iterate over the complete and parsed test data and confirm we can still produce the excepted output for most functions in clean.py.
Python
mit
mikecunha/mergepurge
from mergepurge import clean import pandas as pd import numpy as np - t_data = pd.Series({'name': 'Timothy Testerosa III'}) - t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') - # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols - # Then, iterate over the names + complete = pd.read_csv('complete_parsed.tsv', + sep='\t', encoding='utf-8', + dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) + + COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] + COMP_CONTACT_COLS = ['first', 'last'] + COMP_COMPANY_COLS = ['company'] + BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] + + partial = pd.read_csv('./incomplete.tsv', + sep='\t', encoding='utf-8', + dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): - assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed + # Re-parse approx 100 records + for _, test_record in complete.iterrows(): + known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), + test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) + parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) + assert parsed == known + + def test_parse_location_cols(): + for _, test_record in complete.iterrows(): + known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), + test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), + test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) + parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) + assert parsed == known + + + def test_parse_business_name(): + for _, test_record in complete.iterrows(): + known = test_record.get('aa_company', np.nan) + parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) + assert parsed == known + + + def test_build_matching_cols(): + known = 
complete[BUILT_COLS].head(10).copy() + built = clean.build_matching_cols(complete.head(10).copy(), + COMP_LOC_COLS, + COMP_CONTACT_COLS, + COMP_COMPANY_COLS) + assert all(built[BUILT_COLS] == known) +
Add tests for most functions in clean module
## Code Before: from mergepurge import clean import pandas as pd import numpy as np t_data = pd.Series({'name': 'Timothy Testerosa III'}) t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols # Then, iterate over the names def test_clean_contact_name(): assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed ## Instruction: Add tests for most functions in clean module ## Code After: from mergepurge import clean import pandas as pd import numpy as np complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, 
COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known)
... complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) ... def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known) ...
7b4b2fcbcb9a95c07f09b71305afa0c5ce95fe99
tenant_schemas/routers.py
tenant_schemas/routers.py
from django.conf import settings class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def allow_syncdb(self, db, model): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from tenant_schemas.utils import get_public_schema_name, app_labels if connection.schema_name == get_public_schema_name(): if model._meta.app_label not in app_labels(settings.SHARED_APPS): return False else: if model._meta.app_label not in app_labels(settings.TENANT_APPS): return False return None
from django.conf import settings class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def allow_migrate(self, db, model): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from tenant_schemas.utils import get_public_schema_name, app_labels if connection.schema_name == get_public_schema_name(): if model._meta.app_label not in app_labels(settings.SHARED_APPS): return False else: if model._meta.app_label not in app_labels(settings.TENANT_APPS): return False return None def allow_syncdb(self, db, model): # allow_syncdb was changed to allow_migrate in django 1.7 return self.allow_migrate(db, model)
Add database router allow_migrate() for Django 1.7
Add database router allow_migrate() for Django 1.7
Python
mit
goodtune/django-tenant-schemas,Mobytes/django-tenant-schemas,kajarenc/django-tenant-schemas,honur/django-tenant-schemas,mcanaves/django-tenant-schemas,ArtProcessors/django-tenant-schemas,goodtune/django-tenant-schemas,ArtProcessors/django-tenant-schemas,bernardopires/django-tenant-schemas,bernardopires/django-tenant-schemas,pombredanne/django-tenant-schemas
from django.conf import settings class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from tenant_schemas.utils import get_public_schema_name, app_labels if connection.schema_name == get_public_schema_name(): if model._meta.app_label not in app_labels(settings.SHARED_APPS): return False else: if model._meta.app_label not in app_labels(settings.TENANT_APPS): return False return None + def allow_syncdb(self, db, model): + # allow_syncdb was changed to allow_migrate in django 1.7 + return self.allow_migrate(db, model) +
Add database router allow_migrate() for Django 1.7
## Code Before: from django.conf import settings class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def allow_syncdb(self, db, model): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from tenant_schemas.utils import get_public_schema_name, app_labels if connection.schema_name == get_public_schema_name(): if model._meta.app_label not in app_labels(settings.SHARED_APPS): return False else: if model._meta.app_label not in app_labels(settings.TENANT_APPS): return False return None ## Instruction: Add database router allow_migrate() for Django 1.7 ## Code After: from django.conf import settings class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def allow_migrate(self, db, model): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from tenant_schemas.utils import get_public_schema_name, app_labels if connection.schema_name == get_public_schema_name(): if model._meta.app_label not in app_labels(settings.SHARED_APPS): return False else: if model._meta.app_label not in app_labels(settings.TENANT_APPS): return False return None def allow_syncdb(self, db, model): # allow_syncdb was changed to allow_migrate in django 1.7 return self.allow_migrate(db, model)
// ... existing code ... def allow_migrate(self, db, model): # the imports below need to be done here else django <1.5 goes crazy // ... modified code ... return None def allow_syncdb(self, db, model): # allow_syncdb was changed to allow_migrate in django 1.7 return self.allow_migrate(db, model) // ... rest of the code ...
92aeffe058bfd724309ddcdbdab9226057074afe
masters/master.chromium.lkgr/master_source_cfg.py
masters/master.chromium.lkgr/master_source_cfg.py
from buildbot.changes.pb import PBChangeSource def Update(config, active_master, c): # Polls config.Master.trunk_url for changes c['change_source'].append(PBChangeSource())
from master.url_poller import URLPoller LKGR_URL = 'https://chromium-status.appspot.com/lkgr' def Update(config, active_master, c): c['change_source'].append( URLPoller(changeurl=LKGR_URL, pollInterval=300, category='lkgr', include_revision=True))
Switch master.chromium.lkgr to poll the chromium-status app.
Switch master.chromium.lkgr to poll the chromium-status app. Using a PBChangeSource is silly, opaque, and potentially dangerous. We already have a URLPoller for exactly this use-case (already in use by chromium.endure) so let's use it here too. This also has the advantage of making sure the LKGR waterfall picks up *all* updates to LKGR, including manual ones. [email protected], [email protected] BUG=366954 Review URL: https://codereview.chromium.org/255753002 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@266093 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
- from buildbot.changes.pb import PBChangeSource + from master.url_poller import URLPoller + + + LKGR_URL = 'https://chromium-status.appspot.com/lkgr' def Update(config, active_master, c): - # Polls config.Master.trunk_url for changes - c['change_source'].append(PBChangeSource()) + c['change_source'].append( + URLPoller(changeurl=LKGR_URL, pollInterval=300, + category='lkgr', include_revision=True))
Switch master.chromium.lkgr to poll the chromium-status app.
## Code Before: from buildbot.changes.pb import PBChangeSource def Update(config, active_master, c): # Polls config.Master.trunk_url for changes c['change_source'].append(PBChangeSource()) ## Instruction: Switch master.chromium.lkgr to poll the chromium-status app. ## Code After: from master.url_poller import URLPoller LKGR_URL = 'https://chromium-status.appspot.com/lkgr' def Update(config, active_master, c): c['change_source'].append( URLPoller(changeurl=LKGR_URL, pollInterval=300, category='lkgr', include_revision=True))
... from master.url_poller import URLPoller LKGR_URL = 'https://chromium-status.appspot.com/lkgr' ... def Update(config, active_master, c): c['change_source'].append( URLPoller(changeurl=LKGR_URL, pollInterval=300, category='lkgr', include_revision=True)) ...
b1bd07038b0c6a6d801e686372996b3478c71af9
iss/management/commands/upsert_iss_organizations.py
iss/management/commands/upsert_iss_organizations.py
import logging import os from django.core.management.base import BaseCommand import iss.salesforce import iss.utils logger = logging.getLogger(os.path.basename(__file__)) class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '-m', '--modified-within', type=int, metavar='n-days', default=7, help='upsert organizations for accounts modified within n-days') def handle(self, *args, **options): upsert_organizations_for_recently_modified_accounts( options['modified_within']) def upsert_organizations_for_recently_modified_accounts(since=7): """Upsert organizations for SF Accounts modified in last `since` days.""" logger.info('upserting orgs for accounts modified in last {since} days'. format(since=since)) recently_modified_accounts = ( iss.salesforce.Account.get_recently_modified_accounts(since=since)) iss.utils.upsert_organizations_for_accounts(recently_modified_accounts)
import logging import os from django.core.management.base import BaseCommand import iss.models import iss.salesforce import iss.utils logger = logging.getLogger(os.path.basename(__file__)) class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '-m', '--modified-within', type=int, metavar='n-days', default=7, help='upsert organizations for accounts modified within n-days') parser.add_argument( '-i', '--include-aashe-in-website', action='store_true', help='force AASHE exclude_from_website to be False') def handle(self, *args, **options): upsert_organizations_for_recently_modified_accounts( since=options['modified_within'], include_aashe_in_website=options['include_aashe_in_website']) def upsert_organizations_for_recently_modified_accounts( since=7, include_aashe_in_website=False): """Upsert organizations for SF Accounts modified in last `since` days. When `include_aashe_in_website` is true, set the `exclude_from_website` flag on the Organization representing AASHE to False (0, actually). (Added for the Hub project.) """ logger.info('upserting orgs for accounts modified in last {since} days'. format(since=since)) recently_modified_accounts = ( iss.salesforce.Account.get_recently_modified_accounts(since=since)) iss.utils.upsert_organizations_for_accounts(recently_modified_accounts) if include_aashe_in_website: aashe = iss.models.Organization.objects.get(org_name="AASHE") if aashe.exclude_from_website: aashe.exclude_from_website = 0 aashe.save()
Add --include-aashe-in-website flag to org upsert
Add --include-aashe-in-website flag to org upsert
Python
mit
AASHE/iss
import logging import os from django.core.management.base import BaseCommand + import iss.models import iss.salesforce import iss.utils logger = logging.getLogger(os.path.basename(__file__)) class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '-m', '--modified-within', type=int, metavar='n-days', default=7, help='upsert organizations for accounts modified within n-days') + parser.add_argument( + '-i', '--include-aashe-in-website', + action='store_true', + help='force AASHE exclude_from_website to be False') def handle(self, *args, **options): upsert_organizations_for_recently_modified_accounts( - options['modified_within']) + since=options['modified_within'], + include_aashe_in_website=options['include_aashe_in_website']) - def upsert_organizations_for_recently_modified_accounts(since=7): + def upsert_organizations_for_recently_modified_accounts( + since=7, include_aashe_in_website=False): - """Upsert organizations for SF Accounts modified in last `since` days.""" + """Upsert organizations for SF Accounts modified in last `since` days. + + When `include_aashe_in_website` is true, set the + `exclude_from_website` flag on the Organization representing AASHE + to False (0, actually). (Added for the Hub project.) + """ logger.info('upserting orgs for accounts modified in last {since} days'. format(since=since)) recently_modified_accounts = ( iss.salesforce.Account.get_recently_modified_accounts(since=since)) iss.utils.upsert_organizations_for_accounts(recently_modified_accounts) + if include_aashe_in_website: + aashe = iss.models.Organization.objects.get(org_name="AASHE") + if aashe.exclude_from_website: + aashe.exclude_from_website = 0 + aashe.save() +
Add --include-aashe-in-website flag to org upsert
## Code Before: import logging import os from django.core.management.base import BaseCommand import iss.salesforce import iss.utils logger = logging.getLogger(os.path.basename(__file__)) class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '-m', '--modified-within', type=int, metavar='n-days', default=7, help='upsert organizations for accounts modified within n-days') def handle(self, *args, **options): upsert_organizations_for_recently_modified_accounts( options['modified_within']) def upsert_organizations_for_recently_modified_accounts(since=7): """Upsert organizations for SF Accounts modified in last `since` days.""" logger.info('upserting orgs for accounts modified in last {since} days'. format(since=since)) recently_modified_accounts = ( iss.salesforce.Account.get_recently_modified_accounts(since=since)) iss.utils.upsert_organizations_for_accounts(recently_modified_accounts) ## Instruction: Add --include-aashe-in-website flag to org upsert ## Code After: import logging import os from django.core.management.base import BaseCommand import iss.models import iss.salesforce import iss.utils logger = logging.getLogger(os.path.basename(__file__)) class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '-m', '--modified-within', type=int, metavar='n-days', default=7, help='upsert organizations for accounts modified within n-days') parser.add_argument( '-i', '--include-aashe-in-website', action='store_true', help='force AASHE exclude_from_website to be False') def handle(self, *args, **options): upsert_organizations_for_recently_modified_accounts( since=options['modified_within'], include_aashe_in_website=options['include_aashe_in_website']) def upsert_organizations_for_recently_modified_accounts( since=7, include_aashe_in_website=False): """Upsert organizations for SF Accounts modified in last `since` days. 
When `include_aashe_in_website` is true, set the `exclude_from_website` flag on the Organization representing AASHE to False (0, actually). (Added for the Hub project.) """ logger.info('upserting orgs for accounts modified in last {since} days'. format(since=since)) recently_modified_accounts = ( iss.salesforce.Account.get_recently_modified_accounts(since=since)) iss.utils.upsert_organizations_for_accounts(recently_modified_accounts) if include_aashe_in_website: aashe = iss.models.Organization.objects.get(org_name="AASHE") if aashe.exclude_from_website: aashe.exclude_from_website = 0 aashe.save()
... import iss.models import iss.salesforce ... help='upsert organizations for accounts modified within n-days') parser.add_argument( '-i', '--include-aashe-in-website', action='store_true', help='force AASHE exclude_from_website to be False') ... upsert_organizations_for_recently_modified_accounts( since=options['modified_within'], include_aashe_in_website=options['include_aashe_in_website']) ... def upsert_organizations_for_recently_modified_accounts( since=7, include_aashe_in_website=False): """Upsert organizations for SF Accounts modified in last `since` days. When `include_aashe_in_website` is true, set the `exclude_from_website` flag on the Organization representing AASHE to False (0, actually). (Added for the Hub project.) """ logger.info('upserting orgs for accounts modified in last {since} days'. ... iss.utils.upsert_organizations_for_accounts(recently_modified_accounts) if include_aashe_in_website: aashe = iss.models.Organization.objects.get(org_name="AASHE") if aashe.exclude_from_website: aashe.exclude_from_website = 0 aashe.save() ...
17ab8c01a88bda8dba4aaa5e57c857babfeb9444
debtcollector/fixtures/disable.py
debtcollector/fixtures/disable.py
from __future__ import absolute_import import fixtures from debtcollector import _utils class DisableFixture(fixtures.Fixture): """Fixture that disables debtcollector triggered warnings. This does **not** disable warnings calls emitted by other libraries. This can be used like:: from debtcollector.fixtures import disable with disable.DisableFixture(): <some code that calls into depreciated code> """ def _setUp(self): self.addCleanup(setattr, _utils, "_enabled", True) _utils._enabled = False
import fixtures from debtcollector import _utils class DisableFixture(fixtures.Fixture): """Fixture that disables debtcollector triggered warnings. This does **not** disable warnings calls emitted by other libraries. This can be used like:: from debtcollector.fixtures import disable with disable.DisableFixture(): <some code that calls into depreciated code> """ def _setUp(self): self.addCleanup(setattr, _utils, "_enabled", True) _utils._enabled = False
Stop to use the __future__ module.
Stop to use the __future__ module. The __future__ module [1] was used in this context to ensure compatibility between python 2 and python 3. We previously dropped the support of python 2.7 [2] and now we only support python 3 so we don't need to continue to use this module and the imports listed below. Imports commonly used and their related PEPs: - `division` is related to PEP 238 [3] - `print_function` is related to PEP 3105 [4] - `unicode_literals` is related to PEP 3112 [5] - `with_statement` is related to PEP 343 [6] - `absolute_import` is related to PEP 328 [7] [1] https://docs.python.org/3/library/__future__.html [2] https://governance.openstack.org/tc/goals/selected/ussuri/drop-py27.html [3] https://www.python.org/dev/peps/pep-0238 [4] https://www.python.org/dev/peps/pep-3105 [5] https://www.python.org/dev/peps/pep-3112 [6] https://www.python.org/dev/peps/pep-0343 [7] https://www.python.org/dev/peps/pep-0328 Change-Id: I2b2f006e0ec145730bec843add4147345797b920
Python
apache-2.0
openstack/debtcollector
- - from __future__ import absolute_import import fixtures from debtcollector import _utils class DisableFixture(fixtures.Fixture): """Fixture that disables debtcollector triggered warnings. This does **not** disable warnings calls emitted by other libraries. This can be used like:: from debtcollector.fixtures import disable with disable.DisableFixture(): <some code that calls into depreciated code> """ def _setUp(self): self.addCleanup(setattr, _utils, "_enabled", True) _utils._enabled = False
Stop to use the __future__ module.
## Code Before: from __future__ import absolute_import import fixtures from debtcollector import _utils class DisableFixture(fixtures.Fixture): """Fixture that disables debtcollector triggered warnings. This does **not** disable warnings calls emitted by other libraries. This can be used like:: from debtcollector.fixtures import disable with disable.DisableFixture(): <some code that calls into depreciated code> """ def _setUp(self): self.addCleanup(setattr, _utils, "_enabled", True) _utils._enabled = False ## Instruction: Stop to use the __future__ module. ## Code After: import fixtures from debtcollector import _utils class DisableFixture(fixtures.Fixture): """Fixture that disables debtcollector triggered warnings. This does **not** disable warnings calls emitted by other libraries. This can be used like:: from debtcollector.fixtures import disable with disable.DisableFixture(): <some code that calls into depreciated code> """ def _setUp(self): self.addCleanup(setattr, _utils, "_enabled", True) _utils._enabled = False
// ... existing code ... // ... rest of the code ...
d3f72f3ded76fb49eedb0c93c58211aab0231b97
jetson/networkTable.py
jetson/networkTable.py
import time from networktables import NetworkTables rioIP = '10.58.06.2' # this shouldn't change tableName = 'JetsonToRio' # should be same in rio's java NT program def initTable(): NetworkTables.initialize(server=rioIP) return NetworkTables.getTable(tableName) def pushVals(table, jetsonVals): table.putNumberArray(jetsonVals)
import time from networktables import NetworkTables def initTable(): NetworkTables.initialize(server=rioIP) return NetworkTables.getTable(tableName) def pushVals(table, jetsonVals): table.putNumberArray(jetsonVals) class NetworkInterface(object): """docstring for NetworkInterface.""" rioIP = '10.58.06.2' # this shouldn't change tableName = 'SmartDashboard' # should be same in rio's java NT program table = None def __init__(self): super(NetworkInterface, self).__init__() NetworkTables.initialize(server=rioIP) self.table = NetworkTables.getTable(tableName) def pushVals(jetsonVals): table.putNumberArray("JetsonVals",jetsonVals)
Put network table interface in class format
Put network table interface in class format
Python
mit
frc5806/Steamworks,frc5806/Steamworks,frc5806/Steamworks,frc5806/Steamworks
import time from networktables import NetworkTables + - rioIP = '10.58.06.2' # this shouldn't change - tableName = 'JetsonToRio' # should be same in rio's java NT program def initTable(): NetworkTables.initialize(server=rioIP) return NetworkTables.getTable(tableName) def pushVals(table, jetsonVals): table.putNumberArray(jetsonVals) - - + + class NetworkInterface(object): + """docstring for NetworkInterface.""" + rioIP = '10.58.06.2' # this shouldn't change + tableName = 'SmartDashboard' # should be same in rio's java NT program + table = None + + def __init__(self): + super(NetworkInterface, self).__init__() + NetworkTables.initialize(server=rioIP) + self.table = NetworkTables.getTable(tableName) + + def pushVals(jetsonVals): + table.putNumberArray("JetsonVals",jetsonVals) +
Put network table interface in class format
## Code Before: import time from networktables import NetworkTables rioIP = '10.58.06.2' # this shouldn't change tableName = 'JetsonToRio' # should be same in rio's java NT program def initTable(): NetworkTables.initialize(server=rioIP) return NetworkTables.getTable(tableName) def pushVals(table, jetsonVals): table.putNumberArray(jetsonVals) ## Instruction: Put network table interface in class format ## Code After: import time from networktables import NetworkTables def initTable(): NetworkTables.initialize(server=rioIP) return NetworkTables.getTable(tableName) def pushVals(table, jetsonVals): table.putNumberArray(jetsonVals) class NetworkInterface(object): """docstring for NetworkInterface.""" rioIP = '10.58.06.2' # this shouldn't change tableName = 'SmartDashboard' # should be same in rio's java NT program table = None def __init__(self): super(NetworkInterface, self).__init__() NetworkTables.initialize(server=rioIP) self.table = NetworkTables.getTable(tableName) def pushVals(jetsonVals): table.putNumberArray("JetsonVals",jetsonVals)
// ... existing code ... // ... modified code ... table.putNumberArray(jetsonVals) class NetworkInterface(object): """docstring for NetworkInterface.""" rioIP = '10.58.06.2' # this shouldn't change tableName = 'SmartDashboard' # should be same in rio's java NT program table = None def __init__(self): super(NetworkInterface, self).__init__() NetworkTables.initialize(server=rioIP) self.table = NetworkTables.getTable(tableName) def pushVals(jetsonVals): table.putNumberArray("JetsonVals",jetsonVals) // ... rest of the code ...
58cd5650900a426363c7e0b8fb9bf7d2f881f95b
quickadmin/config.py
quickadmin/config.py
from distutils.version import StrictVersion from django import get_version QADMIN_DEFAULT_EXCLUDES = [ 'django.contrib.sessions', 'django.contrib.admin', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.comments', 'django.contrib.flatpages', 'django.contrib.sitemaps', 'south', ] USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
from distutils.version import StrictVersion from django import get_version QADMIN_DEFAULT_EXCLUDES = [ 'django.contrib.sessions', 'django.contrib.auth', 'django.contrib.admin', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.comments', 'django.contrib.flatpages', 'django.contrib.sitemaps', 'south', ] USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
Add Django auth as a stock application
Add Django auth as a stock application
Python
mit
zniper/django-quickadmin
from distutils.version import StrictVersion from django import get_version QADMIN_DEFAULT_EXCLUDES = [ 'django.contrib.sessions', + 'django.contrib.auth', 'django.contrib.admin', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.comments', 'django.contrib.flatpages', 'django.contrib.sitemaps', 'south', ] USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
Add Django auth as a stock application
## Code Before: from distutils.version import StrictVersion from django import get_version QADMIN_DEFAULT_EXCLUDES = [ 'django.contrib.sessions', 'django.contrib.admin', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.comments', 'django.contrib.flatpages', 'django.contrib.sitemaps', 'south', ] USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7')) ## Instruction: Add Django auth as a stock application ## Code After: from distutils.version import StrictVersion from django import get_version QADMIN_DEFAULT_EXCLUDES = [ 'django.contrib.sessions', 'django.contrib.auth', 'django.contrib.admin', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.comments', 'django.contrib.flatpages', 'django.contrib.sitemaps', 'south', ] USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
// ... existing code ... 'django.contrib.sessions', 'django.contrib.auth', 'django.contrib.admin', // ... rest of the code ...
d37555f71d61aa2f40b6d959833d7dd08bc269d4
tmserver/jtui/__init__.py
tmserver/jtui/__init__.py
import logging from flask import Blueprint, current_app, jsonify from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) def register_error(cls): """Decorator to register exception classes as errors that can be serialized to JSON""" @jtui.errorhandler(cls) def handle_invalid_usage(error): current_app.logger.error(error) response = jsonify(error=error) response.status_code = error.status_code return response return cls register_http_error_classes(jtui.errorhandler) import tmserver.jtui.api
import logging from flask import Blueprint jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) import tmserver.jtui.api
Remove jtui blueprint specific error handler
Remove jtui blueprint specific error handler
Python
agpl-3.0
TissueMAPS/TmServer
import logging - from flask import Blueprint, current_app, jsonify + from flask import Blueprint - from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) - - def register_error(cls): - """Decorator to register exception classes as errors that can be - serialized to JSON""" - @jtui.errorhandler(cls) - def handle_invalid_usage(error): - current_app.logger.error(error) - response = jsonify(error=error) - response.status_code = error.status_code - return response - return cls - - - register_http_error_classes(jtui.errorhandler) - - import tmserver.jtui.api
Remove jtui blueprint specific error handler
## Code Before: import logging from flask import Blueprint, current_app, jsonify from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) def register_error(cls): """Decorator to register exception classes as errors that can be serialized to JSON""" @jtui.errorhandler(cls) def handle_invalid_usage(error): current_app.logger.error(error) response = jsonify(error=error) response.status_code = error.status_code return response return cls register_http_error_classes(jtui.errorhandler) import tmserver.jtui.api ## Instruction: Remove jtui blueprint specific error handler ## Code After: import logging from flask import Blueprint jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) import tmserver.jtui.api
// ... existing code ... import logging from flask import Blueprint // ... modified code ... import tmserver.jtui.api // ... rest of the code ...
7dbe03fcbbbf57ec9380eb9a1c24b5fd9f6594e2
zinnia/tests/utils.py
zinnia/tests/utils.py
"""Utils for Zinnia's tests""" import cStringIO from xmlrpclib import Transport from django.test.client import Client class TestTransport(Transport): """Handles connections to XML-RPC server through Django test client.""" def __init__(self, *args, **kwargs): Transport.__init__(self, *args, **kwargs) self.client = Client() def request(self, host, handler, request_body, verbose=0): self.verbose = verbose response = self.client.post(handler, request_body, content_type="text/xml") res = cStringIO.StringIO(response.content) res.seek(0) return self.parse_response(res)
"""Utils for Zinnia's tests""" import StringIO from xmlrpclib import Transport from django.test.client import Client class TestTransport(Transport): """Handles connections to XML-RPC server through Django test client.""" def __init__(self, *args, **kwargs): Transport.__init__(self, *args, **kwargs) self.client = Client() def request(self, host, handler, request_body, verbose=0): self.verbose = verbose response = self.client.post(handler, request_body, content_type="text/xml") res = StringIO.StringIO(response.content) setattr(res, 'getheader', lambda *args: '') # For Python >= 2.7 res.seek(0) return self.parse_response(res)
Fix tests on Python2.7 xmlrpclib.Transport.parse_response calls 'getheader' on its response input
Fix tests on Python2.7 xmlrpclib.Transport.parse_response calls 'getheader' on its response input
Python
bsd-3-clause
1844144/django-blog-zinnia,petecummings/django-blog-zinnia,jfdsmit/django-blog-zinnia,Maplecroft/django-blog-zinnia,marctc/django-blog-zinnia,Zopieux/django-blog-zinnia,1844144/django-blog-zinnia,petecummings/django-blog-zinnia,bywbilly/django-blog-zinnia,Zopieux/django-blog-zinnia,bywbilly/django-blog-zinnia,extertioner/django-blog-zinnia,aorzh/django-blog-zinnia,Fantomas42/django-blog-zinnia,aorzh/django-blog-zinnia,Maplecroft/django-blog-zinnia,ghachey/django-blog-zinnia,ghachey/django-blog-zinnia,jfdsmit/django-blog-zinnia,marctc/django-blog-zinnia,marctc/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,jfdsmit/django-blog-zinnia,ZuluPro/django-blog-zinnia,bywbilly/django-blog-zinnia,jfdsmit/django-blog-zinnia,extertioner/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,dapeng0802/django-blog-zinnia,Fantomas42/django-blog-zinnia,ZuluPro/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,ZuluPro/django-blog-zinnia,aorzh/django-blog-zinnia,1844144/django-blog-zinnia,dapeng0802/django-blog-zinnia,petecummings/django-blog-zinnia
"""Utils for Zinnia's tests""" - import cStringIO + import StringIO from xmlrpclib import Transport from django.test.client import Client class TestTransport(Transport): """Handles connections to XML-RPC server through Django test client.""" def __init__(self, *args, **kwargs): Transport.__init__(self, *args, **kwargs) self.client = Client() def request(self, host, handler, request_body, verbose=0): self.verbose = verbose response = self.client.post(handler, request_body, content_type="text/xml") - res = cStringIO.StringIO(response.content) + res = StringIO.StringIO(response.content) + setattr(res, 'getheader', lambda *args: '') # For Python >= 2.7 res.seek(0) return self.parse_response(res)
Fix tests on Python2.7 xmlrpclib.Transport.parse_response calls 'getheader' on its response input
## Code Before: """Utils for Zinnia's tests""" import cStringIO from xmlrpclib import Transport from django.test.client import Client class TestTransport(Transport): """Handles connections to XML-RPC server through Django test client.""" def __init__(self, *args, **kwargs): Transport.__init__(self, *args, **kwargs) self.client = Client() def request(self, host, handler, request_body, verbose=0): self.verbose = verbose response = self.client.post(handler, request_body, content_type="text/xml") res = cStringIO.StringIO(response.content) res.seek(0) return self.parse_response(res) ## Instruction: Fix tests on Python2.7 xmlrpclib.Transport.parse_response calls 'getheader' on its response input ## Code After: """Utils for Zinnia's tests""" import StringIO from xmlrpclib import Transport from django.test.client import Client class TestTransport(Transport): """Handles connections to XML-RPC server through Django test client.""" def __init__(self, *args, **kwargs): Transport.__init__(self, *args, **kwargs) self.client = Client() def request(self, host, handler, request_body, verbose=0): self.verbose = verbose response = self.client.post(handler, request_body, content_type="text/xml") res = StringIO.StringIO(response.content) setattr(res, 'getheader', lambda *args: '') # For Python >= 2.7 res.seek(0) return self.parse_response(res)
... """Utils for Zinnia's tests""" import StringIO from xmlrpclib import Transport ... content_type="text/xml") res = StringIO.StringIO(response.content) setattr(res, 'getheader', lambda *args: '') # For Python >= 2.7 res.seek(0) ...
bda36d78984ee8b4701315170f004ed6955072ac
common/widgets.py
common/widgets.py
from django.forms import TextInput, Widget from django.forms.utils import flatatt from django.utils.html import format_html class PhoneNumberInput(TextInput): input_type = 'tel' class FileFieldLink(Widget): """ Widget that displays file from FileField as a link to the uploaded data. """ def render(self, name, value, attrs=None): return format_html('<a{}><p{}>{}</p></a>', flatatt({'href': value.url}), flatatt({'class': 'form-control-static'}), value.name)
from django.forms import TextInput, Widget from django.forms.utils import flatatt from django.utils.html import format_html from django.utils.translation import ugettext as _ class PhoneNumberInput(TextInput): input_type = 'tel' class FileFieldLink(Widget): """ Widget that displays file from FileField as a link to the uploaded data. """ def render(self, name, value, attrs=None): outer_attrs = {} if attrs: outer_attrs.update(attrs) if value: outer_attrs['href'] = value.url return format_html('<a{}><p{}>{}</p></a>', flatatt(outer_attrs), flatatt({'class': 'form-control-static'}), value.name) else: outer_attrs['class'] = 'form-control-static' return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
Handle "no file uploaded" situation in FileFieldLink
Handle "no file uploaded" situation in FileFieldLink Fixes ValueErrors when user has no identity card uploaded
Python
agpl-3.0
m4tx/egielda,m4tx/egielda,m4tx/egielda
from django.forms import TextInput, Widget from django.forms.utils import flatatt from django.utils.html import format_html + + from django.utils.translation import ugettext as _ class PhoneNumberInput(TextInput): input_type = 'tel' class FileFieldLink(Widget): """ Widget that displays file from FileField as a link to the uploaded data. """ def render(self, name, value, attrs=None): + outer_attrs = {} + if attrs: + outer_attrs.update(attrs) + if value: + outer_attrs['href'] = value.url - return format_html('<a{}><p{}>{}</p></a>', + return format_html('<a{}><p{}>{}</p></a>', - flatatt({'href': value.url}), + flatatt(outer_attrs), - flatatt({'class': 'form-control-static'}), + flatatt({'class': 'form-control-static'}), - value.name) + value.name) + else: + outer_attrs['class'] = 'form-control-static' + return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
Handle "no file uploaded" situation in FileFieldLink
## Code Before: from django.forms import TextInput, Widget from django.forms.utils import flatatt from django.utils.html import format_html class PhoneNumberInput(TextInput): input_type = 'tel' class FileFieldLink(Widget): """ Widget that displays file from FileField as a link to the uploaded data. """ def render(self, name, value, attrs=None): return format_html('<a{}><p{}>{}</p></a>', flatatt({'href': value.url}), flatatt({'class': 'form-control-static'}), value.name) ## Instruction: Handle "no file uploaded" situation in FileFieldLink ## Code After: from django.forms import TextInput, Widget from django.forms.utils import flatatt from django.utils.html import format_html from django.utils.translation import ugettext as _ class PhoneNumberInput(TextInput): input_type = 'tel' class FileFieldLink(Widget): """ Widget that displays file from FileField as a link to the uploaded data. """ def render(self, name, value, attrs=None): outer_attrs = {} if attrs: outer_attrs.update(attrs) if value: outer_attrs['href'] = value.url return format_html('<a{}><p{}>{}</p></a>', flatatt(outer_attrs), flatatt({'class': 'form-control-static'}), value.name) else: outer_attrs['class'] = 'form-control-static' return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
... from django.utils.html import format_html from django.utils.translation import ugettext as _ ... def render(self, name, value, attrs=None): outer_attrs = {} if attrs: outer_attrs.update(attrs) if value: outer_attrs['href'] = value.url return format_html('<a{}><p{}>{}</p></a>', flatatt(outer_attrs), flatatt({'class': 'form-control-static'}), value.name) else: outer_attrs['class'] = 'form-control-static' return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded")) ...
25ba4aea17d869022682fd70d4c3ccbade19955f
openfisca_country_template/situation_examples/__init__.py
openfisca_country_template/situation_examples/__init__.py
"""This file provides a function to load json example situations.""" import json import os DIR_PATH = os.path.dirname(os.path.abspath(__file__)) def parse(file_name): """Load json example situations.""" file_path = os.path.join(DIR_PATH, file_name) with open(file_path, "r") as file: return json.loads(file.read()) single = parse("single.json") couple = parse("couple.json")
"""This file provides a function to load json example situations.""" import json import os DIR_PATH = os.path.dirname(os.path.abspath(__file__)) def parse(file_name): """Load json example situations.""" file_path = os.path.join(DIR_PATH, file_name) with open(file_path, "r", encoding="utf8") as file: return json.loads(file.read()) single = parse("single.json") couple = parse("couple.json")
Add encoding to open file
Add encoding to open file
Python
agpl-3.0
openfisca/country-template,openfisca/country-template
"""This file provides a function to load json example situations.""" import json import os DIR_PATH = os.path.dirname(os.path.abspath(__file__)) def parse(file_name): """Load json example situations.""" file_path = os.path.join(DIR_PATH, file_name) - with open(file_path, "r") as file: + with open(file_path, "r", encoding="utf8") as file: return json.loads(file.read()) single = parse("single.json") couple = parse("couple.json")
Add encoding to open file
## Code Before: """This file provides a function to load json example situations.""" import json import os DIR_PATH = os.path.dirname(os.path.abspath(__file__)) def parse(file_name): """Load json example situations.""" file_path = os.path.join(DIR_PATH, file_name) with open(file_path, "r") as file: return json.loads(file.read()) single = parse("single.json") couple = parse("couple.json") ## Instruction: Add encoding to open file ## Code After: """This file provides a function to load json example situations.""" import json import os DIR_PATH = os.path.dirname(os.path.abspath(__file__)) def parse(file_name): """Load json example situations.""" file_path = os.path.join(DIR_PATH, file_name) with open(file_path, "r", encoding="utf8") as file: return json.loads(file.read()) single = parse("single.json") couple = parse("couple.json")
# ... existing code ... file_path = os.path.join(DIR_PATH, file_name) with open(file_path, "r", encoding="utf8") as file: return json.loads(file.read()) # ... rest of the code ...
d2bec26a63877e31e2d887e0879a8fd197741147
thinc/t2t.py
thinc/t2t.py
from __future__ import unicode_literals from .neural._classes.convolution import ExtractWindow # noqa: F401 from .neural._classes.attention import ParametricAttention # noqa: F401 from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401
from __future__ import unicode_literals from .neural._classes.convolution import ExtractWindow # noqa: F401 from .neural._classes.attention import ParametricAttention # noqa: F401 from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401 from .neural._classes.multiheaded_attention import MultiHeadedAttention from .neural._classes.multiheaded_attention import prepare_self_attention
Add import links for MultiHeadedAttention and prepare_self_attention
Add import links for MultiHeadedAttention and prepare_self_attention
Python
mit
spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc
from __future__ import unicode_literals from .neural._classes.convolution import ExtractWindow # noqa: F401 from .neural._classes.attention import ParametricAttention # noqa: F401 from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401 + from .neural._classes.multiheaded_attention import MultiHeadedAttention + from .neural._classes.multiheaded_attention import prepare_self_attention
Add import links for MultiHeadedAttention and prepare_self_attention
## Code Before: from __future__ import unicode_literals from .neural._classes.convolution import ExtractWindow # noqa: F401 from .neural._classes.attention import ParametricAttention # noqa: F401 from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401 ## Instruction: Add import links for MultiHeadedAttention and prepare_self_attention ## Code After: from __future__ import unicode_literals from .neural._classes.convolution import ExtractWindow # noqa: F401 from .neural._classes.attention import ParametricAttention # noqa: F401 from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401 from .neural._classes.multiheaded_attention import MultiHeadedAttention from .neural._classes.multiheaded_attention import prepare_self_attention
// ... existing code ... from .neural._classes.rnn import LSTM, BiLSTM # noqa: F401 from .neural._classes.multiheaded_attention import MultiHeadedAttention from .neural._classes.multiheaded_attention import prepare_self_attention // ... rest of the code ...
1c494f21cde384b611998d237baa430384dcefbc
Challenges/chall_22.py
Challenges/chall_22.py
''' Uses Anaconda environment with Pillow for image processing - Python 3.7, numpy, and Pillow (PIL) - Run `source activate imgPIL`, `python chall_22.py` ''' from PIL import Image import numpy as np def main(): ''' Hint: emulate (picture of joystick) <!-- or maybe white.gif would be more bright --> http://www.pythonchallenge.com/pc/hex/white.gif shows a 200x200 black square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' white = Image.open(img_path) hist = white.histogram() print(len(hist)) return 0 if __name__ == '__main__': main()
''' Uses Anaconda environment with Pillow for image processing - Python 3.7, numpy, and Pillow (PIL) - Run `source activate imgPIL`, `python chall_22.py` ''' from PIL import Image, ImageDraw def main(): ''' Hint: emulate (picture of joystick) <!-- or maybe white.gif would be more bright --> http://www.pythonchallenge.com/pc/hex/white.gif shows a 200x200 black square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' with Image.open(img_path) as gif: hist = gif.histogram() # 1 pixel in hist bin 8 (0-255) print(hist.index(1)) data = list(gif.getdata()) print(data.index(8)) # 20100 return 0 if __name__ == '__main__': main()
Refactor image open to with block
Refactor image open to with block
Python
mit
HKuz/PythonChallenge
''' Uses Anaconda environment with Pillow for image processing - Python 3.7, numpy, and Pillow (PIL) - Run `source activate imgPIL`, `python chall_22.py` ''' - from PIL import Image + from PIL import Image, ImageDraw - import numpy as np def main(): ''' Hint: emulate (picture of joystick) <!-- or maybe white.gif would be more bright --> http://www.pythonchallenge.com/pc/hex/white.gif shows a 200x200 black square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' - white = Image.open(img_path) - hist = white.histogram() - print(len(hist)) + with Image.open(img_path) as gif: + hist = gif.histogram() # 1 pixel in hist bin 8 (0-255) + print(hist.index(1)) + data = list(gif.getdata()) + print(data.index(8)) # 20100 return 0 if __name__ == '__main__': main()
Refactor image open to with block
## Code Before: ''' Uses Anaconda environment with Pillow for image processing - Python 3.7, numpy, and Pillow (PIL) - Run `source activate imgPIL`, `python chall_22.py` ''' from PIL import Image import numpy as np def main(): ''' Hint: emulate (picture of joystick) <!-- or maybe white.gif would be more bright --> http://www.pythonchallenge.com/pc/hex/white.gif shows a 200x200 black square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' white = Image.open(img_path) hist = white.histogram() print(len(hist)) return 0 if __name__ == '__main__': main() ## Instruction: Refactor image open to with block ## Code After: ''' Uses Anaconda environment with Pillow for image processing - Python 3.7, numpy, and Pillow (PIL) - Run `source activate imgPIL`, `python chall_22.py` ''' from PIL import Image, ImageDraw def main(): ''' Hint: emulate (picture of joystick) <!-- or maybe white.gif would be more bright --> http://www.pythonchallenge.com/pc/hex/white.gif shows a 200x200 black square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' with Image.open(img_path) as gif: hist = gif.histogram() # 1 pixel in hist bin 8 (0-255) print(hist.index(1)) data = list(gif.getdata()) print(data.index(8)) # 20100 return 0 if __name__ == '__main__': main()
# ... existing code ... from PIL import Image, ImageDraw # ... modified code ... img_path = './joystick_chall_22/white.gif' with Image.open(img_path) as gif: hist = gif.histogram() # 1 pixel in hist bin 8 (0-255) print(hist.index(1)) data = list(gif.getdata()) print(data.index(8)) # 20100 # ... rest of the code ...
c10badab9b93eb021b1942475c681042292c182c
scrapi/harvesters/boise_state.py
scrapi/harvesters/boise_state.py
''' Harvester for the ScholarWorks for the SHARE project Example API call: http://scholarworks.boisestate.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc ''' from __future__ import unicode_literals from scrapi.base import OAIHarvester class Boise_stateHarvester(OAIHarvester): short_name = 'boise_state' long_name = 'ScholarWorks' url = 'http://scholarworks.boisestate.edu' base_url = 'http://scholarworks.boisestate.edu/do/oai/' property_list = ['source', 'identifier', 'type', 'date', 'setSpec', 'publisher', 'rights', 'format'] timezone_granularity = True
''' Harvester for the ScholarWorks for the SHARE project Example API call: http://scholarworks.boisestate.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc ''' from __future__ import unicode_literals from scrapi.base import OAIHarvester class Boise_stateHarvester(OAIHarvester): short_name = 'boise_state' long_name = 'Boise State University ScholarWorks' url = 'http://scholarworks.boisestate.edu' base_url = 'http://scholarworks.boisestate.edu/do/oai/' property_list = ['source', 'identifier', 'type', 'date', 'setSpec', 'publisher', 'rights', 'format'] timezone_granularity = True
Update longname for Boise state
Update longname for Boise state
Python
apache-2.0
CenterForOpenScience/scrapi,CenterForOpenScience/scrapi
''' Harvester for the ScholarWorks for the SHARE project Example API call: http://scholarworks.boisestate.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc ''' from __future__ import unicode_literals from scrapi.base import OAIHarvester class Boise_stateHarvester(OAIHarvester): short_name = 'boise_state' - long_name = 'ScholarWorks' + long_name = 'Boise State University ScholarWorks' url = 'http://scholarworks.boisestate.edu' base_url = 'http://scholarworks.boisestate.edu/do/oai/' property_list = ['source', 'identifier', 'type', 'date', 'setSpec', 'publisher', 'rights', 'format'] timezone_granularity = True
Update longname for Boise state
## Code Before: ''' Harvester for the ScholarWorks for the SHARE project Example API call: http://scholarworks.boisestate.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc ''' from __future__ import unicode_literals from scrapi.base import OAIHarvester class Boise_stateHarvester(OAIHarvester): short_name = 'boise_state' long_name = 'ScholarWorks' url = 'http://scholarworks.boisestate.edu' base_url = 'http://scholarworks.boisestate.edu/do/oai/' property_list = ['source', 'identifier', 'type', 'date', 'setSpec', 'publisher', 'rights', 'format'] timezone_granularity = True ## Instruction: Update longname for Boise state ## Code After: ''' Harvester for the ScholarWorks for the SHARE project Example API call: http://scholarworks.boisestate.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc ''' from __future__ import unicode_literals from scrapi.base import OAIHarvester class Boise_stateHarvester(OAIHarvester): short_name = 'boise_state' long_name = 'Boise State University ScholarWorks' url = 'http://scholarworks.boisestate.edu' base_url = 'http://scholarworks.boisestate.edu/do/oai/' property_list = ['source', 'identifier', 'type', 'date', 'setSpec', 'publisher', 'rights', 'format'] timezone_granularity = True
// ... existing code ... short_name = 'boise_state' long_name = 'Boise State University ScholarWorks' url = 'http://scholarworks.boisestate.edu' // ... rest of the code ...
1690959502e2951920e52a0832e6571144bab6a8
_lib/wordpress_faq_processor.py
_lib/wordpress_faq_processor.py
import sys import json import os.path import requests def posts_at_url(url): current_page = 1 max_page = sys.maxint while current_page <= max_page: url = os.path.expandvars(url) resp = requests.get(url, params={'page': current_page, 'count': '-1'}) results = json.loads(resp.content) current_page += 1 max_page = results['pages'] for p in results['posts']: yield p def documents(name, url, **kwargs): for post in posts_at_url(url): yield process_post(post) def process_post(post): post['_id'] = post['slug'] names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang', 'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term', 'utm_content', 'faq'] for name in names: if name in post['custom_fields']: post[name] = post['custom_fields'][name] if 'taxonomy_fj_tag' in post: post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']] del post['custom_fields'] return post
import sys import json import os.path import requests def posts_at_url(url): current_page = 1 max_page = sys.maxint while current_page <= max_page: url = os.path.expandvars(url) resp = requests.get(url, params={'page': current_page, 'count': '-1'}) results = json.loads(resp.content) current_page += 1 max_page = results['pages'] for p in results['posts']: yield p def documents(name, url, **kwargs): for post in posts_at_url(url): yield process_post(post) def process_post(post): post['_id'] = post['slug'] names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang', 'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term', 'utm_content', 'faq'] for name in names: if name in post['custom_fields']: post[name] = post['custom_fields'][name] if 'taxonomy_fj_tag' in post: post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']] del post['custom_fields'] return {'_index': 'content', '_type': 'faq', '_id': post['slug'], '_source': post}
Change faq processor to bulk index
Change faq processor to bulk index
Python
cc0-1.0
kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh
import sys import json import os.path import requests def posts_at_url(url): current_page = 1 max_page = sys.maxint while current_page <= max_page: url = os.path.expandvars(url) resp = requests.get(url, params={'page': current_page, 'count': '-1'}) results = json.loads(resp.content) current_page += 1 max_page = results['pages'] for p in results['posts']: yield p def documents(name, url, **kwargs): for post in posts_at_url(url): yield process_post(post) def process_post(post): post['_id'] = post['slug'] names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang', 'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term', 'utm_content', 'faq'] for name in names: if name in post['custom_fields']: post[name] = post['custom_fields'][name] if 'taxonomy_fj_tag' in post: post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']] del post['custom_fields'] - return post + return {'_index': 'content', + '_type': 'faq', + '_id': post['slug'], + '_source': post}
Change faq processor to bulk index
## Code Before: import sys import json import os.path import requests def posts_at_url(url): current_page = 1 max_page = sys.maxint while current_page <= max_page: url = os.path.expandvars(url) resp = requests.get(url, params={'page': current_page, 'count': '-1'}) results = json.loads(resp.content) current_page += 1 max_page = results['pages'] for p in results['posts']: yield p def documents(name, url, **kwargs): for post in posts_at_url(url): yield process_post(post) def process_post(post): post['_id'] = post['slug'] names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang', 'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term', 'utm_content', 'faq'] for name in names: if name in post['custom_fields']: post[name] = post['custom_fields'][name] if 'taxonomy_fj_tag' in post: post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']] del post['custom_fields'] return post ## Instruction: Change faq processor to bulk index ## Code After: import sys import json import os.path import requests def posts_at_url(url): current_page = 1 max_page = sys.maxint while current_page <= max_page: url = os.path.expandvars(url) resp = requests.get(url, params={'page': current_page, 'count': '-1'}) results = json.loads(resp.content) current_page += 1 max_page = results['pages'] for p in results['posts']: yield p def documents(name, url, **kwargs): for post in posts_at_url(url): yield process_post(post) def process_post(post): post['_id'] = post['slug'] names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang', 'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term', 'utm_content', 'faq'] for name in names: if name in post['custom_fields']: post[name] = post['custom_fields'][name] if 'taxonomy_fj_tag' in post: post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']] del post['custom_fields'] return {'_index': 'content', '_type': 'faq', '_id': post['slug'], '_source': post}
# ... existing code ... return {'_index': 'content', '_type': 'faq', '_id': post['slug'], '_source': post} # ... rest of the code ...
8570efd42f35b89d9a97d9aa5a5aa47765cd21f6
diary/logthread.py
diary/logthread.py
from threading import Thread try: from queue import Queue except ImportError: # python 2 from Queue import Queue class ElemThread(Thread): """A thread for logging as to not disrupt the logged application""" def __init__(self, elem, name="Elementary Logger"): """Construct a thread for logging :param elem: An Elementary instance to handle logging :param name: A string to represent this thread """ Thread.__init__(self, name=name) self.daemon = True # py2 constructor requires explicit self.elem = elem self.queue = Queue() self.start() def add(self, event): """Add a logged event to queue for logging""" self.queue.put(event) def run(self): """Main for thread to run""" while True: self.elem.write(self.queue.get())
from threading import Thread try: from queue import Queue except ImportError: # python 2 from Queue import Queue class DiaryThread(Thread): """A thread for logging as to not disrupt the logged application""" def __init__(self, diary, name="Diary Logger"): """Construct a thread for logging :param diary: An Diary instance to handle logging :param name: A string to represent this thread """ Thread.__init__(self, name=name) self.daemon = True # py2 constructor requires explicit self.diary = diary self.queue = Queue() self.start() def add(self, event): """Add a logged event to queue for logging""" self.queue.put(event) def run(self): """Main for thread to run""" while True: self.diary.write(self.queue.get())
Make last changes over to diary name
Make last changes over to diary name
Python
mit
GreenVars/diary
from threading import Thread try: from queue import Queue except ImportError: # python 2 from Queue import Queue - class ElemThread(Thread): + class DiaryThread(Thread): """A thread for logging as to not disrupt the logged application""" - def __init__(self, elem, name="Elementary Logger"): + def __init__(self, diary, name="Diary Logger"): """Construct a thread for logging - :param elem: An Elementary instance to handle logging + :param diary: An Diary instance to handle logging :param name: A string to represent this thread """ Thread.__init__(self, name=name) self.daemon = True # py2 constructor requires explicit - self.elem = elem + self.diary = diary self.queue = Queue() self.start() def add(self, event): """Add a logged event to queue for logging""" self.queue.put(event) def run(self): """Main for thread to run""" while True: - self.elem.write(self.queue.get()) + self.diary.write(self.queue.get())
Make last changes over to diary name
## Code Before: from threading import Thread try: from queue import Queue except ImportError: # python 2 from Queue import Queue class ElemThread(Thread): """A thread for logging as to not disrupt the logged application""" def __init__(self, elem, name="Elementary Logger"): """Construct a thread for logging :param elem: An Elementary instance to handle logging :param name: A string to represent this thread """ Thread.__init__(self, name=name) self.daemon = True # py2 constructor requires explicit self.elem = elem self.queue = Queue() self.start() def add(self, event): """Add a logged event to queue for logging""" self.queue.put(event) def run(self): """Main for thread to run""" while True: self.elem.write(self.queue.get()) ## Instruction: Make last changes over to diary name ## Code After: from threading import Thread try: from queue import Queue except ImportError: # python 2 from Queue import Queue class DiaryThread(Thread): """A thread for logging as to not disrupt the logged application""" def __init__(self, diary, name="Diary Logger"): """Construct a thread for logging :param diary: An Diary instance to handle logging :param name: A string to represent this thread """ Thread.__init__(self, name=name) self.daemon = True # py2 constructor requires explicit self.diary = diary self.queue = Queue() self.start() def add(self, event): """Add a logged event to queue for logging""" self.queue.put(event) def run(self): """Main for thread to run""" while True: self.diary.write(self.queue.get())
// ... existing code ... class DiaryThread(Thread): """A thread for logging as to not disrupt the logged application""" // ... modified code ... def __init__(self, diary, name="Diary Logger"): """Construct a thread for logging ... :param diary: An Diary instance to handle logging :param name: A string to represent this thread ... self.daemon = True # py2 constructor requires explicit self.diary = diary self.queue = Queue() ... while True: self.diary.write(self.queue.get()) // ... rest of the code ...
4727991d29bc888611b6eaa403456524785b6338
highlightjs/testsettings.py
highlightjs/testsettings.py
import django.conf.global_settings as DEFAULT_SETTINGS SECRET_KEY = 'highlightjsisawesome' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } INSTALLED_APPS = ( 'highlightjs', ) MIDDLEWARE_CLASSES = DEFAULT_SETTINGS.MIDDLEWARE_CLASSES
import django.conf.global_settings as DEFAULT_SETTINGS SECRET_KEY = 'highlightjsisawesome' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } INSTALLED_APPS = ( 'highlightjs', ) MIDDLEWARE_CLASSES = DEFAULT_SETTINGS.MIDDLEWARE_CLASSES TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, }, ]
Add django backend for test settings
Add django backend for test settings
Python
mit
MounirMesselmeni/django-highlightjs
import django.conf.global_settings as DEFAULT_SETTINGS SECRET_KEY = 'highlightjsisawesome' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } INSTALLED_APPS = ( 'highlightjs', ) MIDDLEWARE_CLASSES = DEFAULT_SETTINGS.MIDDLEWARE_CLASSES + TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + }, + ] +
Add django backend for test settings
## Code Before: import django.conf.global_settings as DEFAULT_SETTINGS SECRET_KEY = 'highlightjsisawesome' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } INSTALLED_APPS = ( 'highlightjs', ) MIDDLEWARE_CLASSES = DEFAULT_SETTINGS.MIDDLEWARE_CLASSES ## Instruction: Add django backend for test settings ## Code After: import django.conf.global_settings as DEFAULT_SETTINGS SECRET_KEY = 'highlightjsisawesome' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } INSTALLED_APPS = ( 'highlightjs', ) MIDDLEWARE_CLASSES = DEFAULT_SETTINGS.MIDDLEWARE_CLASSES TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, }, ]
# ... existing code ... MIDDLEWARE_CLASSES = DEFAULT_SETTINGS.MIDDLEWARE_CLASSES TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, }, ] # ... rest of the code ...
2438efb99b85fbc76cd285792c1511e7e2813a05
zeus/api/resources/repository_tests.py
zeus/api/resources/repository_tests.py
from datetime import timedelta from sqlalchemy.sql import func from zeus.config import db from zeus.constants import Result, Status from zeus.models import Repository, TestCase, Job from zeus.utils import timezone from .base_repository import BaseRepositoryResource from ..schemas import TestCaseStatisticsSchema testcases_schema = TestCaseStatisticsSchema(many=True) class RepositoryTestsResource(BaseRepositoryResource): def get(self, repo: Repository): """ Return a list of testcases for the given repository. """ runs_failed = ( func.count(TestCase.result) .filter(TestCase.result == Result.failed) .label("runs_failed") ) query = ( db.session.query( TestCase.hash, TestCase.name, func.count(TestCase.hash).label("runs_total"), runs_failed, func.avg(TestCase.duration).label("avg_duration"), ) .join(Job, TestCase.job_id == Job.id) .filter( Job.repository_id == repo.id, Job.date_finished >= timezone.now() - timedelta(days=14), Job.status == Status.finished, TestCase.repository_id == repo.id, ) .group_by(TestCase.hash, TestCase.name) .order_by(runs_failed.desc()) ) return self.paginate_with_schema(testcases_schema, query)
from datetime import timedelta from sqlalchemy.sql import func from zeus.config import db from zeus.constants import Result, Status from zeus.models import Repository, TestCase, Job from zeus.utils import timezone from .base_repository import BaseRepositoryResource from ..schemas import TestCaseStatisticsSchema testcases_schema = TestCaseStatisticsSchema(many=True) class RepositoryTestsResource(BaseRepositoryResource): def get(self, repo: Repository): """ Return a list of testcases for the given repository. """ runs_failed = ( func.count(TestCase.result) .filter(TestCase.result == Result.failed) .label("runs_failed") ) query = ( db.session.query( TestCase.hash, TestCase.name, func.count(TestCase.hash).label("runs_total"), runs_failed, func.avg(TestCase.duration).label("avg_duration"), ) .filter( TestCase.job_id.in_( db.session.query(Job.id) .filter( Job.repository_id == repo.id, Job.date_finished >= timezone.now() - timedelta(days=14), Job.status == Status.finished, ) .subquery() ) ) .group_by(TestCase.hash, TestCase.name) .order_by(runs_failed.desc()) ) return self.paginate_with_schema(testcases_schema, query)
Simplify query plan for repo tests
ref: Simplify query plan for repo tests
Python
apache-2.0
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
from datetime import timedelta from sqlalchemy.sql import func from zeus.config import db from zeus.constants import Result, Status from zeus.models import Repository, TestCase, Job from zeus.utils import timezone from .base_repository import BaseRepositoryResource from ..schemas import TestCaseStatisticsSchema testcases_schema = TestCaseStatisticsSchema(many=True) class RepositoryTestsResource(BaseRepositoryResource): def get(self, repo: Repository): """ Return a list of testcases for the given repository. """ runs_failed = ( func.count(TestCase.result) .filter(TestCase.result == Result.failed) .label("runs_failed") ) query = ( db.session.query( TestCase.hash, TestCase.name, func.count(TestCase.hash).label("runs_total"), runs_failed, func.avg(TestCase.duration).label("avg_duration"), ) - .join(Job, TestCase.job_id == Job.id) .filter( + TestCase.job_id.in_( + db.session.query(Job.id) + .filter( - Job.repository_id == repo.id, + Job.repository_id == repo.id, - Job.date_finished >= timezone.now() - timedelta(days=14), + Job.date_finished >= timezone.now() - timedelta(days=14), - Job.status == Status.finished, + Job.status == Status.finished, - TestCase.repository_id == repo.id, + ) + .subquery() + ) ) .group_by(TestCase.hash, TestCase.name) .order_by(runs_failed.desc()) ) return self.paginate_with_schema(testcases_schema, query)
Simplify query plan for repo tests
## Code Before: from datetime import timedelta from sqlalchemy.sql import func from zeus.config import db from zeus.constants import Result, Status from zeus.models import Repository, TestCase, Job from zeus.utils import timezone from .base_repository import BaseRepositoryResource from ..schemas import TestCaseStatisticsSchema testcases_schema = TestCaseStatisticsSchema(many=True) class RepositoryTestsResource(BaseRepositoryResource): def get(self, repo: Repository): """ Return a list of testcases for the given repository. """ runs_failed = ( func.count(TestCase.result) .filter(TestCase.result == Result.failed) .label("runs_failed") ) query = ( db.session.query( TestCase.hash, TestCase.name, func.count(TestCase.hash).label("runs_total"), runs_failed, func.avg(TestCase.duration).label("avg_duration"), ) .join(Job, TestCase.job_id == Job.id) .filter( Job.repository_id == repo.id, Job.date_finished >= timezone.now() - timedelta(days=14), Job.status == Status.finished, TestCase.repository_id == repo.id, ) .group_by(TestCase.hash, TestCase.name) .order_by(runs_failed.desc()) ) return self.paginate_with_schema(testcases_schema, query) ## Instruction: Simplify query plan for repo tests ## Code After: from datetime import timedelta from sqlalchemy.sql import func from zeus.config import db from zeus.constants import Result, Status from zeus.models import Repository, TestCase, Job from zeus.utils import timezone from .base_repository import BaseRepositoryResource from ..schemas import TestCaseStatisticsSchema testcases_schema = TestCaseStatisticsSchema(many=True) class RepositoryTestsResource(BaseRepositoryResource): def get(self, repo: Repository): """ Return a list of testcases for the given repository. 
""" runs_failed = ( func.count(TestCase.result) .filter(TestCase.result == Result.failed) .label("runs_failed") ) query = ( db.session.query( TestCase.hash, TestCase.name, func.count(TestCase.hash).label("runs_total"), runs_failed, func.avg(TestCase.duration).label("avg_duration"), ) .filter( TestCase.job_id.in_( db.session.query(Job.id) .filter( Job.repository_id == repo.id, Job.date_finished >= timezone.now() - timedelta(days=14), Job.status == Status.finished, ) .subquery() ) ) .group_by(TestCase.hash, TestCase.name) .order_by(runs_failed.desc()) ) return self.paginate_with_schema(testcases_schema, query)
# ... existing code ... ) .filter( TestCase.job_id.in_( db.session.query(Job.id) .filter( Job.repository_id == repo.id, Job.date_finished >= timezone.now() - timedelta(days=14), Job.status == Status.finished, ) .subquery() ) ) # ... rest of the code ...
8fd65190a2a68a7afeab91b0a02c83309f72ccd6
tests/test_testing.py
tests/test_testing.py
import greenado from greenado.testing import gen_test from tornado.testing import AsyncTestCase from tornado import gen @gen.coroutine def coroutine(): raise gen.Return(1234) class GreenadoTests(AsyncTestCase): @gen_test def test_without_timeout(self): assert greenado.gyield(coroutine()) == 1234 @gen_test(timeout=5) def test_with_timeout(self): assert greenado.gyield(coroutine()) == 1234
import greenado from greenado.testing import gen_test from tornado.testing import AsyncTestCase from tornado import gen @gen.coroutine def coroutine(): raise gen.Return(1234) class GreenadoTests(AsyncTestCase): @gen_test def test_without_timeout1(self): assert greenado.gyield(coroutine()) == 1234 @gen_test @greenado.generator def test_without_timeout2(self): assert (yield coroutine()) == 1234 @gen_test(timeout=5) def test_with_timeout1(self): assert greenado.gyield(coroutine()) == 1234 @gen_test(timeout=5) @greenado.generator def test_with_timeout2(self): assert (yield coroutine()) == 1234
Add tests to gen_test for generator, seems to work
Add tests to gen_test for generator, seems to work
Python
apache-2.0
virtuald/greenado,virtuald/greenado
import greenado from greenado.testing import gen_test from tornado.testing import AsyncTestCase from tornado import gen @gen.coroutine def coroutine(): raise gen.Return(1234) class GreenadoTests(AsyncTestCase): @gen_test - def test_without_timeout(self): + def test_without_timeout1(self): assert greenado.gyield(coroutine()) == 1234 + + @gen_test + @greenado.generator + def test_without_timeout2(self): + assert (yield coroutine()) == 1234 @gen_test(timeout=5) - def test_with_timeout(self): + def test_with_timeout1(self): assert greenado.gyield(coroutine()) == 1234 + + @gen_test(timeout=5) + @greenado.generator + def test_with_timeout2(self): + assert (yield coroutine()) == 1234
Add tests to gen_test for generator, seems to work
## Code Before: import greenado from greenado.testing import gen_test from tornado.testing import AsyncTestCase from tornado import gen @gen.coroutine def coroutine(): raise gen.Return(1234) class GreenadoTests(AsyncTestCase): @gen_test def test_without_timeout(self): assert greenado.gyield(coroutine()) == 1234 @gen_test(timeout=5) def test_with_timeout(self): assert greenado.gyield(coroutine()) == 1234 ## Instruction: Add tests to gen_test for generator, seems to work ## Code After: import greenado from greenado.testing import gen_test from tornado.testing import AsyncTestCase from tornado import gen @gen.coroutine def coroutine(): raise gen.Return(1234) class GreenadoTests(AsyncTestCase): @gen_test def test_without_timeout1(self): assert greenado.gyield(coroutine()) == 1234 @gen_test @greenado.generator def test_without_timeout2(self): assert (yield coroutine()) == 1234 @gen_test(timeout=5) def test_with_timeout1(self): assert greenado.gyield(coroutine()) == 1234 @gen_test(timeout=5) @greenado.generator def test_with_timeout2(self): assert (yield coroutine()) == 1234
... @gen_test def test_without_timeout1(self): assert greenado.gyield(coroutine()) == 1234 @gen_test @greenado.generator def test_without_timeout2(self): assert (yield coroutine()) == 1234 ... @gen_test(timeout=5) def test_with_timeout1(self): assert greenado.gyield(coroutine()) == 1234 @gen_test(timeout=5) @greenado.generator def test_with_timeout2(self): assert (yield coroutine()) == 1234 ...
970eb92f6db8b2fd22594d662a7142a976d60559
airflow/contrib/hooks/__init__.py
airflow/contrib/hooks/__init__.py
from airflow.utils import import_module_attrs as _import_module_attrs _hooks = { 'ftp_hook': ['FTPHook'], 'vertica_hook': ['VerticaHook'], 'ssh_hook': ['SSHHook'], 'bigquery_hook': ['BigQueryHook'], 'qubole_hook': ['QuboleHook'] } _import_module_attrs(globals(), _hooks)
from airflow.utils import import_module_attrs as _import_module_attrs _hooks = { 'ftp_hook': ['FTPHook'], 'ftps_hook': ['FTPSHook'], 'vertica_hook': ['VerticaHook'], 'ssh_hook': ['SSHHook'], 'bigquery_hook': ['BigQueryHook'], 'qubole_hook': ['QuboleHook'] } _import_module_attrs(globals(), _hooks)
Add FTPSHook in _hooks register.
Add FTPSHook in _hooks register.
Python
apache-2.0
cjqian/incubator-airflow,KL-WLCR/incubator-airflow,dmitry-r/incubator-airflow,yiqingj/airflow,rishibarve/incubator-airflow,vineet-rh/incubator-airflow,ty707/airflow,preete-dixit-ck/incubator-airflow,saguziel/incubator-airflow,sdiazb/airflow,NielsZeilemaker/incubator-airflow,subodhchhabra/airflow,yiqingj/airflow,preete-dixit-ck/incubator-airflow,adamhaney/airflow,aminghadersohi/airflow,mtagle/airflow,adrpar/incubator-airflow,jhsenjaliya/incubator-airflow,cfei18/incubator-airflow,criccomini/airflow,subodhchhabra/airflow,caseyching/incubator-airflow,juvoinc/airflow,airbnb/airflow,dmitry-r/incubator-airflow,hamedhsn/incubator-airflow,apache/airflow,fenglu-g/incubator-airflow,Fokko/incubator-airflow,KL-WLCR/incubator-airflow,andrewmchen/incubator-airflow,forevernull/incubator-airflow,cademarkegard/airflow,jwi078/incubator-airflow,jiwang576/incubator-airflow,btallman/incubator-airflow,zack3241/incubator-airflow,DinoCow/airflow,biln/airflow,caseyching/incubator-airflow,Fokko/incubator-airflow,DEVELByte/incubator-airflow,airbnb/airflow,alexvanboxel/airflow,apache/incubator-airflow,vijaysbhat/incubator-airflow,gritlogic/incubator-airflow,gtoonstra/airflow,forevernull/incubator-airflow,btallman/incubator-airflow,zodiac/incubator-airflow,malmiron/incubator-airflow,ty707/airflow,cademarkegard/airflow,preete-dixit-ck/incubator-airflow,sergiohgz/incubator-airflow,cademarkegard/airflow,wndhydrnt/airflow,mtdewulf/incubator-airflow,yiqingj/airflow,opensignal/airflow,vijaysbhat/incubator-airflow,MetrodataTeam/incubator-airflow,apache/airflow,mattuuh7/incubator-airflow,holygits/incubator-airflow,edgarRd/incubator-airflow,lxneng/incubator-airflow,plypaul/airflow,sid88in/incubator-airflow,yati-sagade/incubator-airflow,adrpar/incubator-airflow,cjqian/incubator-airflow,hamedhsn/incubator-airflow,skudriashev/incubator-airflow,artwr/airflow,jwi078/incubator-airflow,stverhae/incubator-airflow,owlabs/incubator-airflow,ronfung/incubator-airflow,lyft/incubator-airflow,zack3241/incubator-airflow
,hgrif/incubator-airflow,RealImpactAnalytics/airflow,dgies/incubator-airflow,ty707/airflow,ProstoMaxim/incubator-airflow,aminghadersohi/airflow,jbhsieh/incubator-airflow,rishibarve/incubator-airflow,jbhsieh/incubator-airflow,r39132/airflow,jgao54/airflow,forevernull/incubator-airflow,jlowin/airflow,jlowin/airflow,holygits/incubator-airflow,DEVELByte/incubator-airflow,mtdewulf/incubator-airflow,biln/airflow,fenglu-g/incubator-airflow,mylons/incubator-airflow,ledsusop/airflow,zoyahav/incubator-airflow,CloverHealth/airflow,dud225/incubator-airflow,jfantom/incubator-airflow,dgies/incubator-airflow,sekikn/incubator-airflow,lyft/incubator-airflow,saguziel/incubator-airflow,mattuuh7/incubator-airflow,gtoonstra/airflow,vineet-rh/incubator-airflow,yk5/incubator-airflow,aminghadersohi/airflow,wileeam/airflow,jfantom/incubator-airflow,mistercrunch/airflow,easytaxibr/airflow,Twistbioscience/incubator-airflow,zodiac/incubator-airflow,hgrif/incubator-airflow,lxneng/incubator-airflow,Acehaidrey/incubator-airflow,mattuuh7/incubator-airflow,wxiang7/airflow,gilt/incubator-airflow,mrkm4ntr/incubator-airflow,zack3241/incubator-airflow,Acehaidrey/incubator-airflow,owlabs/incubator-airflow,yati-sagade/incubator-airflow,kerzhner/airflow,mrkm4ntr/incubator-airflow,andyxhadji/incubator-airflow,sergiohgz/incubator-airflow,DinoCow/airflow,edgarRd/incubator-airflow,skudriashev/incubator-airflow,jhsenjaliya/incubator-airflow,dhuang/incubator-airflow,dgies/incubator-airflow,adrpar/incubator-airflow,AllisonWang/incubator-airflow,NielsZeilemaker/incubator-airflow,danielvdende/incubator-airflow,rishibarve/incubator-airflow,wxiang7/airflow,jiwang576/incubator-airflow,sdiazb/airflow,brandsoulmates/incubator-airflow,mrares/incubator-airflow,jhsenjaliya/incubator-airflow,jesusfcr/airflow,criccomini/airflow,wooga/airflow,OpringaoDoTurno/airflow,yati-sagade/incubator-airflow,ty707/airflow,OpringaoDoTurno/airflow,apache/incubator-airflow,ProstoMaxim/incubator-airflow,sid88in/incubator-airflow,nathanielvar
ona/airflow,janczak10/incubator-airflow,yati-sagade/incubator-airflow,ledsusop/airflow,plypaul/airflow,jhsenjaliya/incubator-airflow,brandsoulmates/incubator-airflow,RealImpactAnalytics/airflow,wileeam/airflow,gilt/incubator-airflow,wileeam/airflow,wolfier/incubator-airflow,wooga/airflow,Tagar/incubator-airflow,RealImpactAnalytics/airflow,owlabs/incubator-airflow,AllisonWang/incubator-airflow,jwi078/incubator-airflow,nathanielvarona/airflow,KL-WLCR/incubator-airflow,MetrodataTeam/incubator-airflow,r39132/airflow,dgies/incubator-airflow,danielvdende/incubator-airflow,jesusfcr/airflow,forevernull/incubator-airflow,nathanielvarona/airflow,MortalViews/incubator-airflow,plypaul/airflow,easytaxibr/airflow,gilt/incubator-airflow,edgarRd/incubator-airflow,OpringaoDoTurno/airflow,mrares/incubator-airflow,modsy/incubator-airflow,akosel/incubator-airflow,adamhaney/airflow,holygits/incubator-airflow,RealImpactAnalytics/airflow,mistercrunch/airflow,gritlogic/incubator-airflow,gritlogic/incubator-airflow,mtagle/airflow,jgao54/airflow,skudriashev/incubator-airflow,janczak10/incubator-airflow,danielvdende/incubator-airflow,cfei18/incubator-airflow,kerzhner/airflow,Acehaidrey/incubator-airflow,mtagle/airflow,Tagar/incubator-airflow,juvoinc/airflow,hgrif/incubator-airflow,yk5/incubator-airflow,jwi078/incubator-airflow,opensignal/airflow,DinoCow/airflow,fenglu-g/incubator-airflow,AllisonWang/incubator-airflow,wndhydrnt/airflow,d-lee/airflow,malmiron/incubator-airflow,jiwang576/incubator-airflow,mattuuh7/incubator-airflow,dud225/incubator-airflow,mistercrunch/airflow,cjqian/incubator-airflow,subodhchhabra/airflow,DinoCow/airflow,mylons/incubator-airflow,adamhaney/airflow,cfei18/incubator-airflow,opensignal/airflow,andyxhadji/incubator-airflow,ronfung/incubator-airflow,stverhae/incubator-airflow,ProstoMaxim/incubator-airflow,sdiazb/airflow,gtoonstra/airflow,jesusfcr/airflow,MortalViews/incubator-airflow,DEVELByte/incubator-airflow,kerzhner/airflow,lxneng/incubator-airflow,AllisonWang/in
cubator-airflow,fenglu-g/incubator-airflow,jesusfcr/airflow,biln/airflow,nathanielvarona/airflow,opensignal/airflow,jgao54/airflow,wxiang7/airflow,btallman/incubator-airflow,vineet-rh/incubator-airflow,jgao54/airflow,andyxhadji/incubator-airflow,alexvanboxel/airflow,malmiron/incubator-airflow,ronfung/incubator-airflow,janczak10/incubator-airflow,zodiac/incubator-airflow,akosel/incubator-airflow,juvoinc/airflow,danielvdende/incubator-airflow,wndhydrnt/airflow,edgarRd/incubator-airflow,Twistbioscience/incubator-airflow,gilt/incubator-airflow,cjqian/incubator-airflow,juvoinc/airflow,vijaysbhat/incubator-airflow,apache/incubator-airflow,KL-WLCR/incubator-airflow,Fokko/incubator-airflow,subodhchhabra/airflow,artwr/airflow,dhuang/incubator-airflow,aminghadersohi/airflow,nathanielvarona/airflow,OpringaoDoTurno/airflow,zoyahav/incubator-airflow,sid88in/incubator-airflow,wolfier/incubator-airflow,cfei18/incubator-airflow,jiwang576/incubator-airflow,wooga/airflow,dhuang/incubator-airflow,CloverHealth/airflow,N3da/incubator-airflow,dhuang/incubator-airflow,jlowin/airflow,DEVELByte/incubator-airflow,MetrodataTeam/incubator-airflow,wolfier/incubator-airflow,zodiac/incubator-airflow,sekikn/incubator-airflow,mrares/incubator-airflow,d-lee/airflow,spektom/incubator-airflow,lyft/incubator-airflow,andrewmchen/incubator-airflow,apache/airflow,lxneng/incubator-airflow,bolkedebruin/airflow,biln/airflow,r39132/airflow,saguziel/incubator-airflow,mtdewulf/incubator-airflow,bolkedebruin/airflow,wileeam/airflow,mrares/incubator-airflow,ledsusop/airflow,spektom/incubator-airflow,alexvanboxel/airflow,Acehaidrey/incubator-airflow,dud225/incubator-airflow,asnir/airflow,skudriashev/incubator-airflow,janczak10/incubator-airflow,gritlogic/incubator-airflow,Tagar/incubator-airflow,modsy/incubator-airflow,nathanielvarona/airflow,ronfung/incubator-airflow,caseyching/incubator-airflow,brandsoulmates/incubator-airflow,Acehaidrey/incubator-airflow,criccomini/airflow,easytaxibr/airflow,caseyching/incubato
r-airflow,criccomini/airflow,cfei18/incubator-airflow,N3da/incubator-airflow,lyft/incubator-airflow,hgrif/incubator-airflow,adrpar/incubator-airflow,akosel/incubator-airflow,jfantom/incubator-airflow,spektom/incubator-airflow,sergiohgz/incubator-airflow,owlabs/incubator-airflow,MetrodataTeam/incubator-airflow,sekikn/incubator-airflow,mrkm4ntr/incubator-airflow,d-lee/airflow,andyxhadji/incubator-airflow,mtagle/airflow,vijaysbhat/incubator-airflow,adamhaney/airflow,r39132/airflow,dud225/incubator-airflow,Twistbioscience/incubator-airflow,zack3241/incubator-airflow,modsy/incubator-airflow,Twistbioscience/incubator-airflow,NielsZeilemaker/incubator-airflow,Tagar/incubator-airflow,jfantom/incubator-airflow,sid88in/incubator-airflow,mistercrunch/airflow,wolfier/incubator-airflow,apache/airflow,mylons/incubator-airflow,spektom/incubator-airflow,MortalViews/incubator-airflow,CloverHealth/airflow,yk5/incubator-airflow,hamedhsn/incubator-airflow,andrewmchen/incubator-airflow,mrkm4ntr/incubator-airflow,wndhydrnt/airflow,asnir/airflow,danielvdende/incubator-airflow,dmitry-r/incubator-airflow,mylons/incubator-airflow,kerzhner/airflow,apache/incubator-airflow,cademarkegard/airflow,artwr/airflow,jlowin/airflow,CloverHealth/airflow,zoyahav/incubator-airflow,preete-dixit-ck/incubator-airflow,easytaxibr/airflow,malmiron/incubator-airflow,N3da/incubator-airflow,bolkedebruin/airflow,modsy/incubator-airflow,gtoonstra/airflow,dmitry-r/incubator-airflow,alexvanboxel/airflow,saguziel/incubator-airflow,jbhsieh/incubator-airflow,akosel/incubator-airflow,apache/airflow,brandsoulmates/incubator-airflow,N3da/incubator-airflow,wxiang7/airflow,Fokko/incubator-airflow,yk5/incubator-airflow,hamedhsn/incubator-airflow,rishibarve/incubator-airflow,bolkedebruin/airflow,d-lee/airflow,airbnb/airflow,vineet-rh/incubator-airflow,apache/airflow,plypaul/airflow,bolkedebruin/airflow,andrewmchen/incubator-airflow,mtdewulf/incubator-airflow,sdiazb/airflow,MortalViews/incubator-airflow,Acehaidrey/incubator-airf
low,sekikn/incubator-airflow,NielsZeilemaker/incubator-airflow,asnir/airflow,wooga/airflow,ledsusop/airflow,artwr/airflow,danielvdende/incubator-airflow,btallman/incubator-airflow,asnir/airflow,airbnb/airflow,stverhae/incubator-airflow,cfei18/incubator-airflow,holygits/incubator-airflow,sergiohgz/incubator-airflow,zoyahav/incubator-airflow,jbhsieh/incubator-airflow,stverhae/incubator-airflow,yiqingj/airflow,ProstoMaxim/incubator-airflow
from airflow.utils import import_module_attrs as _import_module_attrs _hooks = { 'ftp_hook': ['FTPHook'], + 'ftps_hook': ['FTPSHook'], 'vertica_hook': ['VerticaHook'], 'ssh_hook': ['SSHHook'], 'bigquery_hook': ['BigQueryHook'], 'qubole_hook': ['QuboleHook'] } _import_module_attrs(globals(), _hooks)
Add FTPSHook in _hooks register.
## Code Before: from airflow.utils import import_module_attrs as _import_module_attrs _hooks = { 'ftp_hook': ['FTPHook'], 'vertica_hook': ['VerticaHook'], 'ssh_hook': ['SSHHook'], 'bigquery_hook': ['BigQueryHook'], 'qubole_hook': ['QuboleHook'] } _import_module_attrs(globals(), _hooks) ## Instruction: Add FTPSHook in _hooks register. ## Code After: from airflow.utils import import_module_attrs as _import_module_attrs _hooks = { 'ftp_hook': ['FTPHook'], 'ftps_hook': ['FTPSHook'], 'vertica_hook': ['VerticaHook'], 'ssh_hook': ['SSHHook'], 'bigquery_hook': ['BigQueryHook'], 'qubole_hook': ['QuboleHook'] } _import_module_attrs(globals(), _hooks)
// ... existing code ... 'ftp_hook': ['FTPHook'], 'ftps_hook': ['FTPSHook'], 'vertica_hook': ['VerticaHook'], // ... rest of the code ...
6c20f8a2c722fca1b2f811d4f06ea5480ec6d945
telethon/events/messagedeleted.py
telethon/events/messagedeleted.py
from .common import EventBuilder, EventCommon, name_inner_event from ..tl import types @name_inner_event class MessageDeleted(EventBuilder): """ Event fired when one or more messages are deleted. """ def build(self, update): if isinstance(update, types.UpdateDeleteMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=None ) elif isinstance(update, types.UpdateDeleteChannelMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=types.PeerChannel(update.channel_id) ) else: return event._entities = update._entities return self._filter_event(event) class Event(EventCommon): def __init__(self, deleted_ids, peer): super().__init__( chat_peer=peer, msg_id=(deleted_ids or [0])[0] ) self.deleted_id = None if not deleted_ids else deleted_ids[0] self.deleted_ids = deleted_ids
from .common import EventBuilder, EventCommon, name_inner_event from ..tl import types @name_inner_event class MessageDeleted(EventBuilder): """ Event fired when one or more messages are deleted. """ def build(self, update): if isinstance(update, types.UpdateDeleteMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=None ) elif isinstance(update, types.UpdateDeleteChannelMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=types.PeerChannel(update.channel_id) ) else: return event._entities = update._entities return self._filter_event(event) class Event(EventCommon): def __init__(self, deleted_ids, peer): super().__init__( chat_peer=peer, msg_id=(deleted_ids or [0])[0] ) if peer is None: # If it's not a channel ID, then it was private/small group. # We can't know which one was exactly unless we logged all # messages, but we can indicate that it was maybe either of # both by setting them both to True. self.is_private = self.is_group = True self.deleted_id = None if not deleted_ids else deleted_ids[0] self.deleted_ids = deleted_ids
Set is private/group=True for messages deleted out of channels
Set is private/group=True for messages deleted out of channels
Python
mit
LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,expectocode/Telethon,LonamiWebs/Telethon
from .common import EventBuilder, EventCommon, name_inner_event from ..tl import types @name_inner_event class MessageDeleted(EventBuilder): """ Event fired when one or more messages are deleted. """ def build(self, update): if isinstance(update, types.UpdateDeleteMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=None ) elif isinstance(update, types.UpdateDeleteChannelMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=types.PeerChannel(update.channel_id) ) else: return event._entities = update._entities return self._filter_event(event) class Event(EventCommon): def __init__(self, deleted_ids, peer): super().__init__( chat_peer=peer, msg_id=(deleted_ids or [0])[0] ) + if peer is None: + # If it's not a channel ID, then it was private/small group. + # We can't know which one was exactly unless we logged all + # messages, but we can indicate that it was maybe either of + # both by setting them both to True. + self.is_private = self.is_group = True + self.deleted_id = None if not deleted_ids else deleted_ids[0] self.deleted_ids = deleted_ids
Set is private/group=True for messages deleted out of channels
## Code Before: from .common import EventBuilder, EventCommon, name_inner_event from ..tl import types @name_inner_event class MessageDeleted(EventBuilder): """ Event fired when one or more messages are deleted. """ def build(self, update): if isinstance(update, types.UpdateDeleteMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=None ) elif isinstance(update, types.UpdateDeleteChannelMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=types.PeerChannel(update.channel_id) ) else: return event._entities = update._entities return self._filter_event(event) class Event(EventCommon): def __init__(self, deleted_ids, peer): super().__init__( chat_peer=peer, msg_id=(deleted_ids or [0])[0] ) self.deleted_id = None if not deleted_ids else deleted_ids[0] self.deleted_ids = deleted_ids ## Instruction: Set is private/group=True for messages deleted out of channels ## Code After: from .common import EventBuilder, EventCommon, name_inner_event from ..tl import types @name_inner_event class MessageDeleted(EventBuilder): """ Event fired when one or more messages are deleted. """ def build(self, update): if isinstance(update, types.UpdateDeleteMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=None ) elif isinstance(update, types.UpdateDeleteChannelMessages): event = MessageDeleted.Event( deleted_ids=update.messages, peer=types.PeerChannel(update.channel_id) ) else: return event._entities = update._entities return self._filter_event(event) class Event(EventCommon): def __init__(self, deleted_ids, peer): super().__init__( chat_peer=peer, msg_id=(deleted_ids or [0])[0] ) if peer is None: # If it's not a channel ID, then it was private/small group. # We can't know which one was exactly unless we logged all # messages, but we can indicate that it was maybe either of # both by setting them both to True. 
self.is_private = self.is_group = True self.deleted_id = None if not deleted_ids else deleted_ids[0] self.deleted_ids = deleted_ids
... ) if peer is None: # If it's not a channel ID, then it was private/small group. # We can't know which one was exactly unless we logged all # messages, but we can indicate that it was maybe either of # both by setting them both to True. self.is_private = self.is_group = True self.deleted_id = None if not deleted_ids else deleted_ids[0] ...
1aa75af659daac62fdef423beac16aef1f057afb
test/testCore.py
test/testCore.py
import pyfits import sys def test_with_statement(): if sys.hexversion >= 0x02050000: exec("""from __future__ import with_statement with pyfits.open("ascii.fits") as f: pass""") def test_naxisj_check(): hdulist = pyfits.open("o4sp040b0_raw.fits") hdulist[1].header.update("NAXIS3", 500) assert 'NAXIS3' in hdulist[1].header hdulist.verify('fix') assert 'NAXIS3' not in hdulist[1].header
import pyfits import numpy as np import sys def test_with_statement(): if sys.hexversion >= 0x02050000: exec("""from __future__ import with_statement with pyfits.open("ascii.fits") as f: pass""") def test_naxisj_check(): hdulist = pyfits.open("o4sp040b0_raw.fits") hdulist[1].header.update("NAXIS3", 500) assert 'NAXIS3' in hdulist[1].header hdulist.verify('fix') assert 'NAXIS3' not in hdulist[1].header def test_byteswap(): p = pyfits.PrimaryHDU() l = pyfits.HDUList() n = np.zeros(3, dtype='i2') n[0] = 1 n[1] = 60000 n[2] = 2 c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n) t = pyfits.new_table([c]) l.append(p) l.append(t) l.writeto('test.fits', clobber=True) p = pyfits.open('test.fits') assert p[1].data[1]['foo'] == 60000.0
Add test for byteswapping bug resolved in r514.
Add test for byteswapping bug resolved in r514. git-svn-id: 5305e2c1a78737cf7dd5f8f44e9bbbd00348fde7@543 ed100bfc-0583-0410-97f2-c26b58777a21
Python
bsd-3-clause
embray/PyFITS,spacetelescope/PyFITS,embray/PyFITS,embray/PyFITS,spacetelescope/PyFITS,embray/PyFITS
import pyfits + import numpy as np import sys def test_with_statement(): if sys.hexversion >= 0x02050000: exec("""from __future__ import with_statement with pyfits.open("ascii.fits") as f: pass""") def test_naxisj_check(): hdulist = pyfits.open("o4sp040b0_raw.fits") hdulist[1].header.update("NAXIS3", 500) assert 'NAXIS3' in hdulist[1].header hdulist.verify('fix') assert 'NAXIS3' not in hdulist[1].header + def test_byteswap(): + p = pyfits.PrimaryHDU() + l = pyfits.HDUList() + + n = np.zeros(3, dtype='i2') + n[0] = 1 + n[1] = 60000 + n[2] = 2 + + c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n) + t = pyfits.new_table([c]) + + l.append(p) + l.append(t) + + l.writeto('test.fits', clobber=True) + + p = pyfits.open('test.fits') + assert p[1].data[1]['foo'] == 60000.0 +
Add test for byteswapping bug resolved in r514.
## Code Before: import pyfits import sys def test_with_statement(): if sys.hexversion >= 0x02050000: exec("""from __future__ import with_statement with pyfits.open("ascii.fits") as f: pass""") def test_naxisj_check(): hdulist = pyfits.open("o4sp040b0_raw.fits") hdulist[1].header.update("NAXIS3", 500) assert 'NAXIS3' in hdulist[1].header hdulist.verify('fix') assert 'NAXIS3' not in hdulist[1].header ## Instruction: Add test for byteswapping bug resolved in r514. ## Code After: import pyfits import numpy as np import sys def test_with_statement(): if sys.hexversion >= 0x02050000: exec("""from __future__ import with_statement with pyfits.open("ascii.fits") as f: pass""") def test_naxisj_check(): hdulist = pyfits.open("o4sp040b0_raw.fits") hdulist[1].header.update("NAXIS3", 500) assert 'NAXIS3' in hdulist[1].header hdulist.verify('fix') assert 'NAXIS3' not in hdulist[1].header def test_byteswap(): p = pyfits.PrimaryHDU() l = pyfits.HDUList() n = np.zeros(3, dtype='i2') n[0] = 1 n[1] = 60000 n[2] = 2 c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n) t = pyfits.new_table([c]) l.append(p) l.append(t) l.writeto('test.fits', clobber=True) p = pyfits.open('test.fits') assert p[1].data[1]['foo'] == 60000.0
... import pyfits import numpy as np import sys ... assert 'NAXIS3' not in hdulist[1].header def test_byteswap(): p = pyfits.PrimaryHDU() l = pyfits.HDUList() n = np.zeros(3, dtype='i2') n[0] = 1 n[1] = 60000 n[2] = 2 c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n) t = pyfits.new_table([c]) l.append(p) l.append(t) l.writeto('test.fits', clobber=True) p = pyfits.open('test.fits') assert p[1].data[1]['foo'] == 60000.0 ...
35a413ecdc83578a0ef63d0865a4fe7bae6f1e99
scipy/interpolate/generate_interpnd.py
scipy/interpolate/generate_interpnd.py
import tempfile import subprocess import os import sys import re import shutil from mako.template import Template f = open('interpnd.pyx', 'r') template = f.read() f.close() tmp_dir = tempfile.mkdtemp() try: # Run templating engine fn = os.path.join(tmp_dir, 'interpnd.pyx') f = open(fn, 'w') f.write(Template(template).render()) f.close() # Run Cython dst_fn = os.path.join(tmp_dir, 'interpnd.c') ret = subprocess.call(['cython', '-I', '../..', '-o', dst_fn, fn]) if ret != 0: sys.exit(ret) # Strip comments f = open(dst_fn, 'r') text = f.read() f.close() r = re.compile(r'/\*(.*?)\*/', re.S) text = r.sub('', text) f = open('interpnd.c', 'w') f.write(text) f.close() finally: shutil.rmtree(tmp_dir)
import tempfile import subprocess import os import sys import re import shutil from mako.template import Template dotnet = False if len(sys.argv) > 1 and sys.argv[1] == '--dotnet': dotnet = True f = open('interpnd.pyx', 'r') template = f.read() f.close() tmp_dir = tempfile.mkdtemp() try: # Run templating engine fn = os.path.join(tmp_dir, 'interpnd.pyx') f = open(fn, 'w') f.write(Template(template).render()) f.close() # Run Cython if dotnet: dst_name = 'interpnd.cpp' args_extra = ['--dotnet'] else: dst_name = 'interpnd.c' args_extra = [] dst_fn = os.path.join(tmp_dir, dst_name) ret = subprocess.call(['cython', '-I', '../..', '-o'] + args_extra + [dst_fn, fn]) if ret != 0: sys.exit(ret) # Strip comments f = open(dst_fn, 'r') text = f.read() f.close() r = re.compile(r'/\*(.*?)\*/', re.S) text = r.sub('', text) f = open(dst_name, 'w') f.write(text) f.close() finally: shutil.rmtree(tmp_dir)
Modify the interpnd cython generator to allow .NET output
Modify the interpnd cython generator to allow .NET output
Python
bsd-3-clause
jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor
import tempfile import subprocess import os import sys import re import shutil from mako.template import Template + + dotnet = False + if len(sys.argv) > 1 and sys.argv[1] == '--dotnet': + dotnet = True f = open('interpnd.pyx', 'r') template = f.read() f.close() tmp_dir = tempfile.mkdtemp() try: # Run templating engine fn = os.path.join(tmp_dir, 'interpnd.pyx') f = open(fn, 'w') f.write(Template(template).render()) f.close() # Run Cython + if dotnet: + dst_name = 'interpnd.cpp' + args_extra = ['--dotnet'] + else: + dst_name = 'interpnd.c' + args_extra = [] - dst_fn = os.path.join(tmp_dir, 'interpnd.c') + dst_fn = os.path.join(tmp_dir, dst_name) + - ret = subprocess.call(['cython', '-I', '../..', '-o', dst_fn, fn]) + ret = subprocess.call(['cython', '-I', '../..', '-o'] + args_extra + [dst_fn, fn]) if ret != 0: sys.exit(ret) # Strip comments f = open(dst_fn, 'r') text = f.read() f.close() r = re.compile(r'/\*(.*?)\*/', re.S) text = r.sub('', text) - f = open('interpnd.c', 'w') + f = open(dst_name, 'w') f.write(text) f.close() finally: shutil.rmtree(tmp_dir)
Modify the interpnd cython generator to allow .NET output
## Code Before: import tempfile import subprocess import os import sys import re import shutil from mako.template import Template f = open('interpnd.pyx', 'r') template = f.read() f.close() tmp_dir = tempfile.mkdtemp() try: # Run templating engine fn = os.path.join(tmp_dir, 'interpnd.pyx') f = open(fn, 'w') f.write(Template(template).render()) f.close() # Run Cython dst_fn = os.path.join(tmp_dir, 'interpnd.c') ret = subprocess.call(['cython', '-I', '../..', '-o', dst_fn, fn]) if ret != 0: sys.exit(ret) # Strip comments f = open(dst_fn, 'r') text = f.read() f.close() r = re.compile(r'/\*(.*?)\*/', re.S) text = r.sub('', text) f = open('interpnd.c', 'w') f.write(text) f.close() finally: shutil.rmtree(tmp_dir) ## Instruction: Modify the interpnd cython generator to allow .NET output ## Code After: import tempfile import subprocess import os import sys import re import shutil from mako.template import Template dotnet = False if len(sys.argv) > 1 and sys.argv[1] == '--dotnet': dotnet = True f = open('interpnd.pyx', 'r') template = f.read() f.close() tmp_dir = tempfile.mkdtemp() try: # Run templating engine fn = os.path.join(tmp_dir, 'interpnd.pyx') f = open(fn, 'w') f.write(Template(template).render()) f.close() # Run Cython if dotnet: dst_name = 'interpnd.cpp' args_extra = ['--dotnet'] else: dst_name = 'interpnd.c' args_extra = [] dst_fn = os.path.join(tmp_dir, dst_name) ret = subprocess.call(['cython', '-I', '../..', '-o'] + args_extra + [dst_fn, fn]) if ret != 0: sys.exit(ret) # Strip comments f = open(dst_fn, 'r') text = f.read() f.close() r = re.compile(r'/\*(.*?)\*/', re.S) text = r.sub('', text) f = open(dst_name, 'w') f.write(text) f.close() finally: shutil.rmtree(tmp_dir)
# ... existing code ... from mako.template import Template dotnet = False if len(sys.argv) > 1 and sys.argv[1] == '--dotnet': dotnet = True # ... modified code ... # Run Cython if dotnet: dst_name = 'interpnd.cpp' args_extra = ['--dotnet'] else: dst_name = 'interpnd.c' args_extra = [] dst_fn = os.path.join(tmp_dir, dst_name) ret = subprocess.call(['cython', '-I', '../..', '-o'] + args_extra + [dst_fn, fn]) if ret != 0: ... text = r.sub('', text) f = open(dst_name, 'w') f.write(text) # ... rest of the code ...
527593c5f183054e330894e6b7161e24cca265a5
lily/notes/factories.py
lily/notes/factories.py
import random import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory from faker.factory import Factory from lily.accounts.factories import AccountFactory from lily.contacts.factories import ContactFactory from lily.users.factories import LilyUserFactory from .models import Note faker = Factory.create('nl_NL') class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) @factory.lazy_attribute def subject(self): SubjectFactory = random.choice([AccountFactory, ContactFactory]) return SubjectFactory(tenant=self.tenant) class Meta: model = Note
import random from datetime import datetime import pytz import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory from faker.factory import Factory from lily.accounts.factories import AccountFactory from lily.contacts.factories import ContactFactory from lily.users.factories import LilyUserFactory from .models import Note faker = Factory.create('nl_NL') class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) sort_by_date = LazyAttribute(lambda o: datetime.now(tz=pytz.utc)) @factory.lazy_attribute def subject(self): SubjectFactory = random.choice([AccountFactory, ContactFactory]) return SubjectFactory(tenant=self.tenant) class Meta: model = Note
Fix so testdata can be loaded when setting up local environment
Fix so testdata can be loaded when setting up local environment
Python
agpl-3.0
HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily
import random + from datetime import datetime + import pytz import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory from faker.factory import Factory from lily.accounts.factories import AccountFactory from lily.contacts.factories import ContactFactory from lily.users.factories import LilyUserFactory from .models import Note faker = Factory.create('nl_NL') class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) + sort_by_date = LazyAttribute(lambda o: datetime.now(tz=pytz.utc)) @factory.lazy_attribute def subject(self): SubjectFactory = random.choice([AccountFactory, ContactFactory]) return SubjectFactory(tenant=self.tenant) class Meta: model = Note
Fix so testdata can be loaded when setting up local environment
## Code Before: import random import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory from faker.factory import Factory from lily.accounts.factories import AccountFactory from lily.contacts.factories import ContactFactory from lily.users.factories import LilyUserFactory from .models import Note faker = Factory.create('nl_NL') class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) @factory.lazy_attribute def subject(self): SubjectFactory = random.choice([AccountFactory, ContactFactory]) return SubjectFactory(tenant=self.tenant) class Meta: model = Note ## Instruction: Fix so testdata can be loaded when setting up local environment ## Code After: import random from datetime import datetime import pytz import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory from faker.factory import Factory from lily.accounts.factories import AccountFactory from lily.contacts.factories import ContactFactory from lily.users.factories import LilyUserFactory from .models import Note faker = Factory.create('nl_NL') class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) sort_by_date = LazyAttribute(lambda o: datetime.now(tz=pytz.utc)) @factory.lazy_attribute def subject(self): SubjectFactory = random.choice([AccountFactory, ContactFactory]) return SubjectFactory(tenant=self.tenant) class Meta: model = Note
# ... existing code ... import random from datetime import datetime import pytz import factory # ... modified code ... author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) sort_by_date = LazyAttribute(lambda o: datetime.now(tz=pytz.utc)) # ... rest of the code ...
c17b8e6141d2832b9920eb143de2937993fb8865
linguist/models/base.py
linguist/models/base.py
from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import python_2_unicode_compatible from .. import settings @python_2_unicode_compatible class Translation(models.Model): """ A Translation. """ identifier = models.CharField( max_length=100, verbose_name=_('identifier'), help_text=_('The registered model identifier.')) object_id = models.IntegerField( verbose_name=_('The object ID'), help_text=_('The object ID of this translation')) locale = models.CharField( max_length=10, verbose_name=_('locale'), choices=settings.SUPPORTED_LOCALES, default=settings.DEFAULT_LOCALE, help_text=_('The locale for this translation')) field_name = models.CharField( max_length=100, verbose_name=_('field name'), help_text=_('The model field name for this translation.')) content = models.TextField( verbose_name=_('content'), null=True, help_text=_('The translated content for the field.')) class Meta: abstract = True app_label = 'linguist' verbose_name = _('translation') verbose_name_plural = _('translations') unique_together = (('identifier', 'object_id', 'locale', 'field_name'),) def __str__(self): return '%s:%d:%s:%s' % ( self.identifier, self.object_id, self.field_name, self.locale)
from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import python_2_unicode_compatible from .. import settings @python_2_unicode_compatible class Translation(models.Model): """ A Translation. """ identifier = models.CharField( max_length=100, verbose_name=_('identifier'), help_text=_('The registered model identifier.')) object_id = models.IntegerField( verbose_name=_('The object ID'), help_text=_('The object ID of this translation')) language = models.CharField( max_length=10, verbose_name=_('locale'), choices=settings.SUPPORTED_LANGUAGES, default=settings.DEFAULT_LANGUAGE, help_text=_('The language for this translation')) field_name = models.CharField( max_length=100, verbose_name=_('field name'), help_text=_('The model field name for this translation.')) content = models.TextField( verbose_name=_('content'), null=True, help_text=_('The translated content for the field.')) class Meta: abstract = True app_label = 'linguist' verbose_name = _('translation') verbose_name_plural = _('translations') unique_together = (('identifier', 'object_id', 'language', 'field_name'),) def __str__(self): return '%s:%d:%s:%s' % ( self.identifier, self.object_id, self.field_name, self.language)
Rename locale field to language.
Rename locale field to language.
Python
mit
ulule/django-linguist
from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import python_2_unicode_compatible from .. import settings @python_2_unicode_compatible class Translation(models.Model): """ A Translation. """ identifier = models.CharField( max_length=100, verbose_name=_('identifier'), help_text=_('The registered model identifier.')) object_id = models.IntegerField( verbose_name=_('The object ID'), help_text=_('The object ID of this translation')) - locale = models.CharField( + language = models.CharField( max_length=10, verbose_name=_('locale'), - choices=settings.SUPPORTED_LOCALES, + choices=settings.SUPPORTED_LANGUAGES, - default=settings.DEFAULT_LOCALE, + default=settings.DEFAULT_LANGUAGE, - help_text=_('The locale for this translation')) + help_text=_('The language for this translation')) field_name = models.CharField( max_length=100, verbose_name=_('field name'), help_text=_('The model field name for this translation.')) content = models.TextField( verbose_name=_('content'), null=True, help_text=_('The translated content for the field.')) class Meta: abstract = True app_label = 'linguist' verbose_name = _('translation') verbose_name_plural = _('translations') - unique_together = (('identifier', 'object_id', 'locale', 'field_name'),) + unique_together = (('identifier', 'object_id', 'language', 'field_name'),) def __str__(self): return '%s:%d:%s:%s' % ( self.identifier, self.object_id, self.field_name, - self.locale) + self.language)
Rename locale field to language.
## Code Before: from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import python_2_unicode_compatible from .. import settings @python_2_unicode_compatible class Translation(models.Model): """ A Translation. """ identifier = models.CharField( max_length=100, verbose_name=_('identifier'), help_text=_('The registered model identifier.')) object_id = models.IntegerField( verbose_name=_('The object ID'), help_text=_('The object ID of this translation')) locale = models.CharField( max_length=10, verbose_name=_('locale'), choices=settings.SUPPORTED_LOCALES, default=settings.DEFAULT_LOCALE, help_text=_('The locale for this translation')) field_name = models.CharField( max_length=100, verbose_name=_('field name'), help_text=_('The model field name for this translation.')) content = models.TextField( verbose_name=_('content'), null=True, help_text=_('The translated content for the field.')) class Meta: abstract = True app_label = 'linguist' verbose_name = _('translation') verbose_name_plural = _('translations') unique_together = (('identifier', 'object_id', 'locale', 'field_name'),) def __str__(self): return '%s:%d:%s:%s' % ( self.identifier, self.object_id, self.field_name, self.locale) ## Instruction: Rename locale field to language. ## Code After: from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import python_2_unicode_compatible from .. import settings @python_2_unicode_compatible class Translation(models.Model): """ A Translation. 
""" identifier = models.CharField( max_length=100, verbose_name=_('identifier'), help_text=_('The registered model identifier.')) object_id = models.IntegerField( verbose_name=_('The object ID'), help_text=_('The object ID of this translation')) language = models.CharField( max_length=10, verbose_name=_('locale'), choices=settings.SUPPORTED_LANGUAGES, default=settings.DEFAULT_LANGUAGE, help_text=_('The language for this translation')) field_name = models.CharField( max_length=100, verbose_name=_('field name'), help_text=_('The model field name for this translation.')) content = models.TextField( verbose_name=_('content'), null=True, help_text=_('The translated content for the field.')) class Meta: abstract = True app_label = 'linguist' verbose_name = _('translation') verbose_name_plural = _('translations') unique_together = (('identifier', 'object_id', 'language', 'field_name'),) def __str__(self): return '%s:%d:%s:%s' % ( self.identifier, self.object_id, self.field_name, self.language)
// ... existing code ... language = models.CharField( max_length=10, // ... modified code ... verbose_name=_('locale'), choices=settings.SUPPORTED_LANGUAGES, default=settings.DEFAULT_LANGUAGE, help_text=_('The language for this translation')) ... verbose_name_plural = _('translations') unique_together = (('identifier', 'object_id', 'language', 'field_name'),) ... self.field_name, self.language) // ... rest of the code ...
310553e1282231c35093ff355c61129e9f073a0a
src/lib/verify_email_google.py
src/lib/verify_email_google.py
import DNS from validate_email import validate_email from DNS.Lib import PackError def is_google_apps_email(email): hostname = email[email.find('@')+1:] try: mx_hosts = DNS.mxlookup(hostname) except DNS.ServerError as e: return False except PackError as e: return False for mx in mx_hosts: if len(mx) == 2: priority, host_server = mx else: host_server = mx if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower(): return True return False
import DNS import re from validate_email import validate_email from DNS.Lib import PackError EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$') def is_valid_email(email): if email.count('@') != 1: return False return bool(EMAIL_RE.match(email)) def is_google_apps_email(email): if not is_valid_email(email): return False hostname = email[email.find('@')+1:] try: mx_hosts = DNS.mxlookup(hostname) except DNS.ServerError as e: return False except PackError as e: return False for mx in mx_hosts: if len(mx) == 2: priority, host_server = mx else: host_server = mx if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower(): return True return False
Add Google Apps email address validation
Add Google Apps email address validation
Python
agpl-3.0
juposocial/jupo,juposocial/jupo,juposocial/jupo,juposocial/jupo
import DNS + import re from validate_email import validate_email from DNS.Lib import PackError + EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$') + + def is_valid_email(email): + if email.count('@') != 1: + return False + return bool(EMAIL_RE.match(email)) + def is_google_apps_email(email): + if not is_valid_email(email): + return False + hostname = email[email.find('@')+1:] try: mx_hosts = DNS.mxlookup(hostname) except DNS.ServerError as e: return False except PackError as e: return False for mx in mx_hosts: if len(mx) == 2: priority, host_server = mx else: host_server = mx if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower(): return True return False
Add Google Apps email address validation
## Code Before: import DNS from validate_email import validate_email from DNS.Lib import PackError def is_google_apps_email(email): hostname = email[email.find('@')+1:] try: mx_hosts = DNS.mxlookup(hostname) except DNS.ServerError as e: return False except PackError as e: return False for mx in mx_hosts: if len(mx) == 2: priority, host_server = mx else: host_server = mx if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower(): return True return False ## Instruction: Add Google Apps email address validation ## Code After: import DNS import re from validate_email import validate_email from DNS.Lib import PackError EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$') def is_valid_email(email): if email.count('@') != 1: return False return bool(EMAIL_RE.match(email)) def is_google_apps_email(email): if not is_valid_email(email): return False hostname = email[email.find('@')+1:] try: mx_hosts = DNS.mxlookup(hostname) except DNS.ServerError as e: return False except PackError as e: return False for mx in mx_hosts: if len(mx) == 2: priority, host_server = mx else: host_server = mx if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower(): return True return False
// ... existing code ... import DNS import re from validate_email import validate_email // ... modified code ... EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$') def is_valid_email(email): if email.count('@') != 1: return False return bool(EMAIL_RE.match(email)) def is_google_apps_email(email): if not is_valid_email(email): return False hostname = email[email.find('@')+1:] // ... rest of the code ...
9a1a05c33258461c5d474b014654464892cd7b90
bake/bakedefaults.py
bake/bakedefaults.py
LABEL_KEY = 'label' KEY_START = '@' KEY_END = '@' CFGFILE = 'bake.cfg'
LABEL_KEY = 'label' KEY_START = '@' KEY_END = '@'
Remove mention of bake.cfg file
Remove mention of bake.cfg file
Python
mit
AlexSzatmary/bake
LABEL_KEY = 'label' KEY_START = '@' KEY_END = '@' - CFGFILE = 'bake.cfg'
Remove mention of bake.cfg file
## Code Before: LABEL_KEY = 'label' KEY_START = '@' KEY_END = '@' CFGFILE = 'bake.cfg' ## Instruction: Remove mention of bake.cfg file ## Code After: LABEL_KEY = 'label' KEY_START = '@' KEY_END = '@'
// ... existing code ... KEY_END = '@' // ... rest of the code ...
0213bbb8f8075b2dc36a33380a66932c9d541f63
src/sphobjinv/__init__.py
src/sphobjinv/__init__.py
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr from sphobjinv.enum import HeaderFields, SourceTypes from sphobjinv.error import ( SphobjinvError, VersionError, ) from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson from sphobjinv.inventory import Inventory from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version from sphobjinv.schema import json_schema from sphobjinv.version import __version__ from sphobjinv.zlib import compress, decompress
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr from sphobjinv.enum import HeaderFields, SourceTypes from sphobjinv.error import SphobjinvError, VersionError from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson from sphobjinv.inventory import Inventory from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version from sphobjinv.schema import json_schema from sphobjinv.version import __version__ from sphobjinv.zlib import compress, decompress
Clean up the error imports
Clean up the error imports The new errors that had been added for _intersphinx.py had left the sphobjinv.error import line split. No need, when it all fits on one line.
Python
mit
bskinn/sphobjinv
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr from sphobjinv.enum import HeaderFields, SourceTypes + from sphobjinv.error import SphobjinvError, VersionError - from sphobjinv.error import ( - SphobjinvError, - VersionError, - ) from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson from sphobjinv.inventory import Inventory from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version from sphobjinv.schema import json_schema from sphobjinv.version import __version__ from sphobjinv.zlib import compress, decompress
Clean up the error imports
## Code Before: from sphobjinv.data import DataFields, DataObjBytes, DataObjStr from sphobjinv.enum import HeaderFields, SourceTypes from sphobjinv.error import ( SphobjinvError, VersionError, ) from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson from sphobjinv.inventory import Inventory from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version from sphobjinv.schema import json_schema from sphobjinv.version import __version__ from sphobjinv.zlib import compress, decompress ## Instruction: Clean up the error imports ## Code After: from sphobjinv.data import DataFields, DataObjBytes, DataObjStr from sphobjinv.enum import HeaderFields, SourceTypes from sphobjinv.error import SphobjinvError, VersionError from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson from sphobjinv.inventory import Inventory from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version from sphobjinv.schema import json_schema from sphobjinv.version import __version__ from sphobjinv.zlib import compress, decompress
... from sphobjinv.enum import HeaderFields, SourceTypes from sphobjinv.error import SphobjinvError, VersionError from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson ...
b528b2cf4379369da8277a0a1c904267b5c7cf6f
Lib/test/test_atexit.py
Lib/test/test_atexit.py
from test_support import TESTFN, vereq import atexit import os input = """\ import atexit def handler1(): print "handler1" def handler2(*args, **kargs): print "handler2", args, kargs atexit.register(handler1) atexit.register(handler2) atexit.register(handler2, 7, kw="abc") """ fname = TESTFN + ".py" f = file(fname, "w") f.write(input) f.close() p = os.popen("python " + fname) output = p.read() p.close() vereq(output, """\ handler2 (7,) {'kw': 'abc'} handler2 () {} handler1 """) input = """\ def direct(): print "direct exit" import sys sys.exitfunc = direct # Make sure atexit doesn't drop def indirect(): print "indirect exit" import atexit atexit.register(indirect) """ f = file(fname, "w") f.write(input) f.close() p = os.popen("python " + fname) output = p.read() p.close() vereq(output, """\ indirect exit direct exit """) os.unlink(fname)
from test_support import TESTFN, vereq import atexit import os import sys input = """\ import atexit def handler1(): print "handler1" def handler2(*args, **kargs): print "handler2", args, kargs atexit.register(handler1) atexit.register(handler2) atexit.register(handler2, 7, kw="abc") """ fname = TESTFN + ".py" f = file(fname, "w") f.write(input) f.close() p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() p.close() vereq(output, """\ handler2 (7,) {'kw': 'abc'} handler2 () {} handler1 """) input = """\ def direct(): print "direct exit" import sys sys.exitfunc = direct # Make sure atexit doesn't drop def indirect(): print "indirect exit" import atexit atexit.register(indirect) """ f = file(fname, "w") f.write(input) f.close() p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() p.close() vereq(output, """\ indirect exit direct exit """) os.unlink(fname)
Use sys.executable to run Python, as suggested by Neal Norwitz.
Use sys.executable to run Python, as suggested by Neal Norwitz.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
from test_support import TESTFN, vereq import atexit import os + import sys input = """\ import atexit def handler1(): print "handler1" def handler2(*args, **kargs): print "handler2", args, kargs atexit.register(handler1) atexit.register(handler2) atexit.register(handler2, 7, kw="abc") """ fname = TESTFN + ".py" f = file(fname, "w") f.write(input) f.close() - p = os.popen("python " + fname) + p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() p.close() vereq(output, """\ handler2 (7,) {'kw': 'abc'} handler2 () {} handler1 """) input = """\ def direct(): print "direct exit" import sys sys.exitfunc = direct # Make sure atexit doesn't drop def indirect(): print "indirect exit" import atexit atexit.register(indirect) """ f = file(fname, "w") f.write(input) f.close() - p = os.popen("python " + fname) + p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() p.close() vereq(output, """\ indirect exit direct exit """) os.unlink(fname)
Use sys.executable to run Python, as suggested by Neal Norwitz.
## Code Before: from test_support import TESTFN, vereq import atexit import os input = """\ import atexit def handler1(): print "handler1" def handler2(*args, **kargs): print "handler2", args, kargs atexit.register(handler1) atexit.register(handler2) atexit.register(handler2, 7, kw="abc") """ fname = TESTFN + ".py" f = file(fname, "w") f.write(input) f.close() p = os.popen("python " + fname) output = p.read() p.close() vereq(output, """\ handler2 (7,) {'kw': 'abc'} handler2 () {} handler1 """) input = """\ def direct(): print "direct exit" import sys sys.exitfunc = direct # Make sure atexit doesn't drop def indirect(): print "indirect exit" import atexit atexit.register(indirect) """ f = file(fname, "w") f.write(input) f.close() p = os.popen("python " + fname) output = p.read() p.close() vereq(output, """\ indirect exit direct exit """) os.unlink(fname) ## Instruction: Use sys.executable to run Python, as suggested by Neal Norwitz. ## Code After: from test_support import TESTFN, vereq import atexit import os import sys input = """\ import atexit def handler1(): print "handler1" def handler2(*args, **kargs): print "handler2", args, kargs atexit.register(handler1) atexit.register(handler2) atexit.register(handler2, 7, kw="abc") """ fname = TESTFN + ".py" f = file(fname, "w") f.write(input) f.close() p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() p.close() vereq(output, """\ handler2 (7,) {'kw': 'abc'} handler2 () {} handler1 """) input = """\ def direct(): print "direct exit" import sys sys.exitfunc = direct # Make sure atexit doesn't drop def indirect(): print "indirect exit" import atexit atexit.register(indirect) """ f = file(fname, "w") f.write(input) f.close() p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() p.close() vereq(output, """\ indirect exit direct exit """) os.unlink(fname)
... import os import sys ... p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() ... p = os.popen("%s %s" % (sys.executable, fname)) output = p.read() ...
fbc5e2d52549452c2adbe58644358cf3c4eeb526
testsuite/test_util.py
testsuite/test_util.py
import os import unittest import pep8 class UtilTestCase(unittest.TestCase): def test_normalize_paths(self): cwd = os.getcwd() self.assertEquals(pep8.normalize_paths(''), []) self.assertEquals(pep8.normalize_paths(['foo']), ['foo']) self.assertEquals(pep8.normalize_paths('foo'), ['foo']) self.assertEquals(pep8.normalize_paths('foo,bar'), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('/foo/bar,baz/../bat'), ['/foo/bar', cwd + '/bat']) self.assertEquals(pep8.normalize_paths(".pyc,\n build/*"), ['.pyc', cwd + '/build/*'])
import os import unittest import pep8 class UtilTestCase(unittest.TestCase): def test_normalize_paths(self): cwd = os.getcwd() self.assertEquals(pep8.normalize_paths(''), []) self.assertEquals(pep8.normalize_paths([]), []) self.assertEquals(pep8.normalize_paths(None), []) self.assertEquals(pep8.normalize_paths(['foo']), ['foo']) self.assertEquals(pep8.normalize_paths('foo'), ['foo']) self.assertEquals(pep8.normalize_paths('foo,bar'), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('foo, bar '), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('/foo/bar,baz/../bat'), ['/foo/bar', cwd + '/bat']) self.assertEquals(pep8.normalize_paths(".pyc,\n build/*"), ['.pyc', cwd + '/build/*'])
Add a few more cases of "not value"
Add a few more cases of "not value"
Python
mit
ojengwa/pep8,pedros/pep8,asandyz/pep8,jayvdb/pep8,doismellburning/pep8,pandeesh/pep8,jayvdb/pep8,PyCQA/pep8,ABaldwinHunter/pep8,codeclimate/pep8,ABaldwinHunter/pep8-clone-classic,zevnux/pep8,MeteorAdminz/pep8
import os import unittest import pep8 class UtilTestCase(unittest.TestCase): def test_normalize_paths(self): cwd = os.getcwd() self.assertEquals(pep8.normalize_paths(''), []) + self.assertEquals(pep8.normalize_paths([]), []) + self.assertEquals(pep8.normalize_paths(None), []) self.assertEquals(pep8.normalize_paths(['foo']), ['foo']) self.assertEquals(pep8.normalize_paths('foo'), ['foo']) self.assertEquals(pep8.normalize_paths('foo,bar'), ['foo', 'bar']) + self.assertEquals(pep8.normalize_paths('foo, bar '), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('/foo/bar,baz/../bat'), ['/foo/bar', cwd + '/bat']) self.assertEquals(pep8.normalize_paths(".pyc,\n build/*"), ['.pyc', cwd + '/build/*'])
Add a few more cases of "not value"
## Code Before: import os import unittest import pep8 class UtilTestCase(unittest.TestCase): def test_normalize_paths(self): cwd = os.getcwd() self.assertEquals(pep8.normalize_paths(''), []) self.assertEquals(pep8.normalize_paths(['foo']), ['foo']) self.assertEquals(pep8.normalize_paths('foo'), ['foo']) self.assertEquals(pep8.normalize_paths('foo,bar'), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('/foo/bar,baz/../bat'), ['/foo/bar', cwd + '/bat']) self.assertEquals(pep8.normalize_paths(".pyc,\n build/*"), ['.pyc', cwd + '/build/*']) ## Instruction: Add a few more cases of "not value" ## Code After: import os import unittest import pep8 class UtilTestCase(unittest.TestCase): def test_normalize_paths(self): cwd = os.getcwd() self.assertEquals(pep8.normalize_paths(''), []) self.assertEquals(pep8.normalize_paths([]), []) self.assertEquals(pep8.normalize_paths(None), []) self.assertEquals(pep8.normalize_paths(['foo']), ['foo']) self.assertEquals(pep8.normalize_paths('foo'), ['foo']) self.assertEquals(pep8.normalize_paths('foo,bar'), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('foo, bar '), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('/foo/bar,baz/../bat'), ['/foo/bar', cwd + '/bat']) self.assertEquals(pep8.normalize_paths(".pyc,\n build/*"), ['.pyc', cwd + '/build/*'])
... self.assertEquals(pep8.normalize_paths(''), []) self.assertEquals(pep8.normalize_paths([]), []) self.assertEquals(pep8.normalize_paths(None), []) self.assertEquals(pep8.normalize_paths(['foo']), ['foo']) ... self.assertEquals(pep8.normalize_paths('foo,bar'), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('foo, bar '), ['foo', 'bar']) self.assertEquals(pep8.normalize_paths('/foo/bar,baz/../bat'), ...
2a43183f5d2c14bacb92fe563d3c2ddf61b116da
tests/testMain.py
tests/testMain.py
import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils def testXMLDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) if __name__ == '__main__': unittest.main()
import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils def testXmlDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) def testCsvDocumentExists(self): self.assertTrue(os.path.exists(csv_doc)) if __name__ == '__main__': unittest.main()
Make naming consistent with our standard (camelcase always, even with acronymn)
Make naming consistent with our standard (camelcase always, even with acronymn)
Python
mpl-2.0
EsriOceans/btm
import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils - def testXMLDocumentExists(self): + def testXmlDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) + + def testCsvDocumentExists(self): + self.assertTrue(os.path.exists(csv_doc)) if __name__ == '__main__': unittest.main()
Make naming consistent with our standard (camelcase always, even with acronymn)
## Code Before: import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils def testXMLDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) if __name__ == '__main__': unittest.main() ## Instruction: Make naming consistent with our standard (camelcase always, even with acronymn) ## Code After: import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? 
import utils def testXmlDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) def testCsvDocumentExists(self): self.assertTrue(os.path.exists(csv_doc)) if __name__ == '__main__': unittest.main()
// ... existing code ... def testXmlDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) def testCsvDocumentExists(self): self.assertTrue(os.path.exists(csv_doc)) // ... rest of the code ...
7cac8f8ba591315d68e223503c4e93f976c8d89d
characters/views.py
characters/views.py
from django.shortcuts import get_object_or_404, redirect, render from characters.forms import CharacterForm from characters.models import Character, Class, Race def index(request): all_characters = Character.objects.all() context = {'all_characters': all_characters} return render(request, 'characters/index.html', context) def view_character(request, character_id): character = get_object_or_404(Character, pk=character_id) context = {'character': character} return render(request, 'characters/view_character.html', context) def create_character(request): form = CharacterForm(request.POST or None) if request.method == 'POST' and form.is_valid(): race = Race.objects.get(id=1) cclass = Class.objects.get(id=1) character = Character( name=request.POST['name'], background=request.POST['background'], race=race, cclass=cclass ) character.save() return redirect('characters:view', character_id=character.id) context = {'form': form} return render(request, 'characters/create_character.html', context)
from django.shortcuts import get_object_or_404, redirect, render from characters.forms import CharacterForm from characters.models import Character, Class, Race def index(request): all_characters = Character.objects.all() context = {'all_characters': all_characters} return render(request, 'characters/index.html', context) def view_character(request, character_id): character = get_object_or_404(Character, pk=character_id) context = {'character': character} return render(request, 'characters/view_character.html', context) def create_character(request): form = CharacterForm(request.POST or None) if request.method == 'POST' and form.is_valid(): character = Character( name=request.POST['name'], background=request.POST['background'], race_id=1, cclass_id=1 ) character.save() return redirect('characters:view', character_id=character.id) context = {'form': form} return render(request, 'characters/create_character.html', context)
Set default race and class without extra database queries
Set default race and class without extra database queries
Python
mit
mpirnat/django-tutorial-v2
from django.shortcuts import get_object_or_404, redirect, render from characters.forms import CharacterForm from characters.models import Character, Class, Race def index(request): all_characters = Character.objects.all() context = {'all_characters': all_characters} return render(request, 'characters/index.html', context) def view_character(request, character_id): character = get_object_or_404(Character, pk=character_id) context = {'character': character} return render(request, 'characters/view_character.html', context) def create_character(request): form = CharacterForm(request.POST or None) if request.method == 'POST' and form.is_valid(): - race = Race.objects.get(id=1) - cclass = Class.objects.get(id=1) - character = Character( name=request.POST['name'], background=request.POST['background'], - race=race, + race_id=1, - cclass=cclass + cclass_id=1 ) character.save() return redirect('characters:view', character_id=character.id) context = {'form': form} return render(request, 'characters/create_character.html', context)
Set default race and class without extra database queries
## Code Before: from django.shortcuts import get_object_or_404, redirect, render from characters.forms import CharacterForm from characters.models import Character, Class, Race def index(request): all_characters = Character.objects.all() context = {'all_characters': all_characters} return render(request, 'characters/index.html', context) def view_character(request, character_id): character = get_object_or_404(Character, pk=character_id) context = {'character': character} return render(request, 'characters/view_character.html', context) def create_character(request): form = CharacterForm(request.POST or None) if request.method == 'POST' and form.is_valid(): race = Race.objects.get(id=1) cclass = Class.objects.get(id=1) character = Character( name=request.POST['name'], background=request.POST['background'], race=race, cclass=cclass ) character.save() return redirect('characters:view', character_id=character.id) context = {'form': form} return render(request, 'characters/create_character.html', context) ## Instruction: Set default race and class without extra database queries ## Code After: from django.shortcuts import get_object_or_404, redirect, render from characters.forms import CharacterForm from characters.models import Character, Class, Race def index(request): all_characters = Character.objects.all() context = {'all_characters': all_characters} return render(request, 'characters/index.html', context) def view_character(request, character_id): character = get_object_or_404(Character, pk=character_id) context = {'character': character} return render(request, 'characters/view_character.html', context) def create_character(request): form = CharacterForm(request.POST or None) if request.method == 'POST' and form.is_valid(): character = Character( name=request.POST['name'], background=request.POST['background'], race_id=1, cclass_id=1 ) character.save() return redirect('characters:view', character_id=character.id) context = {'form': form} return render(request, 
'characters/create_character.html', context)
# ... existing code ... character = Character( # ... modified code ... background=request.POST['background'], race_id=1, cclass_id=1 ) # ... rest of the code ...
d6da05f79d62f90d8d03908197a0389b67535aa5
halfedge_mesh.py
halfedge_mesh.py
class HalfedgeMesh: def __init__(self, filename=None): """Make an empty halfedge mesh.""" self.vertices = [] self.halfedges = [] self.facets = [] def read_off(self, filename): class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. Args: x: x-coordinate of the point y: y-coordinate of the point z: z-coordinate of the point index: integer id of this vertex """ pass def halfedges(self): """Return a list of halfedges targeting to this vertex.""" pass class Facet: def __init__(self, index): """Create a facet with the given index.""" pass def halfedges(self): """Return halfedges going ccw around this facet.""" pass class Halfedge: def __init__(self, index): """Create a halfedge with given index.""" pass def opposite(self): """Return the opposite halfedge.""" pass def next(self): """Return the opposite halfedge.""" pass def prev(self): """Return the opposite halfedge.""" pass def vertex(self): """Return the target vertex.""" pass def facet(self): """Return the incident facet.""" pass if __name__ == '__main__': m = HalfedgeMesh()
class HalfedgeMesh: def __init__(self, filename=None): """Make an empty halfedge mesh.""" self.vertices = [] self.halfedges = [] self.facets = [] def parse_off(self, filename): """Parses OFF files and returns a set of vertices, halfedges, and facets. """ pass def get_halfedge(self, u, v): """Retrieve halfedge with starting vertex u and target vertex v u - starting vertex v - target vertex Returns a halfedge """ pass class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. x - x-coordinate of the point y - y-coordinate of the point z - z-coordinate of the point index - integer id of this vertex """ pass def halfedges(self): """Return a list of halfedges targeting to this vertex.""" pass class Facet: def __init__(self, index): """Create a facet with the given index.""" pass def halfedges(self): """Return halfedges going ccw around this facet.""" pass class Halfedge: def __init__(self, index): """Create a halfedge with given index.""" pass def opposite(self): """Return the opposite halfedge.""" pass def next(self): """Return the opposite halfedge.""" pass def prev(self): """Return the opposite halfedge.""" pass def vertex(self): """Return the target vertex.""" pass def facet(self): """Return the incident facet.""" pass if __name__ == '__main__': m = HalfedgeMesh()
Add parse_off stub and change docstring
Add parse_off stub and change docstring I follow the TomDoc format for docstrings.
Python
mit
carlosrojas/halfedge_mesh
class HalfedgeMesh: def __init__(self, filename=None): """Make an empty halfedge mesh.""" self.vertices = [] self.halfedges = [] self.facets = [] - def read_off(self, filename): + def parse_off(self, filename): + """Parses OFF files and returns a set of vertices, halfedges, and + facets. + """ + pass + + def get_halfedge(self, u, v): + """Retrieve halfedge with starting vertex u and target vertex v + + u - starting vertex + v - target vertex + + Returns a halfedge + """ + pass class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. - Args: - x: x-coordinate of the point + x - x-coordinate of the point - y: y-coordinate of the point + y - y-coordinate of the point - z: z-coordinate of the point + z - z-coordinate of the point - index: integer id of this vertex + index - integer id of this vertex """ pass def halfedges(self): """Return a list of halfedges targeting to this vertex.""" pass class Facet: def __init__(self, index): """Create a facet with the given index.""" pass def halfedges(self): """Return halfedges going ccw around this facet.""" pass class Halfedge: def __init__(self, index): """Create a halfedge with given index.""" pass def opposite(self): """Return the opposite halfedge.""" pass def next(self): """Return the opposite halfedge.""" pass def prev(self): """Return the opposite halfedge.""" pass def vertex(self): """Return the target vertex.""" pass def facet(self): """Return the incident facet.""" pass if __name__ == '__main__': m = HalfedgeMesh()
Add parse_off stub and change docstring
## Code Before: class HalfedgeMesh: def __init__(self, filename=None): """Make an empty halfedge mesh.""" self.vertices = [] self.halfedges = [] self.facets = [] def read_off(self, filename): class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. Args: x: x-coordinate of the point y: y-coordinate of the point z: z-coordinate of the point index: integer id of this vertex """ pass def halfedges(self): """Return a list of halfedges targeting to this vertex.""" pass class Facet: def __init__(self, index): """Create a facet with the given index.""" pass def halfedges(self): """Return halfedges going ccw around this facet.""" pass class Halfedge: def __init__(self, index): """Create a halfedge with given index.""" pass def opposite(self): """Return the opposite halfedge.""" pass def next(self): """Return the opposite halfedge.""" pass def prev(self): """Return the opposite halfedge.""" pass def vertex(self): """Return the target vertex.""" pass def facet(self): """Return the incident facet.""" pass if __name__ == '__main__': m = HalfedgeMesh() ## Instruction: Add parse_off stub and change docstring ## Code After: class HalfedgeMesh: def __init__(self, filename=None): """Make an empty halfedge mesh.""" self.vertices = [] self.halfedges = [] self.facets = [] def parse_off(self, filename): """Parses OFF files and returns a set of vertices, halfedges, and facets. """ pass def get_halfedge(self, u, v): """Retrieve halfedge with starting vertex u and target vertex v u - starting vertex v - target vertex Returns a halfedge """ pass class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. 
x - x-coordinate of the point y - y-coordinate of the point z - z-coordinate of the point index - integer id of this vertex """ pass def halfedges(self): """Return a list of halfedges targeting to this vertex.""" pass class Facet: def __init__(self, index): """Create a facet with the given index.""" pass def halfedges(self): """Return halfedges going ccw around this facet.""" pass class Halfedge: def __init__(self, index): """Create a halfedge with given index.""" pass def opposite(self): """Return the opposite halfedge.""" pass def next(self): """Return the opposite halfedge.""" pass def prev(self): """Return the opposite halfedge.""" pass def vertex(self): """Return the target vertex.""" pass def facet(self): """Return the incident facet.""" pass if __name__ == '__main__': m = HalfedgeMesh()
// ... existing code ... def parse_off(self, filename): """Parses OFF files and returns a set of vertices, halfedges, and facets. """ pass def get_halfedge(self, u, v): """Retrieve halfedge with starting vertex u and target vertex v u - starting vertex v - target vertex Returns a halfedge """ pass // ... modified code ... x - x-coordinate of the point y - y-coordinate of the point z - z-coordinate of the point index - integer id of this vertex """ // ... rest of the code ...
3735c090702cc8c290dbf8930223ff794c80775a
versionsapp.py
versionsapp.py
from webob import Response from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def APIVersionList(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json([ { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } for version in self.version_classes ])) def APIVersion(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json({ 'todo': 'Report detail' })) def factory(global_config, **settings): return VersionsApp()
from webob import Response import webob.exc from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def _api_version_detail(self, version): return { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } def APIVersionList(self, args): return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ self._api_version_detail(version) for version in self.version_classes ])) def APIVersion(self, version_identifier): versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] if not versions: return webob.exc.HTTPNotFound() if len(versions) > 1: raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ self._api_version_detail(versions[0]) })) def factory(global_config, **settings): return VersionsApp()
Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.
Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.
Python
apache-2.0
NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api
from webob import Response + import webob.exc from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] + def _api_version_detail(self, version): + return { + "id": version._version_identifier(), + "links": [ + { + "href": "/" + version._version_identifier(), + "rel": "self" + } + ] + } + def APIVersionList(self, args): - return Response(content_type = 'application/json', body = self._resultset_to_json([ + return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ + self._api_version_detail(version) for version in self.version_classes - { - "id": version._version_identifier(), - "links": [ - { - "href": "/" + version._version_identifier(), - "rel": "self" - } - ] - } for version in self.version_classes ])) - def APIVersion(self, args): + def APIVersion(self, version_identifier): + versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] + if not versions: + return webob.exc.HTTPNotFound() + if len(versions) > 1: + raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ - 'todo': 'Report detail' + self._api_version_detail(versions[0]) })) def factory(global_config, **settings): return VersionsApp()
Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.
## Code Before: from webob import Response from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def APIVersionList(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json([ { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } for version in self.version_classes ])) def APIVersion(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json({ 'todo': 'Report detail' })) def factory(global_config, **settings): return VersionsApp() ## Instruction: Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version. ## Code After: from webob import Response import webob.exc from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def _api_version_detail(self, version): return { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } def APIVersionList(self, args): return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ self._api_version_detail(version) for version in self.version_classes ])) def APIVersion(self, version_identifier): versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] if not versions: return webob.exc.HTTPNotFound() if len(versions) > 1: raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ self._api_version_detail(versions[0]) })) def factory(global_config, **settings): return VersionsApp()
... from webob import Response import webob.exc from apiversion import APIVersion ... def _api_version_detail(self, version): return { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } def APIVersionList(self, args): return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ self._api_version_detail(version) for version in self.version_classes ])) ... def APIVersion(self, version_identifier): versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] if not versions: return webob.exc.HTTPNotFound() if len(versions) > 1: raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ self._api_version_detail(versions[0]) })) ...
99884ec3e960fa7b73e10a6969c455de6eca542b
src/ggrc_workflows/migrations/versions/20140715214934_26d9c9c91542_add_cycletaskgroupobject_object.py
src/ggrc_workflows/migrations/versions/20140715214934_26d9c9c91542_add_cycletaskgroupobject_object.py
# revision identifiers, used by Alembic. revision = '26d9c9c91542' down_revision = '19a67dc67c3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False)) op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False)) def downgrade(): op.drop_column('cycle_task_group_objects', 'object_type') op.drop_column('cycle_task_group_objects', 'object_id')
# revision identifiers, used by Alembic. revision = '26d9c9c91542' down_revision = '19a67dc67c3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False)) op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False)) op.execute(''' UPDATE cycle_task_group_objects JOIN task_group_objects ON cycle_task_group_objects.task_group_object_id = task_group_objects.id SET cycle_task_group_objects.object_id = task_group_objects.object_id, cycle_task_group_objects.object_type = task_group_objects.object_type; ''') def downgrade(): op.drop_column('cycle_task_group_objects', 'object_type') op.drop_column('cycle_task_group_objects', 'object_id')
Update migration to fix existing CycleTaskGroupObjects
Update migration to fix existing CycleTaskGroupObjects
Python
apache-2.0
NejcZupec/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,vladan-m/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core
# revision identifiers, used by Alembic. revision = '26d9c9c91542' down_revision = '19a67dc67c3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False)) op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False)) + op.execute(''' + UPDATE cycle_task_group_objects + JOIN task_group_objects + ON cycle_task_group_objects.task_group_object_id = task_group_objects.id + SET + cycle_task_group_objects.object_id = task_group_objects.object_id, + cycle_task_group_objects.object_type = task_group_objects.object_type; + ''') + def downgrade(): op.drop_column('cycle_task_group_objects', 'object_type') op.drop_column('cycle_task_group_objects', 'object_id')
Update migration to fix existing CycleTaskGroupObjects
## Code Before: # revision identifiers, used by Alembic. revision = '26d9c9c91542' down_revision = '19a67dc67c3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False)) op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False)) def downgrade(): op.drop_column('cycle_task_group_objects', 'object_type') op.drop_column('cycle_task_group_objects', 'object_id') ## Instruction: Update migration to fix existing CycleTaskGroupObjects ## Code After: # revision identifiers, used by Alembic. revision = '26d9c9c91542' down_revision = '19a67dc67c3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False)) op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False)) op.execute(''' UPDATE cycle_task_group_objects JOIN task_group_objects ON cycle_task_group_objects.task_group_object_id = task_group_objects.id SET cycle_task_group_objects.object_id = task_group_objects.object_id, cycle_task_group_objects.object_type = task_group_objects.object_type; ''') def downgrade(): op.drop_column('cycle_task_group_objects', 'object_type') op.drop_column('cycle_task_group_objects', 'object_id')
// ... existing code ... op.execute(''' UPDATE cycle_task_group_objects JOIN task_group_objects ON cycle_task_group_objects.task_group_object_id = task_group_objects.id SET cycle_task_group_objects.object_id = task_group_objects.object_id, cycle_task_group_objects.object_type = task_group_objects.object_type; ''') // ... rest of the code ...
8df03bdd466270127b4185afa792d26e71e323f7
avalonstar/apps/api/views.py
avalonstar/apps/api/views.py
from django.shortcuts import get_object_or_404 from rest_framework import viewsets from rest_framework.response import Response from apps.broadcasts.models import Broadcast, Host, Raid, Series from apps.games.models import Game from apps.subscribers.models import Ticket from .serializers import (BroadcastSerializer, GameSerializer, HostSerializer, RaidSerializer, SeriesSerializer, TicketSerializer) class BroadcastViewSet(viewsets.ReadOnlyModelViewSet): queryset = Broadcast.objects.all() serializer_class = BroadcastSerializer class HostViewSet(viewsets.ModelViewSet): queryset = Host.objects.all() serializer_class = HostSerializer class RaidViewSet(viewsets.ModelViewSet): queryset = Raid.objects.all() serializer_class = RaidSerializer class TicketViewSet(viewsets.ModelViewSet): queryset = Ticket.objects.all() serializer_class = TicketSerializer def retrieve(self, request, pk=None): queryset = Ticket.objects.all() ticket = get_object_or_404(queryset, name=pk) serializer = TicketSerializer(ticket) return Response(serializer.data)
from django.shortcuts import get_object_or_404 from rest_framework import viewsets from rest_framework.response import Response from apps.broadcasts.models import Broadcast, Host, Raid, Series from apps.games.models import Game from apps.subscribers.models import Ticket from .serializers import (BroadcastSerializer, GameSerializer, HostSerializer, RaidSerializer, SeriesSerializer, TicketSerializer) class BroadcastViewSet(viewsets.ReadOnlyModelViewSet): queryset = Broadcast.objects.all() serializer_class = BroadcastSerializer class HostViewSet(viewsets.ModelViewSet): queryset = Host.objects.all() serializer_class = HostSerializer class RaidViewSet(viewsets.ModelViewSet): queryset = Raid.objects.all() serializer_class = RaidSerializer class TicketViewSet(viewsets.ModelViewSet): queryset = Ticket.objects.order_by('-updated') serializer_class = TicketSerializer def retrieve(self, request, pk=None): queryset = Ticket.objects.all() ticket = get_object_or_404(queryset, name=pk) serializer = TicketSerializer(ticket) return Response(serializer.data)
Order the tickets correctly in the API.
Order the tickets correctly in the API.
Python
apache-2.0
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
from django.shortcuts import get_object_or_404 from rest_framework import viewsets from rest_framework.response import Response from apps.broadcasts.models import Broadcast, Host, Raid, Series from apps.games.models import Game from apps.subscribers.models import Ticket from .serializers import (BroadcastSerializer, GameSerializer, HostSerializer, RaidSerializer, SeriesSerializer, TicketSerializer) class BroadcastViewSet(viewsets.ReadOnlyModelViewSet): queryset = Broadcast.objects.all() serializer_class = BroadcastSerializer class HostViewSet(viewsets.ModelViewSet): queryset = Host.objects.all() serializer_class = HostSerializer class RaidViewSet(viewsets.ModelViewSet): queryset = Raid.objects.all() serializer_class = RaidSerializer class TicketViewSet(viewsets.ModelViewSet): - queryset = Ticket.objects.all() + queryset = Ticket.objects.order_by('-updated') serializer_class = TicketSerializer def retrieve(self, request, pk=None): queryset = Ticket.objects.all() ticket = get_object_or_404(queryset, name=pk) serializer = TicketSerializer(ticket) return Response(serializer.data)
Order the tickets correctly in the API.
## Code Before: from django.shortcuts import get_object_or_404 from rest_framework import viewsets from rest_framework.response import Response from apps.broadcasts.models import Broadcast, Host, Raid, Series from apps.games.models import Game from apps.subscribers.models import Ticket from .serializers import (BroadcastSerializer, GameSerializer, HostSerializer, RaidSerializer, SeriesSerializer, TicketSerializer) class BroadcastViewSet(viewsets.ReadOnlyModelViewSet): queryset = Broadcast.objects.all() serializer_class = BroadcastSerializer class HostViewSet(viewsets.ModelViewSet): queryset = Host.objects.all() serializer_class = HostSerializer class RaidViewSet(viewsets.ModelViewSet): queryset = Raid.objects.all() serializer_class = RaidSerializer class TicketViewSet(viewsets.ModelViewSet): queryset = Ticket.objects.all() serializer_class = TicketSerializer def retrieve(self, request, pk=None): queryset = Ticket.objects.all() ticket = get_object_or_404(queryset, name=pk) serializer = TicketSerializer(ticket) return Response(serializer.data) ## Instruction: Order the tickets correctly in the API. 
## Code After: from django.shortcuts import get_object_or_404 from rest_framework import viewsets from rest_framework.response import Response from apps.broadcasts.models import Broadcast, Host, Raid, Series from apps.games.models import Game from apps.subscribers.models import Ticket from .serializers import (BroadcastSerializer, GameSerializer, HostSerializer, RaidSerializer, SeriesSerializer, TicketSerializer) class BroadcastViewSet(viewsets.ReadOnlyModelViewSet): queryset = Broadcast.objects.all() serializer_class = BroadcastSerializer class HostViewSet(viewsets.ModelViewSet): queryset = Host.objects.all() serializer_class = HostSerializer class RaidViewSet(viewsets.ModelViewSet): queryset = Raid.objects.all() serializer_class = RaidSerializer class TicketViewSet(viewsets.ModelViewSet): queryset = Ticket.objects.order_by('-updated') serializer_class = TicketSerializer def retrieve(self, request, pk=None): queryset = Ticket.objects.all() ticket = get_object_or_404(queryset, name=pk) serializer = TicketSerializer(ticket) return Response(serializer.data)
# ... existing code ... class TicketViewSet(viewsets.ModelViewSet): queryset = Ticket.objects.order_by('-updated') serializer_class = TicketSerializer # ... rest of the code ...
f8ac907837e198ddac3d4ce9c5f72243c89b5ca1
config.py
config.py
host = 'http://mech-ai.appspot.com' try: from local_config import * # Override with config-local if exists except ImportError: pass
import os host_envs = { 'prod': 'http://mech-ai.appspot.com', 'dev': 'http://127.0.0.1:8080', } environment = os.getenv('ENV', 'dev') host = host_env.get('environment') username = os.getenv('USER') access_token = os.getenv('TOKEN') try: from local_config import * # Override with local settings if exists except ImportError: pass
Enable environment variables for settings
Enable environment variables for settings
Python
mit
supermitch/mech-ai,supermitch/mech-ai,supermitch/mech-ai
+ import os + + host_envs = { - host = 'http://mech-ai.appspot.com' + 'prod': 'http://mech-ai.appspot.com', + 'dev': 'http://127.0.0.1:8080', + } + + environment = os.getenv('ENV', 'dev') + host = host_env.get('environment') + + username = os.getenv('USER') + access_token = os.getenv('TOKEN') try: - from local_config import * # Override with config-local if exists + from local_config import * # Override with local settings if exists except ImportError: pass
Enable environment variables for settings
## Code Before: host = 'http://mech-ai.appspot.com' try: from local_config import * # Override with config-local if exists except ImportError: pass ## Instruction: Enable environment variables for settings ## Code After: import os host_envs = { 'prod': 'http://mech-ai.appspot.com', 'dev': 'http://127.0.0.1:8080', } environment = os.getenv('ENV', 'dev') host = host_env.get('environment') username = os.getenv('USER') access_token = os.getenv('TOKEN') try: from local_config import * # Override with local settings if exists except ImportError: pass
... import os host_envs = { 'prod': 'http://mech-ai.appspot.com', 'dev': 'http://127.0.0.1:8080', } environment = os.getenv('ENV', 'dev') host = host_env.get('environment') username = os.getenv('USER') access_token = os.getenv('TOKEN') ... try: from local_config import * # Override with local settings if exists except ImportError: ...
76166f243b9f5f21582c95a843ddfa174ded8602
PyFVCOM/__init__.py
PyFVCOM/__init__.py
__version__ = '1.6.2' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = '[email protected]' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ll2utm as coordinate_tools from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results from PyFVCOM import plot from PyFVCOM import utilities
__version__ = '1.6.2' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = '[email protected]' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ll2utm as coordinate_tools from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tidal_ellipse from PyFVCOM import tide_tools from PyFVCOM import process_results from PyFVCOM import read_results from PyFVCOM import plot from PyFVCOM import utilities
Put things in in alphabetical order.
Put things in in alphabetical order.
Python
mit
pwcazenave/PyFVCOM
__version__ = '1.6.2' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = '[email protected]' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ll2utm as coordinate_tools from PyFVCOM import ocean_tools from PyFVCOM import stats_tools + from PyFVCOM import tidal_ellipse from PyFVCOM import tide_tools - from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results from PyFVCOM import plot from PyFVCOM import utilities
Put things in in alphabetical order.
## Code Before: __version__ = '1.6.2' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = '[email protected]' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ll2utm as coordinate_tools from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results from PyFVCOM import plot from PyFVCOM import utilities ## Instruction: Put things in in alphabetical order. ## Code After: __version__ = '1.6.2' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = '[email protected]' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ll2utm as coordinate_tools from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tidal_ellipse from PyFVCOM import tide_tools from PyFVCOM import process_results from PyFVCOM import read_results from PyFVCOM import plot from PyFVCOM import utilities
... from PyFVCOM import stats_tools from PyFVCOM import tidal_ellipse from PyFVCOM import tide_tools from PyFVCOM import process_results ...
e72ab305e2a90433c07300f37f7ae6fa2901b9cc
app/auth/views.py
app/auth/views.py
from flask import render_template, redirect, request, url_for, flash from flask.ext.login import login_user, logout_user, login_required, \ current_user from . import auth from .forms import LoginForm, RegistrationForm from .. import db from ..models import User @auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user is not None: login_user(user) return redirect(request.args.get('next') or url_for('main.index')) flash('Invalid username or password.') return render_template('login.html', form=form) @auth.route('/logout') # @login_required def logout(): logout_user() flash('You have been logged out.') return redirect(url_for('main.index')) @auth.route('/register', methods=['GET', 'POST']) def register(): form = RegistrationForm() if form.validate_on_submit(): user = User(email=form.email.data, username=form.username.data, password=form.password.data) db.session.add(user) db.session.commit() flash('You successfully registered. Welcome!') return redirect(url_for('auth.login')) return render_template('register.html', form=form)
from flask import render_template, redirect, request, url_for, flash from flask.ext.login import login_user, logout_user, login_required, \ current_user from . import auth from .forms import LoginForm, RegistrationForm from ..models import User @auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user is not None: login_user(user) return redirect(request.args.get('next') or url_for('main.index')) flash('Invalid username or password.') return render_template('login.html', form=form) @auth.route('/logout') # @login_required def logout(): logout_user() flash('You have been logged out.') return redirect(url_for('main.index')) @auth.route('/register', methods=['GET', 'POST']) def register(): form = RegistrationForm() if form.validate_on_submit(): user = User(email=form.email.data, username=form.username.data, password=form.password.data) user.save() flash('You successfully registered. Welcome!') return redirect(url_for('auth.login')) return render_template('register.html', form=form)
Use newly added save on new users.
Use newly added save on new users.
Python
mit
guillaumededrie/flask-todolist,poulp/flask-todolist,guillaumededrie/flask-todolist,rtzll/flask-todolist,0xfoo/flask-todolist,polyfunc/flask-todolist,poulp/flask-todolist,rtzll/flask-todolist,polyfunc/flask-todolist,guillaumededrie/flask-todolist,0xfoo/flask-todolist,poulp/flask-todolist,0xfoo/flask-todolist,polyfunc/flask-todolist,rtzll/flask-todolist,rtzll/flask-todolist
from flask import render_template, redirect, request, url_for, flash from flask.ext.login import login_user, logout_user, login_required, \ current_user from . import auth from .forms import LoginForm, RegistrationForm - from .. import db from ..models import User @auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user is not None: login_user(user) return redirect(request.args.get('next') or url_for('main.index')) flash('Invalid username or password.') return render_template('login.html', form=form) @auth.route('/logout') # @login_required def logout(): logout_user() flash('You have been logged out.') return redirect(url_for('main.index')) @auth.route('/register', methods=['GET', 'POST']) def register(): form = RegistrationForm() if form.validate_on_submit(): user = User(email=form.email.data, username=form.username.data, password=form.password.data) + user.save() - db.session.add(user) - db.session.commit() flash('You successfully registered. Welcome!') return redirect(url_for('auth.login')) return render_template('register.html', form=form)
Use newly added save on new users.
## Code Before: from flask import render_template, redirect, request, url_for, flash from flask.ext.login import login_user, logout_user, login_required, \ current_user from . import auth from .forms import LoginForm, RegistrationForm from .. import db from ..models import User @auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user is not None: login_user(user) return redirect(request.args.get('next') or url_for('main.index')) flash('Invalid username or password.') return render_template('login.html', form=form) @auth.route('/logout') # @login_required def logout(): logout_user() flash('You have been logged out.') return redirect(url_for('main.index')) @auth.route('/register', methods=['GET', 'POST']) def register(): form = RegistrationForm() if form.validate_on_submit(): user = User(email=form.email.data, username=form.username.data, password=form.password.data) db.session.add(user) db.session.commit() flash('You successfully registered. Welcome!') return redirect(url_for('auth.login')) return render_template('register.html', form=form) ## Instruction: Use newly added save on new users. ## Code After: from flask import render_template, redirect, request, url_for, flash from flask.ext.login import login_user, logout_user, login_required, \ current_user from . 
import auth from .forms import LoginForm, RegistrationForm from ..models import User @auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user is not None: login_user(user) return redirect(request.args.get('next') or url_for('main.index')) flash('Invalid username or password.') return render_template('login.html', form=form) @auth.route('/logout') # @login_required def logout(): logout_user() flash('You have been logged out.') return redirect(url_for('main.index')) @auth.route('/register', methods=['GET', 'POST']) def register(): form = RegistrationForm() if form.validate_on_submit(): user = User(email=form.email.data, username=form.username.data, password=form.password.data) user.save() flash('You successfully registered. Welcome!') return redirect(url_for('auth.login')) return render_template('register.html', form=form)
... from .forms import LoginForm, RegistrationForm from ..models import User ... password=form.password.data) user.save() flash('You successfully registered. Welcome!') ...
ac5b9765d69139915f06bd52d96b1abaa3d41331
scuole/districts/views.py
scuole/districts/views.py
from __future__ import absolute_import, unicode_literals from django.views.generic import DetailView, ListView from .models import District class DistrictListView(ListView): queryset = District.objects.all().select_related('county__name') class DistrictDetailView(DetailView): queryset = District.objects.all().prefetch_related('stats__year') pk_url_kwarg = 'district_id' slug_url_kwarg = 'district_slug'
from __future__ import absolute_import, unicode_literals from django.views.generic import DetailView, ListView from .models import District class DistrictListView(ListView): queryset = District.objects.all().defer('shape') class DistrictDetailView(DetailView): queryset = District.objects.all().prefetch_related('stats__year') pk_url_kwarg = 'district_id' slug_url_kwarg = 'district_slug'
Remove unused select_related, defer the loading of shape for speed
Remove unused select_related, defer the loading of shape for speed
Python
mit
texastribune/scuole,texastribune/scuole,texastribune/scuole,texastribune/scuole
from __future__ import absolute_import, unicode_literals from django.views.generic import DetailView, ListView from .models import District class DistrictListView(ListView): - queryset = District.objects.all().select_related('county__name') + queryset = District.objects.all().defer('shape') class DistrictDetailView(DetailView): queryset = District.objects.all().prefetch_related('stats__year') pk_url_kwarg = 'district_id' slug_url_kwarg = 'district_slug'
Remove unused select_related, defer the loading of shape for speed
## Code Before: from __future__ import absolute_import, unicode_literals from django.views.generic import DetailView, ListView from .models import District class DistrictListView(ListView): queryset = District.objects.all().select_related('county__name') class DistrictDetailView(DetailView): queryset = District.objects.all().prefetch_related('stats__year') pk_url_kwarg = 'district_id' slug_url_kwarg = 'district_slug' ## Instruction: Remove unused select_related, defer the loading of shape for speed ## Code After: from __future__ import absolute_import, unicode_literals from django.views.generic import DetailView, ListView from .models import District class DistrictListView(ListView): queryset = District.objects.all().defer('shape') class DistrictDetailView(DetailView): queryset = District.objects.all().prefetch_related('stats__year') pk_url_kwarg = 'district_id' slug_url_kwarg = 'district_slug'
... class DistrictListView(ListView): queryset = District.objects.all().defer('shape') ...
9637218c8b544c397bcd5d433de47cafbfad973d
octodns/source/base.py
octodns/source/base.py
from __future__ import absolute_import, division, print_function, \ unicode_literals class BaseSource(object): def __init__(self, id): self.id = id if not getattr(self, 'log', False): raise NotImplementedError('Abstract base class, log property ' 'missing') if not hasattr(self, 'SUPPORTS_GEO'): raise NotImplementedError('Abstract base class, SUPPORTS_GEO ' 'property missing') if not hasattr(self, 'SUPPORTS'): raise NotImplementedError('Abstract base class, SUPPORTS ' 'property missing') def populate(self, zone, target=False): ''' Loads all zones the provider knows about When `target` is True the populate call is being made to load the current state of the provider. When `lenient` is True the populate call may skip record validation and do a "best effort" load of data. That will allow through some common, but not best practices stuff that we otherwise would reject. E.g. no trailing . or mising escapes for ;. ''' raise NotImplementedError('Abstract base class, populate method ' 'missing') def supports(self, record): return record._type in self.SUPPORTS def __repr__(self): return self.__class__.__name__
from __future__ import absolute_import, division, print_function, \ unicode_literals class BaseSource(object): def __init__(self, id): self.id = id if not getattr(self, 'log', False): raise NotImplementedError('Abstract base class, log property ' 'missing') if not hasattr(self, 'SUPPORTS_GEO'): raise NotImplementedError('Abstract base class, SUPPORTS_GEO ' 'property missing') if not hasattr(self, 'SUPPORTS'): raise NotImplementedError('Abstract base class, SUPPORTS ' 'property missing') def populate(self, zone, target=False, lenient=False): ''' Loads all zones the provider knows about When `target` is True the populate call is being made to load the current state of the provider. When `lenient` is True the populate call may skip record validation and do a "best effort" load of data. That will allow through some common, but not best practices stuff that we otherwise would reject. E.g. no trailing . or mising escapes for ;. ''' raise NotImplementedError('Abstract base class, populate method ' 'missing') def supports(self, record): return record._type in self.SUPPORTS def __repr__(self): return self.__class__.__name__
Add lenient to abstract BaseSource signature
Add lenient to abstract BaseSource signature
Python
mit
vanbroup/octodns,vanbroup/octodns,h-hwang/octodns,h-hwang/octodns
from __future__ import absolute_import, division, print_function, \ unicode_literals class BaseSource(object): def __init__(self, id): self.id = id if not getattr(self, 'log', False): raise NotImplementedError('Abstract base class, log property ' 'missing') if not hasattr(self, 'SUPPORTS_GEO'): raise NotImplementedError('Abstract base class, SUPPORTS_GEO ' 'property missing') if not hasattr(self, 'SUPPORTS'): raise NotImplementedError('Abstract base class, SUPPORTS ' 'property missing') - def populate(self, zone, target=False): + def populate(self, zone, target=False, lenient=False): ''' Loads all zones the provider knows about When `target` is True the populate call is being made to load the current state of the provider. When `lenient` is True the populate call may skip record validation and do a "best effort" load of data. That will allow through some common, but not best practices stuff that we otherwise would reject. E.g. no trailing . or mising escapes for ;. ''' raise NotImplementedError('Abstract base class, populate method ' 'missing') def supports(self, record): return record._type in self.SUPPORTS def __repr__(self): return self.__class__.__name__
Add lenient to abstract BaseSource signature
## Code Before: from __future__ import absolute_import, division, print_function, \ unicode_literals class BaseSource(object): def __init__(self, id): self.id = id if not getattr(self, 'log', False): raise NotImplementedError('Abstract base class, log property ' 'missing') if not hasattr(self, 'SUPPORTS_GEO'): raise NotImplementedError('Abstract base class, SUPPORTS_GEO ' 'property missing') if not hasattr(self, 'SUPPORTS'): raise NotImplementedError('Abstract base class, SUPPORTS ' 'property missing') def populate(self, zone, target=False): ''' Loads all zones the provider knows about When `target` is True the populate call is being made to load the current state of the provider. When `lenient` is True the populate call may skip record validation and do a "best effort" load of data. That will allow through some common, but not best practices stuff that we otherwise would reject. E.g. no trailing . or mising escapes for ;. ''' raise NotImplementedError('Abstract base class, populate method ' 'missing') def supports(self, record): return record._type in self.SUPPORTS def __repr__(self): return self.__class__.__name__ ## Instruction: Add lenient to abstract BaseSource signature ## Code After: from __future__ import absolute_import, division, print_function, \ unicode_literals class BaseSource(object): def __init__(self, id): self.id = id if not getattr(self, 'log', False): raise NotImplementedError('Abstract base class, log property ' 'missing') if not hasattr(self, 'SUPPORTS_GEO'): raise NotImplementedError('Abstract base class, SUPPORTS_GEO ' 'property missing') if not hasattr(self, 'SUPPORTS'): raise NotImplementedError('Abstract base class, SUPPORTS ' 'property missing') def populate(self, zone, target=False, lenient=False): ''' Loads all zones the provider knows about When `target` is True the populate call is being made to load the current state of the provider. 
When `lenient` is True the populate call may skip record validation and do a "best effort" load of data. That will allow through some common, but not best practices stuff that we otherwise would reject. E.g. no trailing . or mising escapes for ;. ''' raise NotImplementedError('Abstract base class, populate method ' 'missing') def supports(self, record): return record._type in self.SUPPORTS def __repr__(self): return self.__class__.__name__
# ... existing code ... def populate(self, zone, target=False, lenient=False): ''' # ... rest of the code ...
da90ddfd697da5be7e0c01f183e733bbc981fe85
app.py
app.py
from flask import Flask app = Flask(__name__) @app.route('/') def hello_world(): return 'Hello World!' if __name__ == '__main__': app.run()
from flask import Flask app = Flask(__name__) @app.route('/') @app.route('/index') def index(): return 'Hello World!' if __name__ == '__main__': app.run()
Index action ready with "Hello, World!" message
Index action ready with "Hello, World!" message
Python
mit
alexander-emelyanov/microblog,alexander-emelyanov/microblog
from flask import Flask app = Flask(__name__) @app.route('/') - def hello_world(): + @app.route('/index') + def index(): return 'Hello World!' if __name__ == '__main__': app.run()
Index action ready with "Hello, World!" message
## Code Before: from flask import Flask app = Flask(__name__) @app.route('/') def hello_world(): return 'Hello World!' if __name__ == '__main__': app.run() ## Instruction: Index action ready with "Hello, World!" message ## Code After: from flask import Flask app = Flask(__name__) @app.route('/') @app.route('/index') def index(): return 'Hello World!' if __name__ == '__main__': app.run()
// ... existing code ... @app.route('/') @app.route('/index') def index(): return 'Hello World!' // ... rest of the code ...
857cbff1e8ec6e4db4ac25ad10a41311f3afcd66
pombola/core/migrations/0049_del_userprofile.py
pombola/core/migrations/0049_del_userprofile.py
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models from django.db.utils import DatabaseError from django.contrib.contenttypes.models import ContentType class Migration(SchemaMigration): def forwards(self, orm): # Do the deletes in a separate transaction, as database errors when # deleting a table that does not exist would cause a transaction to be # rolled back db.start_transaction() ContentType.objects.filter(app_label='user_profile').delete() # Commit the deletes to the various tables. db.commit_transaction() try: db.delete_table('user_profile_userprofile') except DatabaseError: # table does not exist to delete, probably because the database was # not created at a time when the user_profile app was still in use. pass def backwards(self, orm): # There is no backwards - to create the user_profile tables again add the app # back in and letting its migrations do the work. pass models = {} complete_apps = ['user_profile']
import datetime from south.db import db from south.v2 import SchemaMigration from south.models import MigrationHistory from django.db import models from django.db.utils import DatabaseError from django.contrib.contenttypes.models import ContentType class Migration(SchemaMigration): def forwards(self, orm): # Do the deletes in a separate transaction, as database errors when # deleting a table that does not exist would cause a transaction to be # rolled back db.start_transaction() ContentType.objects.filter(app_label='user_profile').delete() # Remove the entries from South's tables as we don't want to leave # incorrect entries in there. MigrationHistory.objects.filter(app_name='user_profile').delete() # Commit the deletes to the various tables. db.commit_transaction() try: db.delete_table('user_profile_userprofile') except DatabaseError: # table does not exist to delete, probably because the database was # not created at a time when the user_profile app was still in use. pass def backwards(self, orm): # There is no backwards - to create the user_profile tables again add the app # back in and letting its migrations do the work. pass models = {} complete_apps = ['user_profile']
Delete entries from the South migration history too
Delete entries from the South migration history too
Python
agpl-3.0
mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,patricmutwiri/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola
import datetime from south.db import db from south.v2 import SchemaMigration + from south.models import MigrationHistory from django.db import models from django.db.utils import DatabaseError from django.contrib.contenttypes.models import ContentType class Migration(SchemaMigration): def forwards(self, orm): # Do the deletes in a separate transaction, as database errors when # deleting a table that does not exist would cause a transaction to be # rolled back db.start_transaction() ContentType.objects.filter(app_label='user_profile').delete() + + # Remove the entries from South's tables as we don't want to leave + # incorrect entries in there. + MigrationHistory.objects.filter(app_name='user_profile').delete() # Commit the deletes to the various tables. db.commit_transaction() try: db.delete_table('user_profile_userprofile') except DatabaseError: # table does not exist to delete, probably because the database was # not created at a time when the user_profile app was still in use. pass def backwards(self, orm): # There is no backwards - to create the user_profile tables again add the app # back in and letting its migrations do the work. pass models = {} complete_apps = ['user_profile']
Delete entries from the South migration history too
## Code Before: import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models from django.db.utils import DatabaseError from django.contrib.contenttypes.models import ContentType class Migration(SchemaMigration): def forwards(self, orm): # Do the deletes in a separate transaction, as database errors when # deleting a table that does not exist would cause a transaction to be # rolled back db.start_transaction() ContentType.objects.filter(app_label='user_profile').delete() # Commit the deletes to the various tables. db.commit_transaction() try: db.delete_table('user_profile_userprofile') except DatabaseError: # table does not exist to delete, probably because the database was # not created at a time when the user_profile app was still in use. pass def backwards(self, orm): # There is no backwards - to create the user_profile tables again add the app # back in and letting its migrations do the work. pass models = {} complete_apps = ['user_profile'] ## Instruction: Delete entries from the South migration history too ## Code After: import datetime from south.db import db from south.v2 import SchemaMigration from south.models import MigrationHistory from django.db import models from django.db.utils import DatabaseError from django.contrib.contenttypes.models import ContentType class Migration(SchemaMigration): def forwards(self, orm): # Do the deletes in a separate transaction, as database errors when # deleting a table that does not exist would cause a transaction to be # rolled back db.start_transaction() ContentType.objects.filter(app_label='user_profile').delete() # Remove the entries from South's tables as we don't want to leave # incorrect entries in there. MigrationHistory.objects.filter(app_name='user_profile').delete() # Commit the deletes to the various tables. 
db.commit_transaction() try: db.delete_table('user_profile_userprofile') except DatabaseError: # table does not exist to delete, probably because the database was # not created at a time when the user_profile app was still in use. pass def backwards(self, orm): # There is no backwards - to create the user_profile tables again add the app # back in and letting its migrations do the work. pass models = {} complete_apps = ['user_profile']
# ... existing code ... from south.v2 import SchemaMigration from south.models import MigrationHistory from django.db import models # ... modified code ... ContentType.objects.filter(app_label='user_profile').delete() # Remove the entries from South's tables as we don't want to leave # incorrect entries in there. MigrationHistory.objects.filter(app_name='user_profile').delete() # ... rest of the code ...
ba57b3c016ed3bc3c8db9ccc3c637c2c58de1e1d
reddit/admin.py
reddit/admin.py
from django.contrib import admin from reddit.models import RedditUser,Submission,Comment,Vote # Register your models here. class SubmissionInline(admin.TabularInline): model = Submission max_num = 10 class CommentsInline(admin.StackedInline): model = Comment max_num = 10 class SubmissionAdmin(admin.ModelAdmin): list_display = ('title', 'url', 'author') inlines = [CommentsInline] class RedditUserAdmin(admin.ModelAdmin): inlines = [ SubmissionInline, CommentsInline ] admin.site.register(RedditUser, RedditUserAdmin) admin.site.register(Submission, SubmissionAdmin) admin.site.register(Comment) admin.site.register(Vote)
from django.contrib import admin from reddit.models import RedditUser,Submission,Comment,Vote # Register your models here. class SubmissionInline(admin.TabularInline): model = Submission max_num = 10 class CommentsInline(admin.StackedInline): model = Comment max_num = 10 class SubmissionAdmin(admin.ModelAdmin): list_display = ('title', 'url', 'author') inlines = [CommentsInline] class RedditUserAdmin(admin.ModelAdmin): inlines = [ SubmissionInline, ] admin.site.register(RedditUser, RedditUserAdmin) admin.site.register(Submission, SubmissionAdmin) admin.site.register(Comment) admin.site.register(Vote)
Remove commentsInLine for RedditUser because there is no longer foreignKey from Comment to RedditUser
Remove commentsInLine for RedditUser because there is no longer foreignKey from Comment to RedditUser
Python
apache-2.0
Nikola-K/django_reddit,Nikola-K/django_reddit,Nikola-K/django_reddit
from django.contrib import admin from reddit.models import RedditUser,Submission,Comment,Vote # Register your models here. class SubmissionInline(admin.TabularInline): model = Submission max_num = 10 class CommentsInline(admin.StackedInline): model = Comment max_num = 10 class SubmissionAdmin(admin.ModelAdmin): list_display = ('title', 'url', 'author') inlines = [CommentsInline] class RedditUserAdmin(admin.ModelAdmin): inlines = [ SubmissionInline, - CommentsInline ] admin.site.register(RedditUser, RedditUserAdmin) admin.site.register(Submission, SubmissionAdmin) admin.site.register(Comment) admin.site.register(Vote)
Remove commentsInLine for RedditUser because there is no longer foreignKey from Comment to RedditUser
## Code Before: from django.contrib import admin from reddit.models import RedditUser,Submission,Comment,Vote # Register your models here. class SubmissionInline(admin.TabularInline): model = Submission max_num = 10 class CommentsInline(admin.StackedInline): model = Comment max_num = 10 class SubmissionAdmin(admin.ModelAdmin): list_display = ('title', 'url', 'author') inlines = [CommentsInline] class RedditUserAdmin(admin.ModelAdmin): inlines = [ SubmissionInline, CommentsInline ] admin.site.register(RedditUser, RedditUserAdmin) admin.site.register(Submission, SubmissionAdmin) admin.site.register(Comment) admin.site.register(Vote) ## Instruction: Remove commentsInLine for RedditUser because there is no longer foreignKey from Comment to RedditUser ## Code After: from django.contrib import admin from reddit.models import RedditUser,Submission,Comment,Vote # Register your models here. class SubmissionInline(admin.TabularInline): model = Submission max_num = 10 class CommentsInline(admin.StackedInline): model = Comment max_num = 10 class SubmissionAdmin(admin.ModelAdmin): list_display = ('title', 'url', 'author') inlines = [CommentsInline] class RedditUserAdmin(admin.ModelAdmin): inlines = [ SubmissionInline, ] admin.site.register(RedditUser, RedditUserAdmin) admin.site.register(Submission, SubmissionAdmin) admin.site.register(Comment) admin.site.register(Vote)
# ... existing code ... SubmissionInline, ] # ... rest of the code ...
f32ab8ebd509df7e815fb96189974e7db44af3e3
plugins/owner.py
plugins/owner.py
import inspect import traceback from curious import commands from curious.commands.context import Context from curious.commands.plugin import Plugin class Owner(Plugin): """ Owner-only commands. """ @commands.command(name="eval") async def _eval(self, ctx: Context, *, eval_str: str): msg = await ctx.channel.send("Evaluating...") try: result = eval(eval_str) if inspect.isawaitable(result): result = await result result = str(result) except Exception as e: tb = ''.join(traceback.format_exc()) result = tb fmtted = "```py\n{}\n```".format(result) await msg.edit(fmtted)
import inspect import traceback from curious import commands from curious.commands.context import Context from curious.commands.plugin import Plugin def is_owner(self, ctx: Context): return ctx.author.id == 141545699442425856 or ctx.message.author.id == ctx.bot.application_info.owner.id class Owner(Plugin): """ Owner-only commands. """ plugin_check = is_owner @commands.command(name="eval") async def _eval(self, ctx: Context, *, eval_str: str): msg = await ctx.channel.send("Evaluating...") try: result = eval(eval_str) if inspect.isawaitable(result): result = await result result = str(result) except Exception as e: tb = ''.join(traceback.format_exc()) result = tb fmtted = "```py\n{}\n```".format(result) await msg.edit(fmtted) @commands.command(name="load", invokation_checks=[is_owner]) async def _load(self, ctx: Context, *, import_name: str): """ Loads a plugin. """ await self.bot.load_plugins_from(import_name) await ctx.message.channel.send(":heavy_check_mark: Loaded.") @commands.command(name="unload", invokation_checks=[is_owner]) async def _unload(self, ctx: Context, *, import_name: str): """ Unloads a plugin. """ await self.bot.unload_plugins_from(import_name) await ctx.message.channel.send(":heavy_check_mark: Unloaded.")
Add load and unload commands.
Add load and unload commands.
Python
mit
SunDwarf/curiosity
import inspect import traceback from curious import commands from curious.commands.context import Context from curious.commands.plugin import Plugin + def is_owner(self, ctx: Context): + return ctx.author.id == 141545699442425856 or ctx.message.author.id == ctx.bot.application_info.owner.id + + class Owner(Plugin): """ Owner-only commands. """ + plugin_check = is_owner + @commands.command(name="eval") async def _eval(self, ctx: Context, *, eval_str: str): msg = await ctx.channel.send("Evaluating...") try: result = eval(eval_str) if inspect.isawaitable(result): result = await result result = str(result) except Exception as e: tb = ''.join(traceback.format_exc()) result = tb fmtted = "```py\n{}\n```".format(result) await msg.edit(fmtted) + @commands.command(name="load", invokation_checks=[is_owner]) + async def _load(self, ctx: Context, *, import_name: str): + """ + Loads a plugin. + """ + await self.bot.load_plugins_from(import_name) + await ctx.message.channel.send(":heavy_check_mark: Loaded.") + + @commands.command(name="unload", invokation_checks=[is_owner]) + async def _unload(self, ctx: Context, *, import_name: str): + """ + Unloads a plugin. + """ + await self.bot.unload_plugins_from(import_name) + await ctx.message.channel.send(":heavy_check_mark: Unloaded.") +
Add load and unload commands.
## Code Before: import inspect import traceback from curious import commands from curious.commands.context import Context from curious.commands.plugin import Plugin class Owner(Plugin): """ Owner-only commands. """ @commands.command(name="eval") async def _eval(self, ctx: Context, *, eval_str: str): msg = await ctx.channel.send("Evaluating...") try: result = eval(eval_str) if inspect.isawaitable(result): result = await result result = str(result) except Exception as e: tb = ''.join(traceback.format_exc()) result = tb fmtted = "```py\n{}\n```".format(result) await msg.edit(fmtted) ## Instruction: Add load and unload commands. ## Code After: import inspect import traceback from curious import commands from curious.commands.context import Context from curious.commands.plugin import Plugin def is_owner(self, ctx: Context): return ctx.author.id == 141545699442425856 or ctx.message.author.id == ctx.bot.application_info.owner.id class Owner(Plugin): """ Owner-only commands. """ plugin_check = is_owner @commands.command(name="eval") async def _eval(self, ctx: Context, *, eval_str: str): msg = await ctx.channel.send("Evaluating...") try: result = eval(eval_str) if inspect.isawaitable(result): result = await result result = str(result) except Exception as e: tb = ''.join(traceback.format_exc()) result = tb fmtted = "```py\n{}\n```".format(result) await msg.edit(fmtted) @commands.command(name="load", invokation_checks=[is_owner]) async def _load(self, ctx: Context, *, import_name: str): """ Loads a plugin. """ await self.bot.load_plugins_from(import_name) await ctx.message.channel.send(":heavy_check_mark: Loaded.") @commands.command(name="unload", invokation_checks=[is_owner]) async def _unload(self, ctx: Context, *, import_name: str): """ Unloads a plugin. """ await self.bot.unload_plugins_from(import_name) await ctx.message.channel.send(":heavy_check_mark: Unloaded.")
# ... existing code ... def is_owner(self, ctx: Context): return ctx.author.id == 141545699442425856 or ctx.message.author.id == ctx.bot.application_info.owner.id class Owner(Plugin): # ... modified code ... """ plugin_check = is_owner @commands.command(name="eval") ... await msg.edit(fmtted) @commands.command(name="load", invokation_checks=[is_owner]) async def _load(self, ctx: Context, *, import_name: str): """ Loads a plugin. """ await self.bot.load_plugins_from(import_name) await ctx.message.channel.send(":heavy_check_mark: Loaded.") @commands.command(name="unload", invokation_checks=[is_owner]) async def _unload(self, ctx: Context, *, import_name: str): """ Unloads a plugin. """ await self.bot.unload_plugins_from(import_name) await ctx.message.channel.send(":heavy_check_mark: Unloaded.") # ... rest of the code ...
0c6a5c55df5680bd8589f1040f2f16cf6aac86b3
openprescribing/frontend/migrations/0030_add_ccg_centroids.py
openprescribing/frontend/migrations/0030_add_ccg_centroids.py
from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), # This is now commented out because the SQL generated to execute # set_centroids_without_args includes a reference to fiels which aren't # created until migration 36. # migrations.RunPython(set_centroids_without_args), ]
from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), ]
Remove commented-out RunPython from migration
Remove commented-out RunPython from migration
Python
mit
ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing
from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), - - # This is now commented out because the SQL generated to execute - # set_centroids_without_args includes a reference to fiels which aren't - # created until migration 36. - - # migrations.RunPython(set_centroids_without_args), ]
Remove commented-out RunPython from migration
## Code Before: from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), # This is now commented out because the SQL generated to execute # set_centroids_without_args includes a reference to fiels which aren't # created until migration 36. # migrations.RunPython(set_centroids_without_args), ] ## Instruction: Remove commented-out RunPython from migration ## Code After: from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), ]
# ... existing code ... ), ] # ... rest of the code ...
fa2ac624bc33add0e88f158c525885eef8bf555b
user_management/models/tests/factories.py
user_management/models/tests/factories.py
import factory from django.contrib.auth import get_user_model class UserFactory(factory.DjangoModelFactory): FACTORY_FOR = get_user_model() name = factory.Sequence(lambda i: 'Test User {}'.format(i)) email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
import factory from django.contrib.auth import get_user_model class UserFactory(factory.DjangoModelFactory): FACTORY_FOR = get_user_model() name = factory.Sequence(lambda i: 'Test User {}'.format(i)) email = factory.Sequence(lambda i: 'email{}@example.com'.format(i)) @classmethod def _prepare(cls, create, **kwargs): password = kwargs.pop('password', None) user = super(UserFactory, cls)._prepare(create=False, **kwargs) user.set_password(password) user.raw_password = password if create: user.save() return user
Improve password handling in UserFactory.
Improve password handling in UserFactory.
Python
bsd-2-clause
incuna/django-user-management,incuna/django-user-management
import factory from django.contrib.auth import get_user_model class UserFactory(factory.DjangoModelFactory): FACTORY_FOR = get_user_model() name = factory.Sequence(lambda i: 'Test User {}'.format(i)) email = factory.Sequence(lambda i: 'email{}@example.com'.format(i)) + @classmethod + def _prepare(cls, create, **kwargs): + password = kwargs.pop('password', None) + user = super(UserFactory, cls)._prepare(create=False, **kwargs) + user.set_password(password) + user.raw_password = password + if create: + user.save() + return user +
Improve password handling in UserFactory.
## Code Before: import factory from django.contrib.auth import get_user_model class UserFactory(factory.DjangoModelFactory): FACTORY_FOR = get_user_model() name = factory.Sequence(lambda i: 'Test User {}'.format(i)) email = factory.Sequence(lambda i: 'email{}@example.com'.format(i)) ## Instruction: Improve password handling in UserFactory. ## Code After: import factory from django.contrib.auth import get_user_model class UserFactory(factory.DjangoModelFactory): FACTORY_FOR = get_user_model() name = factory.Sequence(lambda i: 'Test User {}'.format(i)) email = factory.Sequence(lambda i: 'email{}@example.com'.format(i)) @classmethod def _prepare(cls, create, **kwargs): password = kwargs.pop('password', None) user = super(UserFactory, cls)._prepare(create=False, **kwargs) user.set_password(password) user.raw_password = password if create: user.save() return user
... email = factory.Sequence(lambda i: 'email{}@example.com'.format(i)) @classmethod def _prepare(cls, create, **kwargs): password = kwargs.pop('password', None) user = super(UserFactory, cls)._prepare(create=False, **kwargs) user.set_password(password) user.raw_password = password if create: user.save() return user ...
96b3911faadc22a07176c9338420ac8cd9fb06e5
tests/test_vector2_scale.py
tests/test_vector2_scale.py
import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose from utils import vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0
import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose from utils import angle_isclose, vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0 @given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75)) def test_scale_aligned(x: Vector2, length: float): """Test that the length of x.scale_to(length) is length.""" assume(length > 0) try: assert angle_isclose(x.scale_to(length).angle(x), 0) except ZeroDivisionError: assert x == (0, 0)
Test that scaling doesn't rotate vectors
Test that scaling doesn't rotate vectors
Python
artistic-2.0
ppb/ppb-vector,ppb/ppb-vector
import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose - from utils import vectors + from utils import angle_isclose, vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0 + + @given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75)) + def test_scale_aligned(x: Vector2, length: float): + """Test that the length of x.scale_to(length) is length.""" + assume(length > 0) + try: + assert angle_isclose(x.scale_to(length).angle(x), 0) + except ZeroDivisionError: + assert x == (0, 0) +
Test that scaling doesn't rotate vectors
## Code Before: import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose from utils import vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0 ## Instruction: Test that scaling doesn't rotate vectors ## Code After: import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose from utils import angle_isclose, vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0 @given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75)) def test_scale_aligned(x: Vector2, length: float): """Test that the length of x.scale_to(length) is length.""" assume(length > 0) try: assert angle_isclose(x.scale_to(length).angle(x), 0) except ZeroDivisionError: assert x == (0, 0)
# ... existing code ... from math import isclose from utils import angle_isclose, vectors # ... modified code ... assert l < 0 @given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75)) def test_scale_aligned(x: Vector2, length: float): """Test that the length of x.scale_to(length) is length.""" assume(length > 0) try: assert angle_isclose(x.scale_to(length).angle(x), 0) except ZeroDivisionError: assert x == (0, 0) # ... rest of the code ...
53bbb9bfa6fdc1e946365e746b1acf4b03a0635e
regulations/templatetags/in_context.py
regulations/templatetags/in_context.py
from django import template register = template.Library() class InContextNode(template.Node): def __init__(self, nodelist, subcontext_names): self.nodelist = nodelist self.subcontext_names = subcontext_names def render(self, context): new_context = {} for field in self.subcontext_names: value = context.get(field, {}) if isinstance(value, dict): new_context.update(context.get(field, {})) else: new_context[field] = value return self.nodelist.render(template.Context(new_context)) @register.tag('begincontext') def in_context(parser, token): """ Replaces the context (inside of this block) for easy (and safe) inclusion of sub-content. For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}} 1: {{ name }} {{ size }} {% begincontext sub %} 2: {{ name }} {{ size }} {% endcontext %} 3: {{ name }} {{ size }} Will print 1: Kitty 2: 5 3: Kitty Arguments which are not dictionaries will 'cascade' into the inner context. """ nodelist = parser.parse(('endcontext',)) parser.delete_first_token() return InContextNode(nodelist, token.split_contents()[1:])
from django import template register = template.Library() class InContextNode(template.Node): def __init__(self, nodelist, subcontext_names): self.nodelist = nodelist self.subcontext_names = subcontext_names def render(self, context): new_context = {} for field in self.subcontext_names: value = context.get(field, {}) if isinstance(value, dict): new_context.update(context.get(field, {})) else: new_context[field] = value new_context = context.new(new_context) return self.nodelist.render(new_context) @register.tag('begincontext') def in_context(parser, token): """ Replaces the context (inside of this block) for easy (and safe) inclusion of sub-content. For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}} 1: {{ name }} {{ size }} {% begincontext sub %} 2: {{ name }} {{ size }} {% endcontext %} 3: {{ name }} {{ size }} Will print 1: Kitty 2: 5 3: Kitty Arguments which are not dictionaries will 'cascade' into the inner context. """ nodelist = parser.parse(('endcontext',)) parser.delete_first_token() return InContextNode(nodelist, token.split_contents()[1:])
Fix custom template tag to work with django 1.8
Fix custom template tag to work with django 1.8
Python
cc0-1.0
willbarton/regulations-site,grapesmoker/regulations-site,willbarton/regulations-site,willbarton/regulations-site,grapesmoker/regulations-site,willbarton/regulations-site,ascott1/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,ascott1/regulations-site
from django import template register = template.Library() class InContextNode(template.Node): def __init__(self, nodelist, subcontext_names): self.nodelist = nodelist self.subcontext_names = subcontext_names def render(self, context): new_context = {} for field in self.subcontext_names: value = context.get(field, {}) if isinstance(value, dict): new_context.update(context.get(field, {})) else: new_context[field] = value + new_context = context.new(new_context) - return self.nodelist.render(template.Context(new_context)) + return self.nodelist.render(new_context) @register.tag('begincontext') def in_context(parser, token): """ Replaces the context (inside of this block) for easy (and safe) inclusion of sub-content. For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}} 1: {{ name }} {{ size }} {% begincontext sub %} 2: {{ name }} {{ size }} {% endcontext %} 3: {{ name }} {{ size }} Will print 1: Kitty 2: 5 3: Kitty Arguments which are not dictionaries will 'cascade' into the inner context. """ nodelist = parser.parse(('endcontext',)) parser.delete_first_token() return InContextNode(nodelist, token.split_contents()[1:])
Fix custom template tag to work with django 1.8
## Code Before: from django import template register = template.Library() class InContextNode(template.Node): def __init__(self, nodelist, subcontext_names): self.nodelist = nodelist self.subcontext_names = subcontext_names def render(self, context): new_context = {} for field in self.subcontext_names: value = context.get(field, {}) if isinstance(value, dict): new_context.update(context.get(field, {})) else: new_context[field] = value return self.nodelist.render(template.Context(new_context)) @register.tag('begincontext') def in_context(parser, token): """ Replaces the context (inside of this block) for easy (and safe) inclusion of sub-content. For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}} 1: {{ name }} {{ size }} {% begincontext sub %} 2: {{ name }} {{ size }} {% endcontext %} 3: {{ name }} {{ size }} Will print 1: Kitty 2: 5 3: Kitty Arguments which are not dictionaries will 'cascade' into the inner context. """ nodelist = parser.parse(('endcontext',)) parser.delete_first_token() return InContextNode(nodelist, token.split_contents()[1:]) ## Instruction: Fix custom template tag to work with django 1.8 ## Code After: from django import template register = template.Library() class InContextNode(template.Node): def __init__(self, nodelist, subcontext_names): self.nodelist = nodelist self.subcontext_names = subcontext_names def render(self, context): new_context = {} for field in self.subcontext_names: value = context.get(field, {}) if isinstance(value, dict): new_context.update(context.get(field, {})) else: new_context[field] = value new_context = context.new(new_context) return self.nodelist.render(new_context) @register.tag('begincontext') def in_context(parser, token): """ Replaces the context (inside of this block) for easy (and safe) inclusion of sub-content. 
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}} 1: {{ name }} {{ size }} {% begincontext sub %} 2: {{ name }} {{ size }} {% endcontext %} 3: {{ name }} {{ size }} Will print 1: Kitty 2: 5 3: Kitty Arguments which are not dictionaries will 'cascade' into the inner context. """ nodelist = parser.parse(('endcontext',)) parser.delete_first_token() return InContextNode(nodelist, token.split_contents()[1:])
# ... existing code ... new_context[field] = value new_context = context.new(new_context) return self.nodelist.render(new_context) # ... rest of the code ...
4510a4a22965d002bd41293fd8fe629c8285800d
tests/test_errors.py
tests/test_errors.py
import pytest from pyxl.codec.register import pyxl_decode from pyxl.codec.parser import ParseError def test_malformed_if(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> this is incorrect! <else>bar</else> </frag>""") def test_multiple_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else>bar</else> <else>baz</else> </frag>""") def test_nested_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else><else>bar</else></else> </frag>""")
import pytest from pyxl.codec.register import pyxl_decode from pyxl.codec.parser import ParseError from pyxl.codec.html_tokenizer import BadCharError def test_malformed_if(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> this is incorrect! <else>bar</else> </frag>""") def test_multiple_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else>bar</else> <else>baz</else> </frag>""") def test_nested_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else><else>bar</else></else> </frag>""") def test_bad_char(): with pytest.raises(BadCharError): pyxl_decode(b"""<_bad_element></lm>""")
Add test for BadCharError exception.
Add test for BadCharError exception.
Python
apache-2.0
pyxl4/pyxl4
import pytest from pyxl.codec.register import pyxl_decode from pyxl.codec.parser import ParseError + from pyxl.codec.html_tokenizer import BadCharError def test_malformed_if(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> this is incorrect! <else>bar</else> </frag>""") def test_multiple_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else>bar</else> <else>baz</else> </frag>""") def test_nested_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else><else>bar</else></else> </frag>""") + def test_bad_char(): + with pytest.raises(BadCharError): + pyxl_decode(b"""<_bad_element></lm>""") +
Add test for BadCharError exception.
## Code Before: import pytest from pyxl.codec.register import pyxl_decode from pyxl.codec.parser import ParseError def test_malformed_if(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> this is incorrect! <else>bar</else> </frag>""") def test_multiple_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else>bar</else> <else>baz</else> </frag>""") def test_nested_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else><else>bar</else></else> </frag>""") ## Instruction: Add test for BadCharError exception. ## Code After: import pytest from pyxl.codec.register import pyxl_decode from pyxl.codec.parser import ParseError from pyxl.codec.html_tokenizer import BadCharError def test_malformed_if(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> this is incorrect! <else>bar</else> </frag>""") def test_multiple_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else>bar</else> <else>baz</else> </frag>""") def test_nested_else(): with pytest.raises(ParseError): pyxl_decode(b""" <frag> <if cond="{true}">foo</if> <else><else>bar</else></else> </frag>""") def test_bad_char(): with pytest.raises(BadCharError): pyxl_decode(b"""<_bad_element></lm>""")
// ... existing code ... from pyxl.codec.parser import ParseError from pyxl.codec.html_tokenizer import BadCharError // ... modified code ... </frag>""") def test_bad_char(): with pytest.raises(BadCharError): pyxl_decode(b"""<_bad_element></lm>""") // ... rest of the code ...
43ae9bdec900081d6ff91fc3847a4d8d9a42eaeb
contrib/plugins/w3cdate.py
contrib/plugins/w3cdate.py
__author__ = "Ted Leung <[email protected]>" __version__ = "$Id:" __copyright__ = "Copyright (c) 2003 Ted Leung" __license__ = "Python" import xml.utils.iso8601 import time def cb_prepare(args): request = args["request"] form = request.getHttp()['form'] config = request.getConfiguration() data = request.getData() entry_list = data['entry_list'] for i in range(len(entry_list)): entry = entry_list[i] entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(entry['timetuple']))
__author__ = "Ted Leung <[email protected]>" __version__ = "$Id:" __copyright__ = "Copyright (c) 2003 Ted Leung" __license__ = "Python" import xml.utils.iso8601 import time def cb_prepare(args): request = args["request"] form = request.getHttp()['form'] config = request.getConfiguration() data = request.getData() entry_list = data['entry_list'] for i in range(len(entry_list)): entry = entry_list[i] t = entry['timetuple'] # adjust for daylight savings time t = t[0],t[1],t[2],t[3]+time.localtime()[-1],t[4],t[5],t[6],t[7],t[8] entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(t))
Fix daylight savings time bug
Fix daylight savings time bug
Python
mit
daitangio/pyblosxom,daitangio/pyblosxom,willkg/douglas,willkg/douglas
__author__ = "Ted Leung <[email protected]>" __version__ = "$Id:" __copyright__ = "Copyright (c) 2003 Ted Leung" __license__ = "Python" import xml.utils.iso8601 import time def cb_prepare(args): request = args["request"] form = request.getHttp()['form'] config = request.getConfiguration() data = request.getData() entry_list = data['entry_list'] for i in range(len(entry_list)): entry = entry_list[i] + t = entry['timetuple'] + # adjust for daylight savings time + t = t[0],t[1],t[2],t[3]+time.localtime()[-1],t[4],t[5],t[6],t[7],t[8] - entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(entry['timetuple'])) + entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(t))
Fix daylight savings time bug
## Code Before: __author__ = "Ted Leung <[email protected]>" __version__ = "$Id:" __copyright__ = "Copyright (c) 2003 Ted Leung" __license__ = "Python" import xml.utils.iso8601 import time def cb_prepare(args): request = args["request"] form = request.getHttp()['form'] config = request.getConfiguration() data = request.getData() entry_list = data['entry_list'] for i in range(len(entry_list)): entry = entry_list[i] entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(entry['timetuple'])) ## Instruction: Fix daylight savings time bug ## Code After: __author__ = "Ted Leung <[email protected]>" __version__ = "$Id:" __copyright__ = "Copyright (c) 2003 Ted Leung" __license__ = "Python" import xml.utils.iso8601 import time def cb_prepare(args): request = args["request"] form = request.getHttp()['form'] config = request.getConfiguration() data = request.getData() entry_list = data['entry_list'] for i in range(len(entry_list)): entry = entry_list[i] t = entry['timetuple'] # adjust for daylight savings time t = t[0],t[1],t[2],t[3]+time.localtime()[-1],t[4],t[5],t[6],t[7],t[8] entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(t))
... entry = entry_list[i] t = entry['timetuple'] # adjust for daylight savings time t = t[0],t[1],t[2],t[3]+time.localtime()[-1],t[4],t[5],t[6],t[7],t[8] entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(t)) ...
a560868cd6658a4a5134ce8d53a03a5f86d92d6d
dimensionful/common_units.py
dimensionful/common_units.py
from sympy.core import Integer from dimensionful.dimensions import * from dimensionful.units import Unit, unit_symbols_dict # cgs base units g = Unit("g") cm = Unit("cm") s = Unit("s") K = Unit("K") # other cgs dyne = Unit("dyne") erg = Unit("erg") esu = Unit("esu") # SI stuff m = Unit("m") # times minute = Unit("min") # can't use `min` because of Python keyword :( hr = Unit("hr") day = Unit("day") yr = Unit("yr") # solar units Msun = Unit("Msun") Rsun = Unit("Rsun") Lsun = Unit("Lsun") Tsum = Unit("Tsun") # astro distances AU = Unit("AU") pc = Unit("pc") ly = Unit("ly") gauss = Unit("gauss")
from dimensionful.dimensions import * from dimensionful.units import Unit # cgs base units g = Unit("g") cm = Unit("cm") s = Unit("s") K = Unit("K") # other cgs dyne = Unit("dyne") erg = Unit("erg") esu = Unit("esu") # SI stuff m = Unit("m") # times minute = Unit("min") # can't use `min` because of Python keyword :( hr = Unit("hr") day = Unit("day") yr = Unit("yr") # solar units Msun = Unit("Msun") Rsun = Unit("Rsun") Lsun = Unit("Lsun") Tsum = Unit("Tsun") # astro distances AU = Unit("AU") pc = Unit("pc") ly = Unit("ly") gauss = Unit("gauss")
Remove imports we don't need anymore.
Remove imports we don't need anymore.
Python
bsd-2-clause
caseywstark/dimensionful
- - from sympy.core import Integer from dimensionful.dimensions import * - from dimensionful.units import Unit, unit_symbols_dict + from dimensionful.units import Unit # cgs base units g = Unit("g") cm = Unit("cm") s = Unit("s") K = Unit("K") # other cgs dyne = Unit("dyne") erg = Unit("erg") esu = Unit("esu") # SI stuff m = Unit("m") # times minute = Unit("min") # can't use `min` because of Python keyword :( hr = Unit("hr") day = Unit("day") yr = Unit("yr") # solar units Msun = Unit("Msun") Rsun = Unit("Rsun") Lsun = Unit("Lsun") Tsum = Unit("Tsun") # astro distances AU = Unit("AU") pc = Unit("pc") ly = Unit("ly") gauss = Unit("gauss")
Remove imports we don't need anymore.
## Code Before: from sympy.core import Integer from dimensionful.dimensions import * from dimensionful.units import Unit, unit_symbols_dict # cgs base units g = Unit("g") cm = Unit("cm") s = Unit("s") K = Unit("K") # other cgs dyne = Unit("dyne") erg = Unit("erg") esu = Unit("esu") # SI stuff m = Unit("m") # times minute = Unit("min") # can't use `min` because of Python keyword :( hr = Unit("hr") day = Unit("day") yr = Unit("yr") # solar units Msun = Unit("Msun") Rsun = Unit("Rsun") Lsun = Unit("Lsun") Tsum = Unit("Tsun") # astro distances AU = Unit("AU") pc = Unit("pc") ly = Unit("ly") gauss = Unit("gauss") ## Instruction: Remove imports we don't need anymore. ## Code After: from dimensionful.dimensions import * from dimensionful.units import Unit # cgs base units g = Unit("g") cm = Unit("cm") s = Unit("s") K = Unit("K") # other cgs dyne = Unit("dyne") erg = Unit("erg") esu = Unit("esu") # SI stuff m = Unit("m") # times minute = Unit("min") # can't use `min` because of Python keyword :( hr = Unit("hr") day = Unit("day") yr = Unit("yr") # solar units Msun = Unit("Msun") Rsun = Unit("Rsun") Lsun = Unit("Lsun") Tsum = Unit("Tsun") # astro distances AU = Unit("AU") pc = Unit("pc") ly = Unit("ly") gauss = Unit("gauss")
// ... existing code ... // ... modified code ... from dimensionful.dimensions import * from dimensionful.units import Unit // ... rest of the code ...
df9691aecf19d31eab1f52f7d735ed746877ffac
dache/__init__.py
dache/__init__.py
from six.moves.urllib.parse import urlparse from dache.backends.base import CacheKeyWarning # noqa from dache.backends.filebased import FileBasedCache from dache.backends.locmem import LocMemCache from dache.backends.redis import RedisCache from dache.utils.module_loading import import_string __version__ = '0.0.1' __all__ = ('register_backend', 'Cache', 'CacheKeyWarning') _BACKENDS = { 'file': FileBasedCache, 'locmem': LocMemCache, 'redis': RedisCache, } def register_backend(url_scheme, backend_class): """Register a cache backend.""" _BACKENDS[url_scheme] = backend_class class Cache(object): def __init__(self, url, **options): # Create cache backend result = urlparse(url) backend_class = _BACKENDS[result.scheme] if isinstance(backend_class, basestring): backend_class = import_string(backend_class) self._backend = backend_class(result, **options) public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key', 'incr', 'decr', 'set_many', 'delete_many', 'clear', 'validate_key', 'incr_version', 'decr_version', 'close') for method in public_methods: setattr(self, method, getattr(self._backend, method)) def __contains__(self, item): return item in self._backend
import six from six.moves.urllib.parse import urlparse from dache.backends.base import CacheKeyWarning # noqa from dache.backends.filebased import FileBasedCache from dache.backends.locmem import LocMemCache from dache.backends.redis import RedisCache from dache.utils.module_loading import import_string __version__ = '0.0.1' __all__ = ('register_backend', 'Cache', 'CacheKeyWarning') _BACKENDS = { 'file': FileBasedCache, 'locmem': LocMemCache, 'redis': RedisCache, } def register_backend(url_scheme, backend_class): """Register a cache backend.""" _BACKENDS[url_scheme] = backend_class class Cache(object): def __init__(self, url, **options): # Create cache backend result = urlparse(url) backend_class = _BACKENDS[result.scheme] if isinstance(backend_class, six.string_types): backend_class = import_string(backend_class) self._backend = backend_class(result, **options) public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key', 'incr', 'decr', 'set_many', 'delete_many', 'clear', 'validate_key', 'incr_version', 'decr_version', 'close') for method in public_methods: setattr(self, method, getattr(self._backend, method)) def __contains__(self, item): return item in self._backend
Fix Python 3 string type checking
Fix Python 3 string type checking
Python
bsd-3-clause
eliangcs/dache
+ import six + from six.moves.urllib.parse import urlparse from dache.backends.base import CacheKeyWarning # noqa from dache.backends.filebased import FileBasedCache from dache.backends.locmem import LocMemCache from dache.backends.redis import RedisCache from dache.utils.module_loading import import_string __version__ = '0.0.1' __all__ = ('register_backend', 'Cache', 'CacheKeyWarning') _BACKENDS = { 'file': FileBasedCache, 'locmem': LocMemCache, 'redis': RedisCache, } def register_backend(url_scheme, backend_class): """Register a cache backend.""" _BACKENDS[url_scheme] = backend_class class Cache(object): def __init__(self, url, **options): # Create cache backend result = urlparse(url) backend_class = _BACKENDS[result.scheme] - if isinstance(backend_class, basestring): + if isinstance(backend_class, six.string_types): backend_class = import_string(backend_class) self._backend = backend_class(result, **options) public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key', 'incr', 'decr', 'set_many', 'delete_many', 'clear', 'validate_key', 'incr_version', 'decr_version', 'close') for method in public_methods: setattr(self, method, getattr(self._backend, method)) def __contains__(self, item): return item in self._backend
Fix Python 3 string type checking
## Code Before: from six.moves.urllib.parse import urlparse from dache.backends.base import CacheKeyWarning # noqa from dache.backends.filebased import FileBasedCache from dache.backends.locmem import LocMemCache from dache.backends.redis import RedisCache from dache.utils.module_loading import import_string __version__ = '0.0.1' __all__ = ('register_backend', 'Cache', 'CacheKeyWarning') _BACKENDS = { 'file': FileBasedCache, 'locmem': LocMemCache, 'redis': RedisCache, } def register_backend(url_scheme, backend_class): """Register a cache backend.""" _BACKENDS[url_scheme] = backend_class class Cache(object): def __init__(self, url, **options): # Create cache backend result = urlparse(url) backend_class = _BACKENDS[result.scheme] if isinstance(backend_class, basestring): backend_class = import_string(backend_class) self._backend = backend_class(result, **options) public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key', 'incr', 'decr', 'set_many', 'delete_many', 'clear', 'validate_key', 'incr_version', 'decr_version', 'close') for method in public_methods: setattr(self, method, getattr(self._backend, method)) def __contains__(self, item): return item in self._backend ## Instruction: Fix Python 3 string type checking ## Code After: import six from six.moves.urllib.parse import urlparse from dache.backends.base import CacheKeyWarning # noqa from dache.backends.filebased import FileBasedCache from dache.backends.locmem import LocMemCache from dache.backends.redis import RedisCache from dache.utils.module_loading import import_string __version__ = '0.0.1' __all__ = ('register_backend', 'Cache', 'CacheKeyWarning') _BACKENDS = { 'file': FileBasedCache, 'locmem': LocMemCache, 'redis': RedisCache, } def register_backend(url_scheme, backend_class): """Register a cache backend.""" _BACKENDS[url_scheme] = backend_class class Cache(object): def __init__(self, url, **options): # Create cache backend result = urlparse(url) backend_class = _BACKENDS[result.scheme] if 
isinstance(backend_class, six.string_types): backend_class = import_string(backend_class) self._backend = backend_class(result, **options) public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key', 'incr', 'decr', 'set_many', 'delete_many', 'clear', 'validate_key', 'incr_version', 'decr_version', 'close') for method in public_methods: setattr(self, method, getattr(self._backend, method)) def __contains__(self, item): return item in self._backend
# ... existing code ... import six from six.moves.urllib.parse import urlparse # ... modified code ... backend_class = _BACKENDS[result.scheme] if isinstance(backend_class, six.string_types): backend_class = import_string(backend_class) # ... rest of the code ...
5b18ec2219cbdfa479a1d32f9bf62f7460171f09
live_studio/queue/models.py
live_studio/queue/models.py
import datetime from django.db import models from .managers import EntryManager class Entry(models.Model): config = models.ForeignKey('config.Config') enqueued = models.DateTimeField(default=datetime.datetime.utcnow) started = models.DateTimeField(null=True) finished = models.DateTimeField(null=True) success = models.BooleanField(default=False) objects = EntryManager() class Meta: ordering = ('-enqueued',)
import datetime from django.db import models from .managers import EntryManager class Entry(models.Model): config = models.ForeignKey('config.Config') enqueued = models.DateTimeField(default=datetime.datetime.utcnow) started = models.DateTimeField(null=True) finished = models.DateTimeField(null=True) success = models.BooleanField(default=False) objects = EntryManager() class Meta: ordering = ('-enqueued',) verbose_name_plural = 'Entries'
Set verbose_name_plural properly for queue.Entry.
Set verbose_name_plural properly for queue.Entry. Signed-off-by: Chris Lamb <[email protected]>
Python
agpl-3.0
debian-live/live-studio,debian-live/live-studio,lamby/live-studio,lamby/live-studio,lamby/live-studio,debian-live/live-studio
import datetime from django.db import models from .managers import EntryManager class Entry(models.Model): config = models.ForeignKey('config.Config') enqueued = models.DateTimeField(default=datetime.datetime.utcnow) started = models.DateTimeField(null=True) finished = models.DateTimeField(null=True) success = models.BooleanField(default=False) objects = EntryManager() class Meta: ordering = ('-enqueued',) + verbose_name_plural = 'Entries'
Set verbose_name_plural properly for queue.Entry.
## Code Before: import datetime from django.db import models from .managers import EntryManager class Entry(models.Model): config = models.ForeignKey('config.Config') enqueued = models.DateTimeField(default=datetime.datetime.utcnow) started = models.DateTimeField(null=True) finished = models.DateTimeField(null=True) success = models.BooleanField(default=False) objects = EntryManager() class Meta: ordering = ('-enqueued',) ## Instruction: Set verbose_name_plural properly for queue.Entry. ## Code After: import datetime from django.db import models from .managers import EntryManager class Entry(models.Model): config = models.ForeignKey('config.Config') enqueued = models.DateTimeField(default=datetime.datetime.utcnow) started = models.DateTimeField(null=True) finished = models.DateTimeField(null=True) success = models.BooleanField(default=False) objects = EntryManager() class Meta: ordering = ('-enqueued',) verbose_name_plural = 'Entries'
... ordering = ('-enqueued',) verbose_name_plural = 'Entries' ...
2a6f0f7fbb655c568a42493e1181aeef9fa1ead1
test_setup.py
test_setup.py
"""Test setup.py.""" import os import subprocess import sys def test_setup(): """Run setup.py check.""" command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict'] assert subprocess.run(command).returncode == 0 def test_console_scripts(): """Ensure console scripts were installed correctly.""" assert any( os.path.isfile(os.path.join(directory, 'backlog')) for directory in sys.path )
"""Test setup.py.""" import os import subprocess import sys def test_setup(): """Run setup.py check.""" command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict'] assert subprocess.run(command).returncode == 0 def test_console_scripts(): """Ensure console scripts were installed correctly.""" assert any( os.path.isfile(os.path.join(directory, 'backlog')) for directory in os.environ['PATH'].split(':') )
Use $PATH instead of sys.path
Use $PATH instead of sys.path
Python
lgpl-2.1
dmtucker/backlog
"""Test setup.py.""" import os import subprocess import sys def test_setup(): """Run setup.py check.""" command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict'] assert subprocess.run(command).returncode == 0 def test_console_scripts(): """Ensure console scripts were installed correctly.""" assert any( os.path.isfile(os.path.join(directory, 'backlog')) - for directory in sys.path + for directory in os.environ['PATH'].split(':') )
Use $PATH instead of sys.path
## Code Before: """Test setup.py.""" import os import subprocess import sys def test_setup(): """Run setup.py check.""" command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict'] assert subprocess.run(command).returncode == 0 def test_console_scripts(): """Ensure console scripts were installed correctly.""" assert any( os.path.isfile(os.path.join(directory, 'backlog')) for directory in sys.path ) ## Instruction: Use $PATH instead of sys.path ## Code After: """Test setup.py.""" import os import subprocess import sys def test_setup(): """Run setup.py check.""" command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict'] assert subprocess.run(command).returncode == 0 def test_console_scripts(): """Ensure console scripts were installed correctly.""" assert any( os.path.isfile(os.path.join(directory, 'backlog')) for directory in os.environ['PATH'].split(':') )
// ... existing code ... os.path.isfile(os.path.join(directory, 'backlog')) for directory in os.environ['PATH'].split(':') ) // ... rest of the code ...
f40fca40d5e09d7ae64acab1258f58cea6810662
setup.py
setup.py
from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info from os.path import join flags = ['-W', '-Wall', '-march=opteron', '-O3'] def configuration(parent_package='', top_path=None): config = Configuration('scattering', parent_package, top_path, author = 'Ryan May', author_email = '[email protected]', platforms = ['Linux'], description = 'Software for simulating weather radar data.', url = 'http://weather.ou.edu/~rmay/research.html') lapack = get_info('lapack_opt') sources = ['ampld.lp.pyf', 'ampld.lp.f', 'modified_double_precision_drop.f'] config.add_extension('_tmatrix', [join('src', f) for f in sources], extra_compile_args=flags, **lapack) return config setup(**configuration(top_path='').todict())
from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info from os.path import join flags = ['-W', '-Wall', '-march=opteron', '-O3'] def configuration(parent_package='', top_path=None): config = Configuration('scattering', parent_package, top_path, version='0.8', author = 'Ryan May', author_email = '[email protected]', platforms = ['Linux'], description = 'Software for simulating weather radar data.', url = 'http://weather.ou.edu/~rmay/research.html') lapack = get_info('lapack_opt') sources = ['ampld.lp.pyf', 'ampld.lp.f', 'modified_double_precision_drop.f'] config.add_extension('_tmatrix', [join('src', f) for f in sources], extra_compile_args=flags, **lapack) return config setup(**configuration(top_path='').todict())
Add a version number for scattering.
Add a version number for scattering.
Python
bsd-2-clause
dopplershift/Scattering
from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info from os.path import join flags = ['-W', '-Wall', '-march=opteron', '-O3'] def configuration(parent_package='', top_path=None): config = Configuration('scattering', parent_package, top_path, + version='0.8', author = 'Ryan May', author_email = '[email protected]', platforms = ['Linux'], description = 'Software for simulating weather radar data.', url = 'http://weather.ou.edu/~rmay/research.html') lapack = get_info('lapack_opt') sources = ['ampld.lp.pyf', 'ampld.lp.f', 'modified_double_precision_drop.f'] config.add_extension('_tmatrix', [join('src', f) for f in sources], extra_compile_args=flags, **lapack) return config setup(**configuration(top_path='').todict())
Add a version number for scattering.
## Code Before: from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info from os.path import join flags = ['-W', '-Wall', '-march=opteron', '-O3'] def configuration(parent_package='', top_path=None): config = Configuration('scattering', parent_package, top_path, author = 'Ryan May', author_email = '[email protected]', platforms = ['Linux'], description = 'Software for simulating weather radar data.', url = 'http://weather.ou.edu/~rmay/research.html') lapack = get_info('lapack_opt') sources = ['ampld.lp.pyf', 'ampld.lp.f', 'modified_double_precision_drop.f'] config.add_extension('_tmatrix', [join('src', f) for f in sources], extra_compile_args=flags, **lapack) return config setup(**configuration(top_path='').todict()) ## Instruction: Add a version number for scattering. ## Code After: from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info from os.path import join flags = ['-W', '-Wall', '-march=opteron', '-O3'] def configuration(parent_package='', top_path=None): config = Configuration('scattering', parent_package, top_path, version='0.8', author = 'Ryan May', author_email = '[email protected]', platforms = ['Linux'], description = 'Software for simulating weather radar data.', url = 'http://weather.ou.edu/~rmay/research.html') lapack = get_info('lapack_opt') sources = ['ampld.lp.pyf', 'ampld.lp.f', 'modified_double_precision_drop.f'] config.add_extension('_tmatrix', [join('src', f) for f in sources], extra_compile_args=flags, **lapack) return config setup(**configuration(top_path='').todict())
// ... existing code ... config = Configuration('scattering', parent_package, top_path, version='0.8', author = 'Ryan May', // ... rest of the code ...
9dafef749aaf2fca9e865cf28b043ea22bafe3a5
backend/django/apps/accounts/tests.py
backend/django/apps/accounts/tests.py
from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data)
from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) class CreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_create_user(self): self.user.email = '[email protected]' data = json.dumps(WholeAccountSerializer(self.user).data) response = self.client.post( reverse('_accounts:account-list'), data, content_type='application/json') self.assertEqual( first=response.status_code, second=status.HTTP_201_CREATED)
Create a test for Account creation
Create a test for Account creation
Python
mit
slavpetroff/sweetshop,slavpetroff/sweetshop
from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) + + class CreateUserTest(APITestCase): + + def setUp(self): + self.user = UserFactory() + + def test_create_user(self): + self.user.email = '[email protected]' + data = json.dumps(WholeAccountSerializer(self.user).data) + response = self.client.post( + reverse('_accounts:account-list'), + data, + content_type='application/json') + self.assertEqual( + first=response.status_code, second=status.HTTP_201_CREATED) +
Create a test for Account creation
## Code Before: from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) ## Instruction: Create a test for Account creation ## Code After: from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) class CreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_create_user(self): self.user.email = '[email protected]' data = json.dumps(WholeAccountSerializer(self.user).data) response = self.client.post( reverse('_accounts:account-list'), data, content_type='application/json') 
self.assertEqual( first=response.status_code, second=status.HTTP_201_CREATED)
# ... existing code ... expected_data=WholeAccountSerializer(self.user).data) class CreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_create_user(self): self.user.email = '[email protected]' data = json.dumps(WholeAccountSerializer(self.user).data) response = self.client.post( reverse('_accounts:account-list'), data, content_type='application/json') self.assertEqual( first=response.status_code, second=status.HTTP_201_CREATED) # ... rest of the code ...
6155cfa0d16bfde8b412a3b2c68983ef939d518c
synapse/tests/test_init.py
synapse/tests/test_init.py
import os import imp import synapse from synapse.tests.common import * class InitTest(SynTest): def test_init_modules(self): os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math' msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')' with self.getLoggerStream('synapse', msg) as stream: imp.reload(synapse) self.true(stream.wait(10)) stream.seek(0) self.isin(msg, stream.read()) self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
import os import imp import synapse from synapse.tests.common import * class InitTest(SynTest): pass ''' def test_init_modules(self): os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math' msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')' with self.getLoggerStream('synapse', msg) as stream: imp.reload(synapse) self.true(stream.wait(10)) stream.seek(0) self.isin(msg, stream.read()) self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4)) '''
Comment out broken init test
Comment out broken init test
Python
apache-2.0
vertexproject/synapse,vertexproject/synapse,vivisect/synapse,vertexproject/synapse
import os import imp import synapse from synapse.tests.common import * class InitTest(SynTest): + pass + ''' def test_init_modules(self): os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math' msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')' with self.getLoggerStream('synapse', msg) as stream: imp.reload(synapse) self.true(stream.wait(10)) stream.seek(0) self.isin(msg, stream.read()) self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4)) + '''
Comment out broken init test
## Code Before: import os import imp import synapse from synapse.tests.common import * class InitTest(SynTest): def test_init_modules(self): os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math' msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')' with self.getLoggerStream('synapse', msg) as stream: imp.reload(synapse) self.true(stream.wait(10)) stream.seek(0) self.isin(msg, stream.read()) self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4)) ## Instruction: Comment out broken init test ## Code After: import os import imp import synapse from synapse.tests.common import * class InitTest(SynTest): pass ''' def test_init_modules(self): os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math' msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')' with self.getLoggerStream('synapse', msg) as stream: imp.reload(synapse) self.true(stream.wait(10)) stream.seek(0) self.isin(msg, stream.read()) self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4)) '''
... class InitTest(SynTest): pass ''' def test_init_modules(self): ... self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4)) ''' ...
621a97a0904e085c33ef78d68cd733af0d816aee
app/aflafrettir/routes.py
app/aflafrettir/routes.py
from flask import render_template from . import aflafrettir @aflafrettir.route('/') def index(): return render_template('aflafrettir/index.html') @talks.route('/user/<username>') def user(username): return render_template('aflafrettir/index.html', username = username)
from flask import render_template from . import aflafrettir @aflafrettir.route('/') def index(): return render_template('aflafrettir/index.html') @aflafrettir.route('/user/<username>') def user(username): return render_template('aflafrettir/user.html', username = username)
Use the correct template, and call the aflafrettir route decorator
Use the correct template, and call the aflafrettir route decorator
Python
mit
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
from flask import render_template from . import aflafrettir @aflafrettir.route('/') def index(): return render_template('aflafrettir/index.html') - @talks.route('/user/<username>') + @aflafrettir.route('/user/<username>') def user(username): - return render_template('aflafrettir/index.html', username = username) + return render_template('aflafrettir/user.html', username = username)
Use the correct template, and call the aflafrettir route decorator
## Code Before: from flask import render_template from . import aflafrettir @aflafrettir.route('/') def index(): return render_template('aflafrettir/index.html') @talks.route('/user/<username>') def user(username): return render_template('aflafrettir/index.html', username = username) ## Instruction: Use the correct template, and call the aflafrettir route decorator ## Code After: from flask import render_template from . import aflafrettir @aflafrettir.route('/') def index(): return render_template('aflafrettir/index.html') @aflafrettir.route('/user/<username>') def user(username): return render_template('aflafrettir/user.html', username = username)
# ... existing code ... @aflafrettir.route('/user/<username>') def user(username): return render_template('aflafrettir/user.html', username = username) # ... rest of the code ...
32ddc769bffed640e83e99e2657f20bbb3ef5e38
mopidy_soundcloud/__init__.py
mopidy_soundcloud/__init__.py
from __future__ import unicode_literals import os from mopidy import ext, config from mopidy.exceptions import ExtensionError __version__ = '1.0.18' __url__ = 'https://github.com/mopidy/mopidy-soundcloud' class SoundCloudExtension(ext.Extension): dist_name = 'Mopidy-SoundCloud' ext_name = 'soundcloud' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(SoundCloudExtension, self).get_config_schema() schema['explore'] = config.List() schema['explore_pages'] = config.Integer() schema['auth_token'] = config.Secret() return schema def validate_config(self, config): if not config.getboolean('soundcloud', 'enabled'): return if not config.get('soundcloud', 'auth_token'): raise ExtensionError("In order to use SoundCloud extension you\ must provide auth_token, for more information referrer to \ https://github.com/mopidy/mopidy-soundcloud/") def validate_environment(self): try: import requests # noqa except ImportError as e: raise ExtensionError('Library requests not found', e) def get_backend_classes(self): from .actor import SoundCloudBackend return [SoundCloudBackend]
from __future__ import unicode_literals import os from mopidy import ext, config from mopidy.exceptions import ExtensionError __version__ = '1.0.18' __url__ = 'https://github.com/mopidy/mopidy-soundcloud' class SoundCloudExtension(ext.Extension): dist_name = 'Mopidy-SoundCloud' ext_name = 'soundcloud' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(SoundCloudExtension, self).get_config_schema() schema['explore'] = config.List() schema['explore_pages'] = config.Integer() schema['auth_token'] = config.Secret() return schema def validate_config(self, config): if not config.getboolean('soundcloud', 'enabled'): return if not config.get('soundcloud', 'auth_token'): raise ExtensionError("In order to use SoundCloud extension you\ must provide auth_token, for more information referrer to \ https://github.com/mopidy/mopidy-soundcloud/") def get_backend_classes(self): from .actor import SoundCloudBackend return [SoundCloudBackend]
Remove env check as Mopidy checks deps automatically
ext: Remove env check as Mopidy checks deps automatically
Python
mit
mopidy/mopidy-soundcloud,yakumaa/mopidy-soundcloud
from __future__ import unicode_literals import os from mopidy import ext, config from mopidy.exceptions import ExtensionError __version__ = '1.0.18' __url__ = 'https://github.com/mopidy/mopidy-soundcloud' class SoundCloudExtension(ext.Extension): dist_name = 'Mopidy-SoundCloud' ext_name = 'soundcloud' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(SoundCloudExtension, self).get_config_schema() schema['explore'] = config.List() schema['explore_pages'] = config.Integer() schema['auth_token'] = config.Secret() return schema def validate_config(self, config): if not config.getboolean('soundcloud', 'enabled'): return if not config.get('soundcloud', 'auth_token'): raise ExtensionError("In order to use SoundCloud extension you\ must provide auth_token, for more information referrer to \ https://github.com/mopidy/mopidy-soundcloud/") - def validate_environment(self): - try: - import requests # noqa - except ImportError as e: - raise ExtensionError('Library requests not found', e) - def get_backend_classes(self): from .actor import SoundCloudBackend return [SoundCloudBackend]
Remove env check as Mopidy checks deps automatically
## Code Before: from __future__ import unicode_literals import os from mopidy import ext, config from mopidy.exceptions import ExtensionError __version__ = '1.0.18' __url__ = 'https://github.com/mopidy/mopidy-soundcloud' class SoundCloudExtension(ext.Extension): dist_name = 'Mopidy-SoundCloud' ext_name = 'soundcloud' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(SoundCloudExtension, self).get_config_schema() schema['explore'] = config.List() schema['explore_pages'] = config.Integer() schema['auth_token'] = config.Secret() return schema def validate_config(self, config): if not config.getboolean('soundcloud', 'enabled'): return if not config.get('soundcloud', 'auth_token'): raise ExtensionError("In order to use SoundCloud extension you\ must provide auth_token, for more information referrer to \ https://github.com/mopidy/mopidy-soundcloud/") def validate_environment(self): try: import requests # noqa except ImportError as e: raise ExtensionError('Library requests not found', e) def get_backend_classes(self): from .actor import SoundCloudBackend return [SoundCloudBackend] ## Instruction: Remove env check as Mopidy checks deps automatically ## Code After: from __future__ import unicode_literals import os from mopidy import ext, config from mopidy.exceptions import ExtensionError __version__ = '1.0.18' __url__ = 'https://github.com/mopidy/mopidy-soundcloud' class SoundCloudExtension(ext.Extension): dist_name = 'Mopidy-SoundCloud' ext_name = 'soundcloud' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(SoundCloudExtension, self).get_config_schema() schema['explore'] = config.List() schema['explore_pages'] = config.Integer() schema['auth_token'] = config.Secret() return schema def 
validate_config(self, config): if not config.getboolean('soundcloud', 'enabled'): return if not config.get('soundcloud', 'auth_token'): raise ExtensionError("In order to use SoundCloud extension you\ must provide auth_token, for more information referrer to \ https://github.com/mopidy/mopidy-soundcloud/") def get_backend_classes(self): from .actor import SoundCloudBackend return [SoundCloudBackend]
// ... existing code ... def get_backend_classes(self): // ... rest of the code ...
b62c8c905cdd332a0073ce462be3e5c5b17b282d
api/webview/views.py
api/webview/views.py
from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListCreateAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListCreateAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. """ try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data)
from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. """ try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data)
Make the view List only remove Create
Make the view List only remove Create
Python
apache-2.0
erinspace/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,fabianvf/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,felliott/scrapi
from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer - class DocumentList(generics.ListCreateAPIView): + class DocumentList(generics.ListAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() - class DocumentsFromSource(generics.ListCreateAPIView): + class DocumentsFromSource(generics.ListAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. """ try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data)
Make the view List only remove Create
## Code Before: from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListCreateAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListCreateAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. 
""" try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data) ## Instruction: Make the view List only remove Create ## Code After: from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. """ try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data)
// ... existing code ... class DocumentList(generics.ListAPIView): """ // ... modified code ... class DocumentsFromSource(generics.ListAPIView): """ // ... rest of the code ...
3c573e2b02a18627b82f4a25fef67adae295d653
rbm2m/models/setting.py
rbm2m/models/setting.py
from sqlalchemy import Column, String from .base import Base class Setting(Base): __tablename__ = 'settings' name = Column(String(32), nullable=False, primary_key=True) value = Column(String(512)) default_value = Column(String(512)) title = Column(String(127), nullable=False) data_type = Column(String(8)) # string or text for now description = Column(String(512))
from sqlalchemy import Column, String, Text from .base import Base class Setting(Base): __tablename__ = 'settings' name = Column(String(32), nullable=False, primary_key=True) value = Column(Text) default_value = Column(Text) title = Column(String(127), nullable=False) data_type = Column(String(8)) # string or text for now description = Column(String(512))
Set setings.value type to text
Set setings.value type to text
Python
apache-2.0
notapresent/rbm2m,notapresent/rbm2m
- from sqlalchemy import Column, String + from sqlalchemy import Column, String, Text from .base import Base class Setting(Base): __tablename__ = 'settings' name = Column(String(32), nullable=False, primary_key=True) - value = Column(String(512)) + value = Column(Text) - default_value = Column(String(512)) + default_value = Column(Text) title = Column(String(127), nullable=False) data_type = Column(String(8)) # string or text for now description = Column(String(512))
Set setings.value type to text
## Code Before: from sqlalchemy import Column, String from .base import Base class Setting(Base): __tablename__ = 'settings' name = Column(String(32), nullable=False, primary_key=True) value = Column(String(512)) default_value = Column(String(512)) title = Column(String(127), nullable=False) data_type = Column(String(8)) # string or text for now description = Column(String(512)) ## Instruction: Set setings.value type to text ## Code After: from sqlalchemy import Column, String, Text from .base import Base class Setting(Base): __tablename__ = 'settings' name = Column(String(32), nullable=False, primary_key=True) value = Column(Text) default_value = Column(Text) title = Column(String(127), nullable=False) data_type = Column(String(8)) # string or text for now description = Column(String(512))
// ... existing code ... from sqlalchemy import Column, String, Text // ... modified code ... name = Column(String(32), nullable=False, primary_key=True) value = Column(Text) default_value = Column(Text) title = Column(String(127), nullable=False) // ... rest of the code ...
481df944700297300892bd14783310aad14c093c
test/selenium/src/lib/page/modal/delete_object.py
test/selenium/src/lib/page/modal/delete_object.py
"""Modals for deleting objects""" from lib import base from lib import decorator from lib.constants import locator class DeleteObjectModal(base.Modal): """A generic modal for deleting an object""" _page_model_cls_after_redirect = None _locator = locator.ModalDeleteObject def __init__(self, driver): super(DeleteObjectModal, self).__init__(driver) self.title_modal = base.Label( driver, self._locator.MODAL_TITLE) self.confirmation_text = base.Label( driver, self._locator.CONFIRMATION_TEXT) self.title_object = base.Label( driver, self._locator.OBJECT_TITLE) self.button_delete = base.Button( driver, self._locator.BUTTON_DELETE) @decorator.wait_for_redirect def confirm_delete(self): """ Returns: lib.page.dashboard.Dashboard """ self.button_delete.click()
"""Modals for deleting objects""" from lib import base from lib.constants import locator class DeleteObjectModal(base.Modal): """A generic modal for deleting an object""" _page_model_cls_after_redirect = None _locator = locator.ModalDeleteObject def __init__(self, driver): super(DeleteObjectModal, self).__init__(driver) self.title_modal = base.Label( driver, self._locator.MODAL_TITLE) self.confirmation_text = base.Label( driver, self._locator.CONFIRMATION_TEXT) self.title_object = base.Label( driver, self._locator.OBJECT_TITLE) self.button_delete = base.Button( driver, self._locator.BUTTON_DELETE) def confirm_delete(self): """ Returns: lib.page.dashboard.Dashboard """ self.button_delete.click()
Remove redirect from delete modal page object
Remove redirect from delete modal page object (cherry picked from commit 480ecdb)
Python
apache-2.0
VinnieJohns/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core
"""Modals for deleting objects""" from lib import base - from lib import decorator from lib.constants import locator class DeleteObjectModal(base.Modal): """A generic modal for deleting an object""" _page_model_cls_after_redirect = None _locator = locator.ModalDeleteObject def __init__(self, driver): super(DeleteObjectModal, self).__init__(driver) self.title_modal = base.Label( driver, self._locator.MODAL_TITLE) self.confirmation_text = base.Label( driver, self._locator.CONFIRMATION_TEXT) self.title_object = base.Label( driver, self._locator.OBJECT_TITLE) self.button_delete = base.Button( driver, self._locator.BUTTON_DELETE) - @decorator.wait_for_redirect def confirm_delete(self): """ Returns: lib.page.dashboard.Dashboard """ self.button_delete.click()
Remove redirect from delete modal page object
## Code Before: """Modals for deleting objects""" from lib import base from lib import decorator from lib.constants import locator class DeleteObjectModal(base.Modal): """A generic modal for deleting an object""" _page_model_cls_after_redirect = None _locator = locator.ModalDeleteObject def __init__(self, driver): super(DeleteObjectModal, self).__init__(driver) self.title_modal = base.Label( driver, self._locator.MODAL_TITLE) self.confirmation_text = base.Label( driver, self._locator.CONFIRMATION_TEXT) self.title_object = base.Label( driver, self._locator.OBJECT_TITLE) self.button_delete = base.Button( driver, self._locator.BUTTON_DELETE) @decorator.wait_for_redirect def confirm_delete(self): """ Returns: lib.page.dashboard.Dashboard """ self.button_delete.click() ## Instruction: Remove redirect from delete modal page object ## Code After: """Modals for deleting objects""" from lib import base from lib.constants import locator class DeleteObjectModal(base.Modal): """A generic modal for deleting an object""" _page_model_cls_after_redirect = None _locator = locator.ModalDeleteObject def __init__(self, driver): super(DeleteObjectModal, self).__init__(driver) self.title_modal = base.Label( driver, self._locator.MODAL_TITLE) self.confirmation_text = base.Label( driver, self._locator.CONFIRMATION_TEXT) self.title_object = base.Label( driver, self._locator.OBJECT_TITLE) self.button_delete = base.Button( driver, self._locator.BUTTON_DELETE) def confirm_delete(self): """ Returns: lib.page.dashboard.Dashboard """ self.button_delete.click()
// ... existing code ... from lib import base from lib.constants import locator // ... modified code ... def confirm_delete(self): // ... rest of the code ...
fc7beded3d286d831df29b8b32614b2eb56ef206
enasearch/__main__.py
enasearch/__main__.py
import click import ebisearch from pprint import pprint @click.group() def main(): pass @click.command('get_results', short_help='Get list of results') def get_results(): """Return the list of domains in EBI""" ebisearch.get_results(verbose=True) @click.command('get_filter_fields', short_help='Get filter fields') @click.option( '--result', help='Id of a result (accessible with get_results)') def get_filter_fields(result): """Get the filter fields of a result to build a query""" ebisearch.get_filter_fields(verbose=True) main.add_command(get_results) main.add_command(get_filter_fields) if __name__ == "__main__": main()
import click import ebisearch from pprint import pprint @click.group() def main(): pass @click.command('get_results', short_help='Get list of results') def get_results(): """Return the list of domains in EBI""" ebisearch.get_results(verbose=True) @click.command('get_filter_fields', short_help='Get filter fields') @click.option( '--result', help='Id of a result (accessible with get_results)') def get_filter_fields(result): """Get the filter fields of a result to build a query""" ebisearch.get_filter_fields(verbose=True) @click.command('get_filter_types', short_help='Get filter types') def get_filter_types(): """Get the types of filters usable to build a query""" ebisearch.get_filter_types(verbose=True) main.add_command(get_results) main.add_command(get_filter_fields) main.add_command(get_filter_types) if __name__ == "__main__": main()
Add function for get filter types
Add function for get filter types
Python
mit
bebatut/enasearch
import click import ebisearch from pprint import pprint @click.group() def main(): pass @click.command('get_results', short_help='Get list of results') def get_results(): """Return the list of domains in EBI""" ebisearch.get_results(verbose=True) @click.command('get_filter_fields', short_help='Get filter fields') @click.option( '--result', help='Id of a result (accessible with get_results)') def get_filter_fields(result): """Get the filter fields of a result to build a query""" ebisearch.get_filter_fields(verbose=True) + @click.command('get_filter_types', short_help='Get filter types') + def get_filter_types(): + """Get the types of filters usable to build a query""" + ebisearch.get_filter_types(verbose=True) + + main.add_command(get_results) main.add_command(get_filter_fields) + main.add_command(get_filter_types) if __name__ == "__main__": main()
Add function for get filter types
## Code Before: import click import ebisearch from pprint import pprint @click.group() def main(): pass @click.command('get_results', short_help='Get list of results') def get_results(): """Return the list of domains in EBI""" ebisearch.get_results(verbose=True) @click.command('get_filter_fields', short_help='Get filter fields') @click.option( '--result', help='Id of a result (accessible with get_results)') def get_filter_fields(result): """Get the filter fields of a result to build a query""" ebisearch.get_filter_fields(verbose=True) main.add_command(get_results) main.add_command(get_filter_fields) if __name__ == "__main__": main() ## Instruction: Add function for get filter types ## Code After: import click import ebisearch from pprint import pprint @click.group() def main(): pass @click.command('get_results', short_help='Get list of results') def get_results(): """Return the list of domains in EBI""" ebisearch.get_results(verbose=True) @click.command('get_filter_fields', short_help='Get filter fields') @click.option( '--result', help='Id of a result (accessible with get_results)') def get_filter_fields(result): """Get the filter fields of a result to build a query""" ebisearch.get_filter_fields(verbose=True) @click.command('get_filter_types', short_help='Get filter types') def get_filter_types(): """Get the types of filters usable to build a query""" ebisearch.get_filter_types(verbose=True) main.add_command(get_results) main.add_command(get_filter_fields) main.add_command(get_filter_types) if __name__ == "__main__": main()
// ... existing code ... @click.command('get_filter_types', short_help='Get filter types') def get_filter_types(): """Get the types of filters usable to build a query""" ebisearch.get_filter_types(verbose=True) main.add_command(get_results) // ... modified code ... main.add_command(get_filter_fields) main.add_command(get_filter_types) // ... rest of the code ...
d73872b8bcc6c7c32fa10d4a8ffdd77fe568a954
pyautotest/cli.py
pyautotest/cli.py
import logging import os import signal import time from optparse import OptionParser from watchdog.observers import Observer from pyautotest.observers import Notifier, ChangeHandler # Configure logging logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s', datefmt='%m-%d-%Y %H:%M:%S', level=logging.INFO) logger = logging.getLogger('pyautotest') def main(): parser = OptionParser("usage: %prog [options]") parser.set_defaults(loglevel="INFO") parser.add_option("-l", "--log-level", action="store", dest="loglevel") (options, args) = parser.parse_args() # Handle options logger.setLevel(getattr(logging, options.loglevel.upper(), None)) while True: event_handler = ChangeHandler() event_handler.run_tests() observer = Observer() observer.schedule(event_handler, os.getcwd(), recursive=True) # Avoid child zombie processes signal.signal(signal.SIGCHLD, signal.SIG_IGN) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() observer.join() if __name__ == "__main__": main()
import argparse import logging import os import signal import time from watchdog.observers import Observer from pyautotest.observers import Notifier, ChangeHandler # Configure logging logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s', datefmt='%m-%d-%Y %H:%M:%S', level=logging.INFO) logger = logging.getLogger('pyautotest') def main(): parser = argparse.ArgumentParser(description="Continuously run unit tests when changes detected") parser.add_argument('-l', '--log-level', metavar='L', default='INFO', dest='loglevel', action='store', help='set logger level') args = parser.parse_args() # Handle options logger.setLevel(getattr(logging, args.loglevel.upper(), None)) while True: event_handler = ChangeHandler() event_handler.run_tests() observer = Observer() observer.schedule(event_handler, os.getcwd(), recursive=True) # Avoid child zombie processes signal.signal(signal.SIGCHLD, signal.SIG_IGN) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() observer.join() if __name__ == "__main__": main()
Switch from optparse to argparse
Switch from optparse to argparse
Python
mit
ascarter/pyautotest
+ import argparse import logging import os import signal import time - from optparse import OptionParser from watchdog.observers import Observer from pyautotest.observers import Notifier, ChangeHandler # Configure logging logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s', datefmt='%m-%d-%Y %H:%M:%S', level=logging.INFO) logger = logging.getLogger('pyautotest') def main(): - parser = OptionParser("usage: %prog [options]") - parser.set_defaults(loglevel="INFO") - parser.add_option("-l", "--log-level", action="store", dest="loglevel") + parser = argparse.ArgumentParser(description="Continuously run unit tests when changes detected") + parser.add_argument('-l', '--log-level', + metavar='L', + default='INFO', + dest='loglevel', + action='store', + help='set logger level') - (options, args) = parser.parse_args() + args = parser.parse_args() # Handle options - logger.setLevel(getattr(logging, options.loglevel.upper(), None)) + logger.setLevel(getattr(logging, args.loglevel.upper(), None)) while True: event_handler = ChangeHandler() event_handler.run_tests() observer = Observer() observer.schedule(event_handler, os.getcwd(), recursive=True) # Avoid child zombie processes signal.signal(signal.SIGCHLD, signal.SIG_IGN) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() observer.join() if __name__ == "__main__": main()
Switch from optparse to argparse
## Code Before: import logging import os import signal import time from optparse import OptionParser from watchdog.observers import Observer from pyautotest.observers import Notifier, ChangeHandler # Configure logging logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s', datefmt='%m-%d-%Y %H:%M:%S', level=logging.INFO) logger = logging.getLogger('pyautotest') def main(): parser = OptionParser("usage: %prog [options]") parser.set_defaults(loglevel="INFO") parser.add_option("-l", "--log-level", action="store", dest="loglevel") (options, args) = parser.parse_args() # Handle options logger.setLevel(getattr(logging, options.loglevel.upper(), None)) while True: event_handler = ChangeHandler() event_handler.run_tests() observer = Observer() observer.schedule(event_handler, os.getcwd(), recursive=True) # Avoid child zombie processes signal.signal(signal.SIGCHLD, signal.SIG_IGN) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() observer.join() if __name__ == "__main__": main() ## Instruction: Switch from optparase to argparse ## Code After: import argparse import logging import os import signal import time from watchdog.observers import Observer from pyautotest.observers import Notifier, ChangeHandler # Configure logging logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s', datefmt='%m-%d-%Y %H:%M:%S', level=logging.INFO) logger = logging.getLogger('pyautotest') def main(): parser = argparse.ArgumentParser(description="Continuously run unit tests when changes detected") parser.add_argument('-l', '--log-level', metavar='L', default='INFO', dest='loglevel', action='store', help='set logger level') args = parser.parse_args() # Handle options logger.setLevel(getattr(logging, args.loglevel.upper(), None)) while True: event_handler = ChangeHandler() event_handler.run_tests() observer = Observer() observer.schedule(event_handler, os.getcwd(), recursive=True) # Avoid child zombie 
processes signal.signal(signal.SIGCHLD, signal.SIG_IGN) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() observer.join() if __name__ == "__main__": main()
// ... existing code ... import argparse import logging // ... modified code ... from watchdog.observers import Observer ... def main(): parser = argparse.ArgumentParser(description="Continuously run unit tests when changes detected") parser.add_argument('-l', '--log-level', metavar='L', default='INFO', dest='loglevel', action='store', help='set logger level') args = parser.parse_args() ... # Handle options logger.setLevel(getattr(logging, args.loglevel.upper(), None)) // ... rest of the code ...
9a121f309ded039f770339d51b43d0933a98d982
app/main/views.py
app/main/views.py
from flask import render_template, current_app, flash, redirect, url_for from . import main from forms import ContactForm from ..email import send_email @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
from flask import render_template, current_app, flash, redirect, url_for, send_from_directory from . import main from forms import ContactForm from ..email import send_email @main.route('/<path:filename>') def static_from_root(filename): return send_from_directory(current_app.static_folder, filename) @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
Add additional view for sitemap.xml
Add additional view for sitemap.xml
Python
mit
jordandietch/workforsushi,jordandietch/workforsushi,jordandietch/workforsushi,jordandietch/workforsushi
- from flask import render_template, current_app, flash, redirect, url_for + from flask import render_template, current_app, flash, redirect, url_for, send_from_directory from . import main from forms import ContactForm from ..email import send_email + + @main.route('/<path:filename>') + def static_from_root(filename): + return send_from_directory(current_app.static_folder, filename) @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
Add additional view for sitemap.xml
## Code Before: from flask import render_template, current_app, flash, redirect, url_for from . import main from forms import ContactForm from ..email import send_email @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html') ## Instruction: Add additional view for sitemap.xml ## Code After: from flask import render_template, current_app, flash, redirect, url_for, send_from_directory from . 
import main from forms import ContactForm from ..email import send_email @main.route('/<path:filename>') def static_from_root(filename): return send_from_directory(current_app.static_folder, filename) @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
... from flask import render_template, current_app, flash, redirect, url_for, send_from_directory from . import main ... from ..email import send_email @main.route('/<path:filename>') def static_from_root(filename): return send_from_directory(current_app.static_folder, filename) ...
4eecac0764e8abfc33c9e77b8eb6b700b536f1a0
pull_me.py
pull_me.py
from random import randint from time import sleep from os import system from easygui import msgbox while True: delay = randint(60, 2000) sleep(delay) system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav") msgbox("Hi Dan", "Time is up")
from random import randint from time import sleep from os import system import os.path from easygui import msgbox while True: delay = randint(60, 2000) sleep(delay) if os.path.isfile("/usr/share/sounds/GNUstep/Glass.wav"): system("aplay /usr/share/sounds/GNUstep/Glass.wav") else: system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav") msgbox("Hi Dan", "Time is up")
Use Glass.wav if it exists.
Use Glass.wav if it exists.
Python
apache-2.0
dnuffer/carrot_slots
from random import randint from time import sleep from os import system + import os.path from easygui import msgbox while True: delay = randint(60, 2000) sleep(delay) + if os.path.isfile("/usr/share/sounds/GNUstep/Glass.wav"): + system("aplay /usr/share/sounds/GNUstep/Glass.wav") + else: - system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav") + system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav") msgbox("Hi Dan", "Time is up")
Use Glass.wav if it exists.
## Code Before: from random import randint from time import sleep from os import system from easygui import msgbox while True: delay = randint(60, 2000) sleep(delay) system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav") msgbox("Hi Dan", "Time is up") ## Instruction: Use Glass.wav if it exists. ## Code After: from random import randint from time import sleep from os import system import os.path from easygui import msgbox while True: delay = randint(60, 2000) sleep(delay) if os.path.isfile("/usr/share/sounds/GNUstep/Glass.wav"): system("aplay /usr/share/sounds/GNUstep/Glass.wav") else: system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav") msgbox("Hi Dan", "Time is up")
... from os import system import os.path from easygui import msgbox ... sleep(delay) if os.path.isfile("/usr/share/sounds/GNUstep/Glass.wav"): system("aplay /usr/share/sounds/GNUstep/Glass.wav") else: system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav") msgbox("Hi Dan", "Time is up") ...
debb03975e8b647f27980081371bd9fdad7b292f
solar/solar/system_log/operations.py
solar/solar/system_log/operations.py
from solar.system_log import data from dictdiffer import patch def set_error(task_uuid, *args, **kwargs): sl = data.SL() item = sl.get(task_uuid) if item: item.state = data.STATES.error sl.update(task_uuid, item) def move_to_commited(task_uuid, *args, **kwargs): sl = data.SL() item = sl.pop(task_uuid) if item: commited = data.CD() staged_data = patch(item.diff, commited.get(item.res, {})) cl = data.CL() item.state = data.STATES.success cl.append(item) commited[item.res] = staged_data
from solar.system_log import data from dictdiffer import patch def set_error(task_uuid, *args, **kwargs): sl = data.SL() item = sl.get(task_uuid) if item: item.state = data.STATES.error sl.update(item) def move_to_commited(task_uuid, *args, **kwargs): sl = data.SL() item = sl.pop(task_uuid) if item: commited = data.CD() staged_data = patch(item.diff, commited.get(item.res, {})) cl = data.CL() item.state = data.STATES.success cl.append(item) commited[item.res] = staged_data
Fix update of logitem bug in system_log
Fix update of logitem bug in system_log
Python
apache-2.0
loles/solar,openstack/solar,loles/solar,pigmej/solar,pigmej/solar,torgartor21/solar,torgartor21/solar,loles/solar,dshulyak/solar,zen/solar,zen/solar,dshulyak/solar,zen/solar,Mirantis/solar,Mirantis/solar,Mirantis/solar,pigmej/solar,Mirantis/solar,CGenie/solar,CGenie/solar,openstack/solar,zen/solar,loles/solar,openstack/solar
from solar.system_log import data from dictdiffer import patch def set_error(task_uuid, *args, **kwargs): sl = data.SL() item = sl.get(task_uuid) if item: item.state = data.STATES.error - sl.update(task_uuid, item) + sl.update(item) def move_to_commited(task_uuid, *args, **kwargs): sl = data.SL() item = sl.pop(task_uuid) if item: commited = data.CD() staged_data = patch(item.diff, commited.get(item.res, {})) cl = data.CL() item.state = data.STATES.success cl.append(item) commited[item.res] = staged_data
Fix update of logitem bug in system_log
## Code Before: from solar.system_log import data from dictdiffer import patch def set_error(task_uuid, *args, **kwargs): sl = data.SL() item = sl.get(task_uuid) if item: item.state = data.STATES.error sl.update(task_uuid, item) def move_to_commited(task_uuid, *args, **kwargs): sl = data.SL() item = sl.pop(task_uuid) if item: commited = data.CD() staged_data = patch(item.diff, commited.get(item.res, {})) cl = data.CL() item.state = data.STATES.success cl.append(item) commited[item.res] = staged_data ## Instruction: Fix update of logitem bug in system_log ## Code After: from solar.system_log import data from dictdiffer import patch def set_error(task_uuid, *args, **kwargs): sl = data.SL() item = sl.get(task_uuid) if item: item.state = data.STATES.error sl.update(item) def move_to_commited(task_uuid, *args, **kwargs): sl = data.SL() item = sl.pop(task_uuid) if item: commited = data.CD() staged_data = patch(item.diff, commited.get(item.res, {})) cl = data.CL() item.state = data.STATES.success cl.append(item) commited[item.res] = staged_data
// ... existing code ... item.state = data.STATES.error sl.update(item) // ... rest of the code ...
b2e1fd5727eed1818d0ddc3c29a1cf9f7e38d024
wger/exercises/management/commands/submitted-exercises.py
wger/exercises/management/commands/submitted-exercises.py
from django.core.management.base import BaseCommand from wger.exercises.models import Exercise class Command(BaseCommand): ''' Read out the user submitted exercise. Used to generate the AUTHORS file for a release ''' help = 'Read out the user submitted exercise' def handle(self, *args, **options): exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED) usernames = [] for exercise in exercises: username = exercise.user.username if username not in usernames: usernames.append(username) self.stdout.write('{0}\n'.format(username))
from django.core.management.base import BaseCommand from wger.exercises.models import Exercise class Command(BaseCommand): ''' Read out the user submitted exercise. Used to generate the AUTHORS file for a release ''' help = 'Read out the user submitted exercise' def handle(self, *args, **options): exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED) usernames = [] for exercise in exercises: if exercise.user not in usernames: usernames.append(exercise.user) self.stdout.write(exercise.user)
Fix management command for submitted exercises
Fix management command for submitted exercises
Python
agpl-3.0
DeveloperMal/wger,kjagoo/wger_stark,rolandgeider/wger,rolandgeider/wger,DeveloperMal/wger,petervanderdoes/wger,kjagoo/wger_stark,wger-project/wger,wger-project/wger,petervanderdoes/wger,wger-project/wger,kjagoo/wger_stark,rolandgeider/wger,rolandgeider/wger,petervanderdoes/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,petervanderdoes/wger,kjagoo/wger_stark
from django.core.management.base import BaseCommand from wger.exercises.models import Exercise class Command(BaseCommand): ''' Read out the user submitted exercise. Used to generate the AUTHORS file for a release ''' help = 'Read out the user submitted exercise' def handle(self, *args, **options): exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED) usernames = [] for exercise in exercises: - username = exercise.user.username - if username not in usernames: + if exercise.user not in usernames: - usernames.append(username) + usernames.append(exercise.user) - self.stdout.write('{0}\n'.format(username)) + self.stdout.write(exercise.user)
Fix management command for submitted exercises
## Code Before: from django.core.management.base import BaseCommand from wger.exercises.models import Exercise class Command(BaseCommand): ''' Read out the user submitted exercise. Used to generate the AUTHORS file for a release ''' help = 'Read out the user submitted exercise' def handle(self, *args, **options): exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED) usernames = [] for exercise in exercises: username = exercise.user.username if username not in usernames: usernames.append(username) self.stdout.write('{0}\n'.format(username)) ## Instruction: Fix management command for submitted exercises ## Code After: from django.core.management.base import BaseCommand from wger.exercises.models import Exercise class Command(BaseCommand): ''' Read out the user submitted exercise. Used to generate the AUTHORS file for a release ''' help = 'Read out the user submitted exercise' def handle(self, *args, **options): exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED) usernames = [] for exercise in exercises: if exercise.user not in usernames: usernames.append(exercise.user) self.stdout.write(exercise.user)
// ... existing code ... for exercise in exercises: if exercise.user not in usernames: usernames.append(exercise.user) self.stdout.write(exercise.user) // ... rest of the code ...
df638a33d6f0812a22bb775fded2d1790bd1e409
router/config/settings.py
router/config/settings.py
import os import sys from salmon.server import SMTPReceiver, LMTPReceiver sys.path.append('..') os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings # where to listen for incoming messages if settings.SALMON_SERVER["type"] == "lmtp": receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"]) elif settings.SALMON_SERVER["type"] == "smtp": receiver = SMTPReceiver(settings.SALMON_SERVER['host'], settings.SALMON_SERVER['port'])
import os import sys from salmon.server import SMTPReceiver, LMTPReceiver sys.path.append('..') os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings import django django.setup() # where to listen for incoming messages if settings.SALMON_SERVER["type"] == "lmtp": receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"]) elif settings.SALMON_SERVER["type"] == "smtp": receiver = SMTPReceiver(settings.SALMON_SERVER['host'], settings.SALMON_SERVER['port'])
Call `django.setup()` in router app
Call `django.setup()` in router app This should have been there before, but somehow we managed to get away without it :) fixes #99
Python
agpl-3.0
Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen
import os import sys from salmon.server import SMTPReceiver, LMTPReceiver sys.path.append('..') os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings + import django + + django.setup() # where to listen for incoming messages if settings.SALMON_SERVER["type"] == "lmtp": receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"]) elif settings.SALMON_SERVER["type"] == "smtp": receiver = SMTPReceiver(settings.SALMON_SERVER['host'], settings.SALMON_SERVER['port'])
Call `django.setup()` in router app
## Code Before: import os import sys from salmon.server import SMTPReceiver, LMTPReceiver sys.path.append('..') os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings # where to listen for incoming messages if settings.SALMON_SERVER["type"] == "lmtp": receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"]) elif settings.SALMON_SERVER["type"] == "smtp": receiver = SMTPReceiver(settings.SALMON_SERVER['host'], settings.SALMON_SERVER['port']) ## Instruction: Call `django.setup()` in router app ## Code After: import os import sys from salmon.server import SMTPReceiver, LMTPReceiver sys.path.append('..') os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings import django django.setup() # where to listen for incoming messages if settings.SALMON_SERVER["type"] == "lmtp": receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"]) elif settings.SALMON_SERVER["type"] == "smtp": receiver = SMTPReceiver(settings.SALMON_SERVER['host'], settings.SALMON_SERVER['port'])
// ... existing code ... from django.conf import settings import django django.setup() // ... rest of the code ...
76b47fec3b24410f875db96b3404c47d4c3634cb
sheepdog_tables/__init__.py
sheepdog_tables/__init__.py
__version__ = '1.2.0' try: from django.conf import settings getattr(settings, 'dummy_attr', 'dummy_value') _LOAD_PACKAGES = True except: # Just running sdist, we think _LOAD_PACKAGES = False if _LOAD_PACKAGES: from mixins import (TablesMixin, EditTablesMixin, FilteredListView, CSVTableMixin) from column import ColumnURL, Column, DictColumn, FieldColumn from table import Table, EditTable
__version__ = '1.2.0' try: from django.conf import settings getattr(settings, 'dummy_attr', 'dummy_value') _LOAD_PACKAGES = True except: # Just running sdist, we think _LOAD_PACKAGES = False if _LOAD_PACKAGES: from mixins import TablesMixin, EditTablesMixin, FilteredListView from column import ColumnURL, Column, DictColumn, FieldColumn from table import Table, EditTable
Fix import error after removal of old csv table mixin
Fix import error after removal of old csv table mixin
Python
bsd-3-clause
SheepDogInc/sheepdog_tables,SheepDogInc/sheepdog_tables
__version__ = '1.2.0' try: from django.conf import settings getattr(settings, 'dummy_attr', 'dummy_value') _LOAD_PACKAGES = True except: # Just running sdist, we think _LOAD_PACKAGES = False if _LOAD_PACKAGES: - from mixins import (TablesMixin, EditTablesMixin, FilteredListView, + from mixins import TablesMixin, EditTablesMixin, FilteredListView - CSVTableMixin) from column import ColumnURL, Column, DictColumn, FieldColumn from table import Table, EditTable
Fix import error after removal of old csv table mixin
## Code Before: __version__ = '1.2.0' try: from django.conf import settings getattr(settings, 'dummy_attr', 'dummy_value') _LOAD_PACKAGES = True except: # Just running sdist, we think _LOAD_PACKAGES = False if _LOAD_PACKAGES: from mixins import (TablesMixin, EditTablesMixin, FilteredListView, CSVTableMixin) from column import ColumnURL, Column, DictColumn, FieldColumn from table import Table, EditTable ## Instruction: Fix import error after removal of old csv table mixin ## Code After: __version__ = '1.2.0' try: from django.conf import settings getattr(settings, 'dummy_attr', 'dummy_value') _LOAD_PACKAGES = True except: # Just running sdist, we think _LOAD_PACKAGES = False if _LOAD_PACKAGES: from mixins import TablesMixin, EditTablesMixin, FilteredListView from column import ColumnURL, Column, DictColumn, FieldColumn from table import Table, EditTable
... if _LOAD_PACKAGES: from mixins import TablesMixin, EditTablesMixin, FilteredListView from column import ColumnURL, Column, DictColumn, FieldColumn ...
a8fcd8c56db0ce862c6c0ac79fc58a9e65992f6e
onlineweb4/context_processors.py
onlineweb4/context_processors.py
from django.conf import settings from apps.feedback.models import FeedbackRelation def context_settings(request): context_extras = {} if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'): context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY if hasattr(settings, 'HOT_RELOAD'): context_extras['HOT_RELOAD'] = settings.HOT_RELOAD return context_extras def feedback_notifier(request): context_extras = {} context_extras['feedback_pending'] = [] if not request.user.is_authenticated(): return context_extras active_feedbacks = FeedbackRelation.objects.filter(active=True) for active_feedback in active_feedbacks: if active_feedback.content_object is None: continue # This method returns both bools and a list for some reason. Python crashes with the expression: x in bool, # so we do this to fetch once and test twice not_answered = active_feedback.not_answered() if not_answered == False or request.user not in not_answered: continue context_extras['feedback_pending'].append(active_feedback) return context_extras
from django.conf import settings from django.utils import timezone from apps.feedback.models import FeedbackRelation def context_settings(request): context_extras = {} if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'): context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY if hasattr(settings, 'HOT_RELOAD'): context_extras['HOT_RELOAD'] = settings.HOT_RELOAD return context_extras def feedback_notifier(request): context_extras = {} context_extras['feedback_pending'] = [] if not request.user.is_authenticated(): return context_extras active_feedbacks = FeedbackRelation.objects.filter(active=True) for active_feedback in active_feedbacks: if active_feedback.content_object is None: continue # Making sure we have an end data, and that the event is over # and that the feedback deadline is not passed (logic reused from apps.feedback.mommy) end_date = active_feedback.content_end_date() today_date = timezone.now().date() if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0: continue # This method returns both bools and a list for some reason. Python crashes with the expression: x in bool, # so we do this to fetch once and test twice not_answered = active_feedback.not_answered() if not_answered == False or request.user not in not_answered: continue context_extras['feedback_pending'].append(active_feedback) return context_extras
Add more constraints to active feedback schemas
Add more constraints to active feedback schemas
Python
mit
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
+ from django.conf import settings + from django.utils import timezone from apps.feedback.models import FeedbackRelation def context_settings(request): context_extras = {} if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'): context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY if hasattr(settings, 'HOT_RELOAD'): context_extras['HOT_RELOAD'] = settings.HOT_RELOAD return context_extras def feedback_notifier(request): context_extras = {} context_extras['feedback_pending'] = [] if not request.user.is_authenticated(): return context_extras active_feedbacks = FeedbackRelation.objects.filter(active=True) for active_feedback in active_feedbacks: if active_feedback.content_object is None: continue + # Making sure we have an end data, and that the event is over + # and that the feedback deadline is not passed (logic reused from apps.feedback.mommy) + end_date = active_feedback.content_end_date() + today_date = timezone.now().date() + if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0: + continue + # This method returns both bools and a list for some reason. Python crashes with the expression: x in bool, # so we do this to fetch once and test twice not_answered = active_feedback.not_answered() if not_answered == False or request.user not in not_answered: continue context_extras['feedback_pending'].append(active_feedback) return context_extras
Add more constraints to active feedback schemas
## Code Before: from django.conf import settings from apps.feedback.models import FeedbackRelation def context_settings(request): context_extras = {} if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'): context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY if hasattr(settings, 'HOT_RELOAD'): context_extras['HOT_RELOAD'] = settings.HOT_RELOAD return context_extras def feedback_notifier(request): context_extras = {} context_extras['feedback_pending'] = [] if not request.user.is_authenticated(): return context_extras active_feedbacks = FeedbackRelation.objects.filter(active=True) for active_feedback in active_feedbacks: if active_feedback.content_object is None: continue # This method returns both bools and a list for some reason. Python crashes with the expression: x in bool, # so we do this to fetch once and test twice not_answered = active_feedback.not_answered() if not_answered == False or request.user not in not_answered: continue context_extras['feedback_pending'].append(active_feedback) return context_extras ## Instruction: Add more constraints to active feedback schemas ## Code After: from django.conf import settings from django.utils import timezone from apps.feedback.models import FeedbackRelation def context_settings(request): context_extras = {} if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'): context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY if hasattr(settings, 'HOT_RELOAD'): context_extras['HOT_RELOAD'] = settings.HOT_RELOAD return context_extras def feedback_notifier(request): context_extras = {} context_extras['feedback_pending'] = [] if not request.user.is_authenticated(): return context_extras active_feedbacks = FeedbackRelation.objects.filter(active=True) for active_feedback in active_feedbacks: if active_feedback.content_object is None: continue # Making sure we have an end data, and that the event is over # and that the feedback deadline is not passed (logic reused from apps.feedback.mommy) end_date = 
active_feedback.content_end_date() today_date = timezone.now().date() if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0: continue # This method returns both bools and a list for some reason. Python crashes with the expression: x in bool, # so we do this to fetch once and test twice not_answered = active_feedback.not_answered() if not_answered == False or request.user not in not_answered: continue context_extras['feedback_pending'].append(active_feedback) return context_extras
... from django.conf import settings from django.utils import timezone ... # Making sure we have an end data, and that the event is over # and that the feedback deadline is not passed (logic reused from apps.feedback.mommy) end_date = active_feedback.content_end_date() today_date = timezone.now().date() if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0: continue # This method returns both bools and a list for some reason. Python crashes with the expression: x in bool, ...