Dataset schema (column | dtype | value range):

hexsha                                      stringlengths   40 .. 40
size                                        int64           4 .. 1.02M
ext                                         stringclasses   8 values
lang                                        stringclasses   1 value
max_stars_repo_path                         stringlengths   4 .. 209
max_stars_repo_name                         stringlengths   5 .. 121
max_stars_repo_head_hexsha                  stringlengths   40 .. 40
max_stars_repo_licenses                     listlengths     1 .. 10
max_stars_count                             int64           1 .. 191k
max_stars_repo_stars_event_min_datetime     stringlengths   24 .. 24
max_stars_repo_stars_event_max_datetime     stringlengths   24 .. 24
max_issues_repo_path                        stringlengths   4 .. 209
max_issues_repo_name                        stringlengths   5 .. 121
max_issues_repo_head_hexsha                 stringlengths   40 .. 40
max_issues_repo_licenses                    listlengths     1 .. 10
max_issues_count                            int64           1 .. 67k
max_issues_repo_issues_event_min_datetime   stringlengths   24 .. 24
max_issues_repo_issues_event_max_datetime   stringlengths   24 .. 24
max_forks_repo_path                         stringlengths   4 .. 209
max_forks_repo_name                         stringlengths   5 .. 121
max_forks_repo_head_hexsha                  stringlengths   40 .. 40
max_forks_repo_licenses                     listlengths     1 .. 10
max_forks_count                             int64           1 .. 105k
max_forks_repo_forks_event_min_datetime     stringlengths   24 .. 24
max_forks_repo_forks_event_max_datetime     stringlengths   24 .. 24
content                                     stringlengths   4 .. 1.02M
avg_line_length                             float64         1.07 .. 66.1k
max_line_length                             int64           4 .. 266k
alphanum_fraction                           float64         0.01 .. 1
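A hedged loading sketch for records with this schema. The dataset identifier below is a placeholder assumption (the dump does not name the dataset); the column names are the ones listed above.

# Hedged sketch: streaming and filtering a corpus with this schema.
# "some-org/some-python-corpus" is a placeholder, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("some-org/some-python-corpus", split="train", streaming=True)

# Keep small, mostly-alphanumeric files, mirroring the stats columns above.
small = (row for row in ds
         if row["size"] < 10_000 and row["alphanum_fraction"] > 0.5)

for row in small:
    print(row["max_stars_repo_name"], row["max_stars_repo_path"])
    break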
hexsha: c02b45a8f695333458665968e09c2c1f509da5d6
size: 791 | ext: py | lang: Python
max_stars:  tests/util4tests.py | vliz-be-opsci/py-xmlasdict @ 783ab9ecbce47650800f3b9fa4d85c433792670d | ["MIT"] | count: null | events: null
max_issues: tests/util4tests.py | vliz-be-opsci/py-xmlasdict @ 783ab9ecbce47650800f3b9fa4d85c433792670d | ["MIT"] | count: 5 | events: 2022-02-16T10:11:09.000Z .. 2022-03-31T15:28:44.000Z
max_forks:  tests/util4tests.py | vliz-be-opsci/py-xmlasdict @ 783ab9ecbce47650800f3b9fa4d85c433792670d | ["MIT"] | count: 1 | events: 2022-02-14T10:38:58.000Z .. 2022-02-14T10:38:58.000Z
content:
import logging
import logging.config
import os
import sys

import pytest
import yaml
from dotenv import load_dotenv

log = logging.getLogger('tests')


def enable_test_logging():
    load_dotenv()
    if 'PYTEST_LOGCONF' in os.environ:
        logconf = os.environ['PYTEST_LOGCONF']
        with open(logconf, 'r') as yml_logconf:
            logging.config.dictConfig(yaml.load(yml_logconf, Loader=yaml.SafeLoader))
        log.info(f"Logging enabled according to config in {logconf}")


def run_single_test(testfile):
    enable_test_logging()
    log.info(
        f"Running tests in {testfile} "
        + "with -v(erbose) and -s(no stdout capturing) "
        + "and logging to stdout, level controlled by env var ${PYTEST_LOGCONF}")
    sys.exit(pytest.main(["-vv", "-s", testfile]))
avg_line_length: 27.275862 | max_line_length: 85 | alphanum_fraction: 0.683944
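A minimal usage sketch for the helper above. The import path and the __main__ guard are assumed usage patterns, not part of the file.

# Hypothetical test module using util4tests (import path is an assumption).
from util4tests import run_single_test, log

def test_something():
    log.debug("visible when PYTEST_LOGCONF points to a verbose config")
    assert 1 + 1 == 2

if __name__ == "__main__":
    run_single_test(__file__)  # runs only this file with -vv -s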
hexsha: 1c0832d0296309956fc0b1ed09c32b7f92ace24e
size: 556 | ext: py | lang: Python
max_stars:  Instructions.py | TomDouris/solitaire @ 255ad0f7b0959479aad98c55817457cd1c7037ea | ["MIT"] | count: null | events: null
max_issues: Instructions.py | TomDouris/solitaire @ 255ad0f7b0959479aad98c55817457cd1c7037ea | ["MIT"] | count: null | events: null
max_forks:  Instructions.py | TomDouris/solitaire @ 255ad0f7b0959479aad98c55817457cd1c7037ea | ["MIT"] | count: null | events: null
content:
# Instructions.py

import pygame

import constants
from Location import Location


class Instructions:
    def __init__(self, instructions):
        self.instructions = instructions

    def draw(self, screen):
        font = pygame.font.SysFont(constants.FONT_ARIAL, int(round(constants.CELL_WIDTH*1/2)), False, False)
        for i, instruction in enumerate(self.instructions):
            text = font.render(instruction, True, constants.BLACK)
            screen.blit(text, [constants.FRAME_MAX_X/25,
                               constants.FRAME_MAX_Y*20/25 + constants.FRAME_MAX_Y*1/25*i])
avg_line_length: 30.888889 | max_line_length: 114 | alphanum_fraction: 0.726619
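A hedged usage sketch for the class above, assuming the game's constants module defines the names draw() references (FONT_ARIAL, CELL_WIDTH, BLACK, FRAME_MAX_X/Y).

# Hypothetical pygame wiring for Instructions (window size is an assumption).
import pygame
import constants  # assumed to define FONT_ARIAL, CELL_WIDTH, BLACK, FRAME_MAX_X/Y
from Instructions import Instructions

pygame.init()
screen = pygame.display.set_mode((800, 600))
instructions = Instructions(["Click a card to select it.",
                             "Press N for a new game."])
instructions.draw(screen)
pygame.display.flip()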
hexsha: 1318b50c046c157f87b40e2c01b999abe49874f4
size: 16,278 | ext: py | lang: Python
max_stars:  pytweet/paginations.py | TheFarGG/PyTweet @ d8dbade5957fc6aba184806ffd44a9b431bd324a | ["MIT"] | count: 614 | events: 2021-10-30T04:27:34.000Z .. 2021-11-11T22:16:00.000Z
max_issues: pytweet/paginations.py | PyTweet/PyTweet @ d8dbade5957fc6aba184806ffd44a9b431bd324a | ["MIT"] | count: 14 | events: 2021-11-17T07:29:19.000Z .. 2022-03-29T08:48:23.000Z
max_forks:  pytweet/paginations.py | PyTweet/PyTweet @ d8dbade5957fc6aba184806ffd44a9b431bd324a | ["MIT"] | count: 16 | events: 2021-11-13T17:20:31.000Z .. 2022-02-21T18:57:24.000Z
content:
from __future__ import annotations

from typing import Any, List, Tuple, Optional, TYPE_CHECKING

from .errors import NoPageAvailable

if TYPE_CHECKING:
    from .http import HTTPClient
    from .type import Payload


class Pagination:
    """Represents the base class of all pagination objects.

    .. versionadded:: 1.5.0
    """

    __slots__ = (
        "__original_payload",
        "_payload",
        "_meta",
        "_next_token",
        "_previous_token",
        "_count",
        "_paginate_over",
        "_current_page_number",
        "_params",
        "item_type",
        "endpoint_request",
        "http_client",
        "pages_cache",
    )

    def __init__(
        self,
        data: Payload,
        *,
        item_type: Any,
        endpoint_request: str,
        http_client: HTTPClient,
        **kwargs: Any,
    ):
        self.__original_payload = data
        self._payload = self.__original_payload.get("data")
        self._meta = self.__original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        self._paginate_over = 0
        self._current_page_number = 1
        self._params = kwargs.get("params", None)
        self.item_type = item_type
        self.endpoint_request = endpoint_request
        self.http_client = http_client
        self.pages_cache = {1: {obj.id: obj for obj in self.content}}

    @property
    def original_payload(self):
        return self.__original_payload

    @original_payload.setter
    def original_payload(self, other: dict):
        self.__original_payload = other
        return self.original_payload

    @property
    def payload(self):
        return self._payload

    @payload.setter
    def payload(self, other: dict):
        self._payload = other
        return self._payload

    @property
    def content(self) -> list:
        """:class:`list`: Returns a list of objects from the current page's content.

        .. versionadded:: 1.5.0
        """
        return [self.item_type(data, http_client=self.http_client) for data in self.payload]

    @property
    def paginate_over(self) -> int:
        """:class:`int`: Returns how many times you have changed pages over the pagination.

        .. versionadded:: 1.5.0
        """
        return self._paginate_over

    @property
    def current_page_number(self) -> int:
        """:class:`int`: Returns the current page number.

        .. versionadded:: 1.5.0
        """
        return self._current_page_number

    @property
    def pages(self) -> List[Tuple[int, list]]:
        """List[Tuple[:class:`int`, :class:`list`]]: Returns the pages zipped with the page number
        and content, from a cache. If you have never visited a page, it may not be returned
        by this property.

        Example:

        .. code-block:: py

            for page_number, page_content in pagination.pages:
                ...  # do something

        .. versionadded:: 1.5.0
        """
        fulldata = []
        for page_number in self.pages_cache.keys():
            fulldata.append(list(self.pages_cache.get(page_number).values()))
        return zip(range(1, len(self.pages_cache) + 1), fulldata)

    def get_page_content(self, page_number: int) -> Optional[list]:
        """Gets the page `content` from the pagination's pages cache.
        If you have never visited the page you want, it may not be returned.

        .. note::
            If the page_number is 0, this automatically returns None. Specify 1 or above.

        Returns
        ---------
        Optional[:class:`list`]
            This method returns a list of objects.

        .. versionadded:: 1.5.0
        """
        content = self.pages_cache.get(page_number)
        if not content:
            return None
        return list(content.values())

    def next_page(self):
        raise NotImplementedError

    def previous_page(self):
        raise NotImplementedError


class UserPagination(Pagination):
    """Represents a pagination that handles user objects. This inherits :class:`Pagination`.

    The following methods return this object:

    * :meth:`User.fetch_following`
    * :meth:`User.fetch_followers`
    * :meth:`User.fetch_muters`
    * :meth:`User.fetch_blockers`
    * :meth:`Tweet.fetch_likers`
    * :meth:`Tweet.fetch_retweeters`
    * :meth:`List.fetch_members`

    .. versionadded:: 1.5.0
    """

    def __init__(self, data, **kwargs):
        from .user import User  # Avoid circular import error.

        super().__init__(data, item_type=User, **kwargs)

    def next_page(self):
        """Changes the `content` property to the next page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._next_token:
            raise NoPageAvailable()

        self._params["pagination_token"] = self._next_token
        res = self.http_client.request(
            "GET",
            "2",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number += 1
        self.original_payload = res
        self.payload = self.original_payload.get("data")
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {user.id: user for user in self.content}

    def previous_page(self):
        """Changes the `content` property to the previous page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._previous_token:
            raise NoPageAvailable()

        self._params["pagination_token"] = self._previous_token
        res = self.http_client.request(
            "GET",
            "2",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number -= 1
        self.original_payload = res
        self.payload = self.original_payload.get("data")
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {user.id: user for user in self.content}


class TweetPagination(Pagination):
    """Represents a pagination that handles tweet objects. This inherits :class:`Pagination`.

    The following methods return this object:

    * :meth:`User.fetch_timelines`
    * :meth:`User.fetch_liked_tweets`
    * :meth:`List.fetch_tweets`

    .. versionadded:: 1.5.0
    """

    def __init__(self, data, **kwargs):
        from .tweet import Tweet  # Avoid circular import error.

        super().__init__(data, item_type=Tweet, **kwargs)

    @property
    def content(self) -> list:
        """:class:`list`: Returns a list of objects from the current page's content.

        .. versionadded:: 1.5.0
        """
        return [
            self.item_type(data, http_client=self.http_client)
            for data in self.http_client.payload_parser.insert_pagination_object_author(self.original_payload)
        ]

    def next_page(self):
        """Changes the `content` property to the next page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._next_token:
            raise NoPageAvailable()

        self._params["pagination_token"] = self._next_token
        res = self.http_client.request(
            "GET",
            "2",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number += 1
        self.original_payload = res
        self.payload = self.content
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {tweet.id: tweet for tweet in self.content}

    def previous_page(self):
        """Changes the `content` property to the previous page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._previous_token:
            raise NoPageAvailable()

        self._params["pagination_token"] = self._previous_token
        res = self.http_client.request(
            "GET",
            "2",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number -= 1
        self.original_payload = res
        self.payload = self.content
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {tweet.id: tweet for tweet in self.content}


class ListPagination(Pagination):
    """Represents a pagination that handles list objects. This inherits :class:`Pagination`.

    The following methods return this object:

    * :meth:`User.fetch_lists`
    * :meth:`User.fetch_list_memberships`

    .. versionadded:: 1.5.0
    """

    def __init__(self, data, **kwargs):
        from .list import List as TwitterList  # Avoid circular import error

        super().__init__(data, item_type=TwitterList, **kwargs)

    @property
    def content(self) -> list:
        """:class:`list`: Returns a list of objects from the current page's content.

        .. versionadded:: 1.5.0
        """
        return [
            self.item_type(data, http_client=self.http_client)
            for data in self.http_client.payload_parser.insert_pagination_object_author(self.original_payload)
        ]

    def next_page(self):
        """Changes the `content` property to the next page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._next_token:
            raise NoPageAvailable()

        self._params["pagination_token"] = self._next_token
        res = self.http_client.request(
            "GET",
            "2",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number += 1
        self.original_payload = res
        self.payload = self.content
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {
                _TwitterList.id: _TwitterList for _TwitterList in self.content
            }

    def previous_page(self):
        """Changes the `content` property to the previous page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._previous_token:
            raise NoPageAvailable()

        self._params["pagination_token"] = self._previous_token
        res = self.http_client.request(
            "GET",
            "2",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number -= 1
        self.original_payload = res
        self.payload = self.content
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {
                _TwitterList.id: _TwitterList for _TwitterList in self.content
            }


class MessagePagination(Pagination):
    """Represents a pagination for message objects.

    The following methods return this object:

    * :meth:`Client.fetch_message_history`

    .. versionadded:: 1.5.0
    """

    def __init__(self, data, **kwargs):
        from .message import DirectMessage  # Avoid circular import error.

        data = kwargs.get("http_client").payload_parser.parse_message_to_pagination_data(data)
        super().__init__(data, item_type=DirectMessage, **kwargs)

    def next_page(self):
        """Changes the `content` property to the next page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._next_token:
            raise NoPageAvailable()

        self._params["cursor"] = self._next_token
        res = self.http_client.request(
            "GET",
            "1.1",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number += 1
        self.original_payload = self.http_client.payload_parser.parse_message_to_pagination_data(res)
        self.payload = self.content
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {message.id: message for message in self.content}

    def previous_page(self):
        """Changes the `content` property to the previous page's contents.

        Raises
        --------
        :class:`NoPageAvailable`
            Raised when you have reached the end of the pagination.

        .. versionadded:: 1.5.0
        """
        if not self._previous_token:
            raise NoPageAvailable()

        self._params["cursor"] = self._previous_token
        res = self.http_client.request(
            "GET",
            "1.1",
            self.endpoint_request,
            auth=True,
            params=self._params,
        )
        if not res:
            raise NoPageAvailable()

        previous_content = self.content
        self._current_page_number -= 1
        self.original_payload = self.http_client.payload_parser.parse_message_to_pagination_data(res)
        self.payload = self.content
        self._meta = self.original_payload.get("meta")
        self._next_token = self._meta.get("next_token")
        self._previous_token = self._meta.get("previous_token")
        self._count = 0
        if not previous_content[0] == self.content[0]:
            self.pages_cache[len(self.pages_cache) + 1] = {message.id: message for message in self.content}
avg_line_length: 30.426168 | max_line_length: 224 | alphanum_fraction: 0.602224
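A hedged end-to-end sketch for the pagination classes above. The `user` object and its fetch method come from the class docstring's list; everything else uses only the API defined in the file.

# Hedged sketch: walking a UserPagination until exhausted.
# `user` is an assumed, already-fetched pytweet User object.
from pytweet.errors import NoPageAvailable

pagination = user.fetch_followers()  # returns a UserPagination per the docstring
while True:
    for follower in pagination.content:
        print(follower.id)
    try:
        pagination.next_page()
    except NoPageAvailable:
        break

for page_number, page_content in pagination.pages:  # cached pages, per the docstring
    print(page_number, len(page_content))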
hexsha: 853f85e5abff2e69a3d31d1e2d1ccc8806ced4b2
size: 623 | ext: py | lang: Python
max_stars:  je_auto_control/utils/timeout/multiprocess_timeout.py | JE-Chen/Python_JEAutoControl @ 477bf9612e28e9ab6d0a8e269db2f699e50a3744 | ["MIT"] | count: 9 | events: 2020-10-12T06:33:36.000Z .. 2021-09-13T07:07:36.000Z
max_issues: je_auto_control/utils/timeout/multiprocess_timeout.py | JE-Chen/Python_JEAutoControl @ 477bf9612e28e9ab6d0a8e269db2f699e50a3744 | ["MIT"] | count: 2 | events: 2021-11-19T13:45:37.000Z .. 2021-12-03T12:25:28.000Z
max_forks:  je_auto_control/utils/timeout/multiprocess_timeout.py | JE-Chen/Python_JEAutoControl @ 477bf9612e28e9ab6d0a8e269db2f699e50a3744 | ["MIT"] | count: null | events: null
content:
from multiprocessing import Process

from je_auto_control.utils.exception.exceptions import AutoControlTimeoutException
from je_auto_control.utils.exception.exception_tag import timeout_need_on_main_error


def multiprocess_timeout(check_function, time: int):
    try:
        new_process = Process(target=check_function)
        new_process.start()
        new_process.join(timeout=time)
    except AutoControlTimeoutException:
        raise AutoControlTimeoutException(timeout_need_on_main_error)
    new_process.terminate()
    if new_process.exitcode is None:
        return "timeout"
    else:
        return "success"
avg_line_length: 34.611111 | max_line_length: 84 | alphanum_fraction: 0.772071
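A short hedged example of calling the helper above. On spawn-based platforms (Windows, macOS) the target function must be importable and the call must sit behind a __main__ guard, which is why the guard appears here.

# Hypothetical caller for multiprocess_timeout.
import time
from je_auto_control.utils.timeout.multiprocess_timeout import multiprocess_timeout

def slow_check():
    time.sleep(5)  # stand-in for a check that takes too long

if __name__ == "__main__":
    # join() waits 2 s; the child is still alive, so exitcode is None -> "timeout"
    print(multiprocess_timeout(slow_check, 2))  # expected: "timeout"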
hexsha: 8b9b1d6d5c3c64f635f4fcdf7f528eff4ce0daea
size: 460 | ext: py | lang: Python
max_stars:  plugins/__init__.py | Jesus-E-Rodriguez/DiscordRedditPoster @ fede2328a1bc488a90c637dbbc7d20dc7f9404b9 | ["MIT"] | count: null | events: null
max_issues: plugins/__init__.py | Jesus-E-Rodriguez/DiscordRedditPoster @ fede2328a1bc488a90c637dbbc7d20dc7f9404b9 | ["MIT"] | count: null | events: null
max_forks:  plugins/__init__.py | Jesus-E-Rodriguez/DiscordRedditPoster @ fede2328a1bc488a90c637dbbc7d20dc7f9404b9 | ["MIT"] | count: null | events: null
content:
import importlib
import inspect
import os

from discord.ext import commands

Cogs = []
for module in os.listdir(os.path.dirname(os.path.abspath(__file__))):
    if module == "__init__.py" or module[-3:] != ".py":
        continue
    mdl = importlib.import_module(f"plugins.{module[:-3]}")
    Cogs.extend(
        obj
        for name, obj in inspect.getmembers(object=mdl, predicate=inspect.isclass)
        if issubclass(obj, commands.Cog)
    )
del module
avg_line_length: 25.555556 | max_line_length: 82 | alphanum_fraction: 0.669565
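The loop above collects every commands.Cog subclass in the package into Cogs. A hedged registration sketch follows; in discord.py 2.x add_cog is a coroutine, so it is awaited in setup_hook (1.x would call bot.add_cog directly), and the Cog constructor signature is an assumption.

# Hypothetical bot wiring for the auto-discovered Cogs list above.
import discord
from discord.ext import commands
import plugins

class Bot(commands.Bot):
    async def setup_hook(self):
        for cog in plugins.Cogs:
            await self.add_cog(cog(self))  # assumes each Cog takes the bot in __init__

bot = Bot(command_prefix="!", intents=discord.Intents.default())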
hexsha: 59eb86e4f8d54b89b350c5585da089f166336173
size: 3,261 | ext: py | lang: Python
max_stars:  ODE2.py | niktryf/RungeKutta_2ndOrder @ dbb6af34fbaee08ca348a8273b72c158bb2db93f | ["MIT"] | count: 1 | events: 2019-02-12T22:18:26.000Z .. 2019-02-12T22:18:26.000Z
max_issues: ODE2.py | niktryf/Python_RungeKutta_2ndOrder @ dbb6af34fbaee08ca348a8273b72c158bb2db93f | ["MIT"] | count: null | events: null
max_forks:  ODE2.py | niktryf/Python_RungeKutta_2ndOrder @ dbb6af34fbaee08ca348a8273b72c158bb2db93f | ["MIT"] | count: null | events: null
content:
##############################################################################
### Python 2nd Order ODE solver, using the Runge-Kutta 4th order method
### Solves ordinary differential equations of the form
###
###        (d^2)x/(dt^2) = f(t, x, v)
###
### As usual, the 2nd order ODE is split into two coupled 1st order ODEs,
### which are solved with the Runge-Kutta function:
###
###                            | x'(t) = v(t)
###     x''(t) = f(t, x, v) -> |
###                            | v'(t) = f(t, x, v)
###
### User must input the following parameters:
### 1. Via command line arguments: time step, total time, output interval.
### 2. The right-hand side of the differential equation
###    inside the rhs function, in the rungekutta2.py file.
### 3. Initial conditions t_0, x_0, v_0 (example: x(t_0) = x_0, v(t_0) = v_0),
###    at the beginning of the code, right below the imports.
###
### The force can be specified inside the "force" function, in rungekutta2.py.
###
### Example: python ODE2.py 0.1 100 10
### This will solve the ODE with a timestep of dt = 0.1, for 100 time units,
### writing output every 10 time steps.
###
### Output: File "RK4_2_output.txt", also plots results.
###
### Author: Nikos Tryfonidis, November 2015
### The MIT License (MIT)
### Copyright (c) 2015 Nikos Tryfonidis
### See LICENSE.txt
##############################################################################

# Import necessary packages
import numpy as np
import sys

# Import functions
from rungekutta2 import RK4_2nd
from plot1D import plot1D

##### Set Initial Conditions Here: #####
t_0 = 0.0
x_0 = 0.0
v_0 = 2.0
########################################

#################################################################################
### Command line arguments:
#################################################################################
# Check number of command line arguments
if len(sys.argv) != 4:
    print("Usage: python ODE2.py <timestep> <total time> <output interval>")
    print("Please run again following the command line input format above.")
    print("Exiting...")
    sys.exit(1)

# Get command line arguments
h = float(sys.argv[1])
totalTime = float(sys.argv[2])
interval = int(sys.argv[3])
#################################################################################

# Calculate number of time steps
totalSteps = totalTime/h
# Find size of output
size = int(totalSteps/interval) + 1

# Print a summary of parameters
print("Running Runge-Kutta with the following parameters:")
print("Time step: %f\tTotal time: %f\t" % (h, totalTime))
print("Output interval: %d" % interval)

# Allocate variable arrays and apply given initial conditions
t = np.linspace(t_0, t_0 + totalTime, size)
x = np.zeros(size)
x[0] = x_0
v = np.zeros(size)
v[0] = v_0

# Call Runge Kutta
tOld = t[0]
xOld = x[0]
vOld = v[0]
for i in range(1, size):
    for j in range(0, interval):
        tOld += h  # Unnecessary, but kept for generality
        xOld, vOld = RK4_2nd(tOld, xOld, vOld, h)
    x[i] = xOld
    v[i] = vOld

# Save t, x and v into output file (each array is a column)
np.savetxt('RK4_2_output.txt', np.c_[t, x, v], fmt='%.10f')

# Plot t, x (plot1D function in plot1D.py)
plot1D(size, "RK4_2_output.txt")
avg_line_length: 31.057143 | max_line_length: 81 | alphanum_fraction: 0.560564
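The script delegates the actual step to RK4_2nd in rungekutta2.py, which is not included here. A minimal sketch of what that module could contain, per the header's description: the force law below (a damped harmonic oscillator) is an illustrative assumption, not the author's.

# rungekutta2.py -- hedged sketch of the user-supplied force and RK4 stepper.
def force(t, x, v):
    return -x - 0.1 * v  # illustrative assumption: damped harmonic oscillator

def RK4_2nd(t, x, v, h):
    """One RK4 step for x'' = force(t, x, v), split into x' = v, v' = force."""
    k1x, k1v = v, force(t, x, v)
    k2x, k2v = v + 0.5*h*k1v, force(t + 0.5*h, x + 0.5*h*k1x, v + 0.5*h*k1v)
    k3x, k3v = v + 0.5*h*k2v, force(t + 0.5*h, x + 0.5*h*k2x, v + 0.5*h*k2v)
    k4x, k4v = v + h*k3v, force(t + h, x + h*k3x, v + h*k3v)
    x_new = x + (h/6.0) * (k1x + 2*k2x + 2*k3x + k4x)
    v_new = v + (h/6.0) * (k1v + 2*k2v + 2*k3v + k4v)
    return x_new, v_new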
hexsha: d68e8d560c8159d5ea982b775a79bd87d1bb1030
size: 260 | ext: py | lang: Python
max_stars:  ticktick/managers/pomo.py | prnake/ticktick-py @ 33f2131deca65dc322f0c6a8447c50122fa9006b | ["MIT"] | count: null | events: null
max_issues: ticktick/managers/pomo.py | prnake/ticktick-py @ 33f2131deca65dc322f0c6a8447c50122fa9006b | ["MIT"] | count: null | events: null
max_forks:  ticktick/managers/pomo.py | prnake/ticktick-py @ 33f2131deca65dc322f0c6a8447c50122fa9006b | ["MIT"] | count: null | events: null
content:
class PomoManager:
    def __init__(self, client_class):
        self._client = client_class
        self.access_token = ''

    def start(self):
        pass

    def statistics(self):
        # https://api.dida365.com/api/v2/statistics/general
        pass
avg_line_length: 20 | max_line_length: 59 | alphanum_fraction: 0.611538
hexsha: 866e11d79fe6747354a3caa3492f636422cd3f9e
size: 624 | ext: py | lang: Python
max_stars:  couchbase/analytics.py | griels/couchbase-python-client-ng @ bcda55109f82e41041cf727d604bb335546f64e4 | ["Apache-2.0"] | count: 1 | events: 2019-10-01T19:06:29.000Z .. 2019-10-01T19:06:29.000Z
max_issues: couchbase/analytics.py | pauldx/couchbase-python-client @ 98bdd44604675f7ad844b39f72e754dec6445cbb | ["Apache-2.0"] | count: null | events: null
max_forks:  couchbase/analytics.py | pauldx/couchbase-python-client @ 98bdd44604675f7ad844b39f72e754dec6445cbb | ["Apache-2.0"] | count: null | events: null
content:
from .n1ql import *
from couchbase_core.n1ql import N1QLRequest


class AnalyticsResult(QueryResult):
    def client_context_id(self):
        return super(AnalyticsResult, self).client_context_id()

    def signature(self):
        return super(AnalyticsResult, self).signature()

    def warnings(self):
        return super(AnalyticsResult, self).warnings()

    def request_id(self):
        return super(AnalyticsResult, self).request_id()

    def __init__(self,
                 parent  # type: N1QLRequest
                 ):
        super(AnalyticsResult, self).__init__(parent)
        self._params = parent._params
avg_line_length: 27.130435 | max_line_length: 63 | alphanum_fraction: 0.669872
hexsha: dfb816afd4701b6a0dcbfd18d8b4f557cf035703
size: 16,281 | ext: py | lang: Python
max_stars:  pycwr/io/CCFile.py | zhaopingsun/pycwr @ 7459371588e6d0d6d0737e249afa3921fe073151 | ["MIT"] | count: 4 | events: 2019-12-24T06:07:59.000Z .. 2020-10-13T02:24:18.000Z
max_issues: pycwr/io/CCFile.py | zhaopingsun/pycwr @ 7459371588e6d0d6d0737e249afa3921fe073151 | ["MIT"] | count: null | events: null
max_forks:  pycwr/io/CCFile.py | zhaopingsun/pycwr @ 7459371588e6d0d6d0737e249afa3921fe073151 | ["MIT"] | count: null | events: null
content:
# -*- coding: utf-8 -*-
import numpy as np
from .BaseDataProtocol.CCProtocol import dtype_cc
from .util import _prepare_for_read, _unpack_from_buf, get_radar_info, make_time_unit_str, get_radar_sitename
import datetime
import pandas as pd
from ..core.NRadar import PRD
from ..configure.pyart_config import get_metadata, get_fillvalue
from ..configure.default_config import CINRAD_field_mapping, _LIGHT_SPEED
from ..core.PyartRadar import Radar
from netCDF4 import date2num


class CCBaseData(object):
    """
    Decodes the CC/CCJ data format
    """

    def __init__(self, filename):
        super(CCBaseData, self).__init__()
        self.filename = filename
        self.fid = _prepare_for_read(self.filename)  # checks whether the file is compressed
        # print(len(self.fid.read()))
        buf_header = self.fid.read(dtype_cc.BaseDataHeaderSize)  # read out the header buf
        self.header = self._parse_BaseDataHeader(buf_header)
        self._check_cc_basedata()
        self.fid.seek(dtype_cc.BaseDataHeaderSize, 0)  # move to the start of the radial data
        self.radial = self._parse_radial()

    def _check_cc_basedata(self):
        """Check whether the radar data is complete"""
        buf_radial_data = self.fid.read()
        assert len(buf_radial_data) == self.nrays * dtype_cc.PerRadialSize, "CC basedata size has problems!"
        return

    def _parse_BaseDataHeader(self, buf_header):
        """
        :param buf_header: buf containing only the file header
        :return:
        """
        BaseDataHeader_dict = {}
        # decode part 1 of the observation parameters
        BaseDataHeader_dict['ObsParam1'], _ = _unpack_from_buf(
            buf_header, dtype_cc.HeaderSize1_pos, dtype_cc.BaseDataHeader['RadarHeader1'])
        # decode the per-elevation observation parameters
        assert BaseDataHeader_dict['ObsParam1']['ucScanMode'] > 100, "only vol support!"
        self.nsweeps = BaseDataHeader_dict['ObsParam1']['ucScanMode'] - 100
        BaseDataHeader_dict['CutConfig'] = np.frombuffer(
            buf_header, dtype_cc.BaseDataHeader['CutConfigX30'],
            count=self.nsweeps, offset=dtype_cc.CutSize_pos)
        # decode part 2 of the observation parameters
        BaseDataHeader_dict['ObsParam2'], _ = _unpack_from_buf(
            buf_header, dtype_cc.HeaderSize2_pos, dtype_cc.BaseDataHeader['RadarHeader2'])
        self.nrays = np.sum(BaseDataHeader_dict['CutConfig']['usRecordNumber'])
        # end indices, Python style (exclusive)
        self.sweep_end_ray_index_add1 = np.cumsum(BaseDataHeader_dict['CutConfig']['usRecordNumber'])
        self.sweep_start_ray_index = self.sweep_end_ray_index_add1 - BaseDataHeader_dict['CutConfig']['usRecordNumber']
        return BaseDataHeader_dict

    def _parse_radial_single(self, buf_radial, radialnumber):
        """Parse a single radial"""
        Radial = {}
        RadialData = np.frombuffer(buf_radial, dtype_cc.RadialData(radialnumber))
        Radial['fields'] = {}
        Radial['fields']['dBZ'] = np.where(RadialData['dBZ'] != -32768,
                                           RadialData['dBZ'] / 10., np.nan).astype(np.float32)
        Radial['fields']['V'] = np.where(RadialData['V'] != -32768,
                                         RadialData['V'] / 10., np.nan).astype(np.float32)
        Radial['fields']['W'] = np.where(RadialData['W'] != -32768,
                                         RadialData['W'] / 10., np.nan).astype(np.float32)
        return Radial

    def _parse_radial(self):
        radial = []
        for isweep in range(self.nsweeps):
            radialnumber = self.header['CutConfig']['usBinNumber'][isweep]
            for _ in range(self.header['CutConfig']['usRecordNumber'][isweep]):
                buf_radial = self.fid.read(dtype_cc.PerRadialSize)
                radial.append(self._parse_radial_single(buf_radial, radialnumber))
        return radial

    def get_nyquist_velocity(self):
        """get nyquist vel per ray (the unambiguous velocity of each radial)
        :return: (nRays)
        """
        nyquist_velocity = np.concatenate(
            [np.array([self.header['CutConfig']['usMaxV'][isweep] / 100.] *
                      self.header['CutConfig']['usRecordNumber'][isweep])
             for isweep in range(self.nsweeps)])
        return nyquist_velocity.astype(np.float32)

    def get_unambiguous_range(self):
        """
        Get the unambiguous range of each radial
        :return: (nRays)
        """
        return np.concatenate(
            [np.array([self.header['CutConfig']['usMaxL'][isweep] * 10.] *
                      self.header['CutConfig']['usRecordNumber'][isweep])
             for isweep in range(self.nsweeps)])

    def get_scan_time(self):
        """
        Get the scan time of each radial
        :return: (nRays)
        """
        params = self.header['ObsParam1']
        start_year = params['ucSYear1'] * 100 + params['ucSYear2']
        end_year = params['ucEYear1'] * 100 + params['ucEYear2']
        start_time = datetime.datetime(year=start_year, month=params['ucSMonth'], day=params['ucSDay'],
                                       hour=params['ucSHour'], minute=params['ucSMinute'],
                                       second=params['ucSSecond'])
        end_time = datetime.datetime(year=end_year, month=params['ucEMonth'], day=params['ucEDay'],
                                     hour=params['ucEHour'], minute=params['ucEMinute'],
                                     second=params['ucESecond'])
        return pd.date_range(start_time, end_time, periods=self.nrays).to_pydatetime()

    def get_sweep_end_ray_index(self):
        """
        Get the end index of each sweep (inclusive)
        :return: (nsweep)
        """
        return self.sweep_end_ray_index_add1 - 1

    def get_sweep_start_ray_index(self):
        """
        Get the start index of each sweep
        :return: (nsweep)
        """
        return self.sweep_start_ray_index

    def get_rays_per_sweep(self):
        """
        Get the number of radials per sweep
        :return: (nsweep)
        """
        return (self.header['CutConfig']['usRecordNumber']).astype(np.int32)

    def get_azimuth(self):
        """
        Get the azimuth of each radial
        :return: (nRays)
        """
        return np.concatenate([np.linspace(0, 360, self.header['CutConfig']['usRecordNumber'][isweep])
                               for isweep in range(self.nsweeps)], axis=0)

    def get_elevation(self):
        """
        Get the elevation of each radial
        :return: (nRays)
        """
        return np.concatenate(
            [np.array([self.header['CutConfig']['usAngle'][isweep] / 100.] *
                      self.header['CutConfig']['usRecordNumber'][isweep])
             for isweep in range(self.nsweeps)])

    def get_latitude_longitude_altitude_frequency(self):
        """
        Get latitude, longitude, altitude, and radar frequency
        :return: lat, lon, alt, frequency
        """
        return self.header['ObsParam1']['lLatitudeValue'] / 3600000., \
               self.header['ObsParam1']['lLongitudeValue'] / 3600000., \
               self.header['ObsParam1']['lHeight'] / 1000., \
               3 * 10 ** 5 / self.header['ObsParam2']['lWavelength']

    def get_scan_type(self):
        """
        Get the scan type
        :return:
        """
        # only ppi support!
        return "ppi"

    def get_sitename(self):
        return get_radar_sitename(self.filename)


class CC2NRadar(object):
    """Bridge to the NuistRadar object"""

    def __init__(self, CC):
        self.CC = CC
        self.radial = self.CC.radial
        self.azimuth = self.get_azimuth()
        self.elevation = self.get_elevation()
        self.sweep_start_ray_index = self.get_sweep_start_ray_index()
        self.sweep_end_ray_index = self.get_sweep_end_ray_index()
        self.nrays = self.CC.nrays
        self.nsweeps = self.CC.nsweeps
        self.scan_type = self.CC.get_scan_type()
        self.latitude, self.longitude, self.altitude, self.frequency = \
            self.CC.get_latitude_longitude_altitude_frequency()
        self.bins_per_sweep = self.get_nbins_per_sweep()
        self.max_bins = self.bins_per_sweep.max()
        self.range = self.get_range_per_radial(self.max_bins)
        self.fields = self._get_fields()
        self.sitename = self.CC.get_sitename()

    def get_azimuth(self):
        """
        Get the azimuth of each radial
        :return: (nRays)
        """
        return self.CC.get_azimuth()

    def get_elevation(self):
        """
        Get the elevation of each radial
        :return: (nRays)
        """
        return self.CC.get_elevation()

    def get_rays_per_sweep(self):
        """
        Get the number of radials per sweep
        :return: (nsweep)
        """
        return (self.CC.header['CutConfig']['usRecordNumber']).astype(int)

    def get_scan_time(self):
        """
        Get the scan time of each radial
        :return: (nRays)
        """
        return self.CC.get_scan_time()

    def get_nyquist_velocity(self):
        """get nyquist vel per ray (the unambiguous velocity of each radial)
        :return: (nRays)
        """
        return self.CC.get_nyquist_velocity()

    def get_unambiguous_range(self):
        """
        Get the unambiguous range of each radial
        :return: (nRays)
        """
        return self.CC.get_unambiguous_range()

    def get_sweep_end_ray_index(self):
        """
        Get the end index of each sweep (inclusive)
        :return: (nsweep)
        """
        return self.CC.sweep_end_ray_index_add1 - 1

    def get_sweep_start_ray_index(self):
        """
        Get the start index of each sweep
        :return: (nsweep)
        """
        return self.CC.sweep_start_ray_index

    def get_nbins_per_sweep(self):
        """
        Determine the number of (Doppler) bins detected in each sweep
        :return:
        """
        return (self.CC.header['CutConfig']['usBinNumber']).astype(int)

    def get_range_per_radial(self, length):
        """
        Determine the range of each bin along the radial
        :param length:
        :return:
        """
        Resolution = self.CC.header['CutConfig']['usBindWidth'][0] * 2
        return np.linspace(Resolution, Resolution * length, length)

    def _get_fields(self):
        """Extract the data for all fields"""
        fields = {}
        field_keys = self.radial[0]['fields'].keys()
        for ikey in field_keys:
            fields[ikey] = np.array([(iray['fields'][ikey]).ravel() for iray in self.radial])
        return fields

    def get_NRadar_nyquist_speed(self):
        """array shape (nsweeps)"""
        return self.CC.header['CutConfig']['usMaxV'] / 100.

    def get_NRadar_unambiguous_range(self):
        """array shape (nsweeps)"""
        return self.CC.header['CutConfig']['usMaxL'] * 10.

    def get_fixed_angle(self):
        return self.CC.header['CutConfig']['usAngle'] / 100.

    def ToPRD(self):
        """Convert the WSR98D data to the PRD data format"""
        return PRD(fields=self.fields, scan_type=self.scan_type, time=self.get_scan_time(),
                   range=self.range, azimuth=self.azimuth, elevation=self.elevation,
                   latitude=self.latitude, longitude=self.longitude, altitude=self.altitude,
                   sweep_start_ray_index=self.sweep_start_ray_index,
                   sweep_end_ray_index=self.sweep_end_ray_index, fixed_angle=self.get_fixed_angle(),
                   bins_per_sweep=self.bins_per_sweep, nyquist_velocity=self.get_NRadar_nyquist_speed(),
                   frequency=self.frequency, unambiguous_range=self.get_NRadar_unambiguous_range(),
                   nrays=self.nrays, nsweeps=self.nsweeps, sitename=self.sitename)

    def ToPyartRadar(self):
        dts = self.get_scan_time()
        units = make_time_unit_str(min(dts))
        time = get_metadata('time')
        time['units'] = units
        time['data'] = date2num(dts, units).astype('float32')

        # range
        _range = get_metadata('range')
        # assume that the number of gates and spacing from the first ray is
        # representative of the entire volume
        _range['data'] = self.range
        _range['meters_to_center_of_first_gate'] = self.CC.header['CutConfig']['usBindWidth'][0] * 2
        _range['meters_between_gates'] = self.CC.header['CutConfig']['usBindWidth'][0] * 2

        latitude = get_metadata('latitude')
        longitude = get_metadata('longitude')
        altitude = get_metadata('altitude')
        latitude['data'] = np.array([self.latitude], dtype='float64')
        longitude['data'] = np.array([self.longitude], dtype='float64')
        altitude['data'] = np.array([self.altitude], dtype='float64')

        metadata = get_metadata('metadata')
        metadata['original_container'] = 'CINRAD/CC'
        metadata['site_name'] = self.sitename
        metadata['radar_name'] = "CINRAD/CC"

        sweep_start_ray_index = get_metadata('sweep_start_ray_index')
        sweep_end_ray_index = get_metadata('sweep_end_ray_index')
        sweep_start_ray_index['data'] = self.sweep_start_ray_index
        sweep_end_ray_index['data'] = self.sweep_end_ray_index

        sweep_number = get_metadata('sweep_number')
        sweep_number['data'] = np.arange(self.nsweeps, dtype='int32')

        scan_type = self.scan_type
        sweep_mode = get_metadata('sweep_mode')
        if self.scan_type == "ppi":
            sweep_mode['data'] = np.array(self.nsweeps * ['azimuth_surveillance'], dtype='S')
        elif self.scan_type == "rhi":
            sweep_mode['data'] = np.array(self.nsweeps * ['rhi'], dtype='S')
        else:
            sweep_mode['data'] = np.array(self.nsweeps * ['sector'], dtype='S')

        # elevation
        elevation = get_metadata('elevation')
        elevation['data'] = self.elevation

        # azimuth
        azimuth = get_metadata('azimuth')
        azimuth['data'] = self.azimuth

        # fixed_angle
        fixed_angle = get_metadata('fixed_angle')
        fixed_angle['data'] = self.get_fixed_angle()

        # instrument_parameters
        instrument_parameters = self._get_instrument_parameters()

        # fields
        fields = {}
        for field_name_abbr in self.fields.keys():
            field_name = CINRAD_field_mapping[field_name_abbr]
            if field_name is None:
                continue
            field_dic = get_metadata(field_name)
            field_dic['data'] = np.ma.masked_array(self.fields[field_name_abbr],
                                                   mask=np.isnan(self.fields[field_name_abbr]),
                                                   fill_value=get_fillvalue())
            field_dic['_FillValue'] = get_fillvalue()
            fields[field_name] = field_dic
        return Radar(time, _range, fields, metadata, scan_type,
                     latitude, longitude, altitude,
                     sweep_number, sweep_mode, fixed_angle, sweep_start_ray_index,
                     sweep_end_ray_index,
                     azimuth, elevation,
                     instrument_parameters=instrument_parameters)

    def _get_instrument_parameters(self):
        """ Return a dictionary containing instrument parameters. """
        # pulse width
        pulse_width = get_metadata('pulse_width')
        pulse_width['data'] = self.CC.header['CutConfig']['usBindWidth'][0] * 2. / _LIGHT_SPEED  # m -> sec
        # assume that the parameters in the first ray represent the beam widths,
        # bandwidth and frequency in the entire volume
        wavelength_hz = self.frequency * 10 ** 9

        # radar_beam_width_h
        radar_beam_width_h = get_metadata('radar_beam_width_h')
        radar_beam_width_h['data'] = np.array([0.703125, ], dtype='float32')

        # radar_beam_width_v
        radar_beam_width_v = get_metadata('radar_beam_width_v')
        radar_beam_width_v['data'] = np.array([0.703125, ], dtype='float32')

        # frequency
        frequency = get_metadata('frequency')
        frequency['data'] = np.array([wavelength_hz], dtype='float32')

        instrument_parameters = {
            'pulse_width': pulse_width,
            'radar_beam_width_h': radar_beam_width_h,
            'radar_beam_width_v': radar_beam_width_v,
            'frequency': frequency,
        }

        # nyquist velocity if defined
        nyquist_velocity = get_metadata('nyquist_velocity')
        nyquist_velocity['data'] = self.get_nyquist_velocity()
        instrument_parameters['nyquist_velocity'] = nyquist_velocity
        return instrument_parameters
avg_line_length: 39.904412 | max_line_length: 119 | alphanum_fraction: 0.592531
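A hedged sketch of the conversion pipeline the two classes above define, from a raw CC file to both the library's PRD container and a Py-ART Radar. The file name is a placeholder assumption.

# Hypothetical end-to-end use of CCBaseData / CC2NRadar.
from pycwr.io.CCFile import CCBaseData, CC2NRadar

cc = CCBaseData("some_cinrad_cc_volume.bin")  # placeholder file name
bridge = CC2NRadar(cc)

prd = bridge.ToPRD()                  # pycwr's native container
pyart_radar = bridge.ToPyartRadar()   # Py-ART Radar object
print(bridge.nsweeps, bridge.nrays, bridge.sitename)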
hexsha: d191db9d3897d5a9d6134d100d3719b90c1e9852
size: 2,624 | ext: py | lang: Python
max_stars:  examples/async/client_batch.py | brubbel/Pyro4 @ 791f5aff6e0c89e74264843defdd694cdaf99cc5 | ["MIT"] | count: 638 | events: 2015-01-04T14:59:55.000Z .. 2022-03-29T02:28:39.000Z
max_issues: examples/async/client_batch.py | brubbel/Pyro4 @ 791f5aff6e0c89e74264843defdd694cdaf99cc5 | ["MIT"] | count: 173 | events: 2015-01-05T17:29:19.000Z .. 2021-12-25T01:47:07.000Z
max_forks:  examples/async/client_batch.py | brubbel/Pyro4 @ 791f5aff6e0c89e74264843defdd694cdaf99cc5 | ["MIT"] | count: 103 | events: 2015-01-10T10:00:08.000Z .. 2022-03-06T14:19:20.000Z
content:
from __future__ import print_function
import sys
import time
import Pyro4

if sys.version_info < (3, 0):
    input = raw_input


def asyncFunction(values):
    results = [value + 1 for value in values]
    print(">>> async batch function called, returning:", results)
    return results


uri = input("enter async server object uri: ").strip()
proxy = Pyro4.Proxy(uri)

print("\n* batch async call:")
batch = Pyro4.batch(proxy)
batch.divide(100, 5)
batch.divide(99, 9)
batch.divide(555, 2)
print("getting results...")
asyncresults = batch(asynchronous=True)  # returns immediately
print("result value available?", asyncresults.ready)  # prints False because the server is still 'busy'
print("client can do other stuff here.")
time.sleep(2)
print("such as sleeping ;-)")
time.sleep(2)
print("sleeping some more, batch takes a while")
time.sleep(2)
print("getting result values...(will block until available)")
results = asyncresults.value  # blocks until the result is available
print("resultvalues=", list(results))

print("\n* batch async call with chained function:")
batch = Pyro4.batch(proxy)
batch.divide(100, 5)
batch.divide(99, 9)
batch.divide(555, 2)
asyncresults = batch(asynchronous=True)  # returns immediately
asyncresults.then(asyncFunction) \
    .then(asyncFunction) \
    .then(asyncFunction)
print("getting result values...(will block until available)")
print("final value=", asyncresults.value)

print("\n* batch async call with exception:")
batch = Pyro4.batch(proxy)
batch.divide(1, 1)  # first call is ok
batch.divide(100, 0)  # second call will trigger a zero division error, 100//0
asyncresults = batch(asynchronous=True)  # returns immediately
print("getting result values...")
try:
    value = asyncresults.value
    print("Weird, this shouldn't succeed!?... resultvalues=", list(value))
except ZeroDivisionError as x:
    print("got exception (expected):", repr(x))

print("\n* batch async call with timeout:")
batch = Pyro4.batch(proxy)
batch.divide(100, 5)
batch.divide(99, 9)
batch.divide(555, 2)
asyncresults = batch(asynchronous=True)  # returns immediately
print("checking if ready within 2 seconds...")
ready = asyncresults.wait(2)  # wait for ready within 2 seconds but the server takes 3
print("status after wait=", ready)  # should print False
print("checking again if ready within 10 seconds...(should be ok now)")
ready = asyncresults.wait(timeout=10)  # wait 10 seconds now (but server will be done within ~8 more seconds)
print("status after wait=", ready)
print("available=", asyncresults.ready)
results = asyncresults.value
print("resultvalues=", list(results))

print("\ndone.")
avg_line_length: 32.8 | max_line_length: 109 | alphanum_fraction: 0.73247
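The client above expects a server whose divide() is slow enough to demonstrate waiting. A hedged sketch of a matching server follows; the per-call delay is an assumption chosen to reproduce the timings narrated in the client's comments.

# Hypothetical Pyro4 server matching the batch client above.
import time
import Pyro4

@Pyro4.expose
class Calculator(object):
    def divide(self, a, b):
        time.sleep(1)  # assumed delay so the client's wait() demo behaves as narrated
        return a // b  # 100 // 0 raises ZeroDivisionError, as the client expects

daemon = Pyro4.Daemon()
uri = daemon.register(Calculator())
print("enter this uri in the client:", uri)
daemon.requestLoop()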
hexsha: 4017a9efd4813dddbd618d164b86dea4735b33c9
size: 8,710 | ext: py | lang: Python
max_stars:  py/test/selenium/webdriver/common/frame_switching_tests.py | chromium-googlesource-mirror/selenium @ fcf26da81afa5d3e8edfc776f558eebf2e7d28b3 | ["Apache-2.0"] | count: 1 | events: 2018-08-24T18:01:34.000Z .. 2018-08-24T18:01:34.000Z
max_issues: py/test/selenium/webdriver/common/frame_switching_tests.py | chromium-googlesource-mirror/selenium @ fcf26da81afa5d3e8edfc776f558eebf2e7d28b3 | ["Apache-2.0"] | count: 1 | events: 2021-10-18T12:23:37.000Z .. 2021-10-18T12:23:37.000Z
max_forks:  py/test/selenium/webdriver/common/frame_switching_tests.py | chromium-googlesource-mirror/selenium @ fcf26da81afa5d3e8edfc776f558eebf2e7d28b3 | ["Apache-2.0"] | count: 2 | events: 2018-04-30T21:35:30.000Z .. 2021-05-14T08:11:46.000Z
content:
#!/usr/bin/python
# Copyright 2008-2010 WebDriver committers
# Copyright 2008-2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re
import tempfile
import time
import shutil
import unittest

from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchFrameException


def not_available_on_remote(func):
    def testMethod(self):
        print(self.driver)
        if type(self.driver) == 'remote':
            return lambda x: None
        else:
            return func(self)
    return testMethod


class FrameSwitchingTest(unittest.TestCase):

    def testShouldBeAbleToSwitchToAFrameByItsIndex(self):
        self._loadPage("frameset")
        self.driver.switch_to_frame(2)
        element = self.driver.find_element_by_id("email")
        self.assertEquals("email", element.get_attribute("type"))

    def testShouldBeAbleToSwitchToAnIframeByItsIndex(self):
        self._loadPage("iframes")
        self.driver.switch_to_frame(0)
        element = self.driver.find_element_by_id("id-name1")
        self.assertEquals("id", element.get_attribute("value"))

    def testShouldBeAbleToSwitchToAFrameByItsName(self):
        self._loadPage("frameset")
        self.driver.switch_to_frame("fourth")
        element = self.driver.find_element_by_tag_name("frame")
        self.assertEquals("child1", element.get_attribute("name"))

    def testShouldBeAbleToSwitchToAnIframeByItsName(self):
        self._loadPage("iframes")
        self.driver.switch_to_frame("iframe1-name")
        element = self.driver.find_element_by_name("id-name1")
        self.assertEquals("name", element.get_attribute("value"))

    def testShouldBeAbleToSwitchToAFrameByItsID(self):
        self._loadPage("frameset")
        self.driver.switch_to_frame("fifth")
        element = self.driver.find_element_by_name("windowOne")
        self.assertEquals("Open new window", element.text)

    def testShouldBeAbleToSwitchToAnIframeByItsID(self):
        self._loadPage("iframes")
        self.driver.switch_to_frame("iframe1")
        element = self.driver.find_element_by_name("id-name1")
        self.assertEquals("name", element.get_attribute("value"))

    def testShouldBeAbleToSwitchToAFrameUsingAPreviouslyLocatedWebElement(self):
        self._loadPage("frameset")
        frame = self.driver.find_element_by_name("third")
        self.driver.switch_to_frame(frame)
        element = self.driver.find_element_by_id("email")
        self.assertEquals("email", element.get_attribute("type"))

    def testShouldBeAbleToSwitchToAnIFrameUsingAPreviouslyLocatedWebElement(self):
        self._loadPage("iframes")
        frame = self.driver.find_element_by_tag_name("iframe")
        self.driver.switch_to_frame(frame)
        element = self.driver.find_element_by_name("id-name1")
        self.assertEquals("name", element.get_attribute("value"))

    def testShouldEnsureElementIsAFrameBeforeSwitching(self):
        self._loadPage("frameset")
        frame = self.driver.find_element_by_tag_name("frameset")
        try:
            self.driver.switch_to_frame(frame)
            self.fail()
        except NoSuchFrameException:
            pass

    def testFrameSearchesShouldBeRelativeToTheCurrentlySelectedFrame(self):
        self._loadPage("frameset")
        self.driver.switch_to_frame("sixth")
        element = self.driver.find_element_by_id("iframe_page_heading")
        self.assertEquals("This is the heading", element.text)

        try:
            self.driver.switch_to_frame("third")
            self.fail()
        except NoSuchFrameException:
            pass

        self.driver.switch_to_default_content()
        self.driver.switch_to_frame("third")

        try:
            self.driver.switch_to_frame("third")
            self.fail()
        except NoSuchFrameException:
            pass

        # Now make sure we can go back.
        self.driver.switch_to_default_content()
        self.driver.switch_to_frame("sixth")
        element = self.driver.find_element_by_id("iframe_page_heading")
        self.assertEquals("This is the heading", element.text)

    def testShouldBeAbleToSelectChildFrames(self):
        self._loadPage("frameset")
        self.driver.switch_to_frame("sixth")
        self.driver.switch_to_frame(0)
        element = self.driver.find_element_by_id("id-name1")
        self.assertEquals("id", element.get_attribute("value"))

    def testShouldThrowFrameNotFoundExceptionLookingUpSubFramesWithSuperFrameNames(self):
        self._loadPage("frameset")
        self.driver.switch_to_frame("fourth")
        try:
            self.driver.switch_to_frame("second")
            self.fail("Expected NoSuchFrameException")
        except NoSuchFrameException:
            pass

    def testShouldThrowAnExceptionWhenAFrameCannotBeFound(self):
        self._loadPage("xhtmlTest")
        try:
            self.driver.switch_to_frame("nothing here")
            self.fail("Should not have been able to switch")
        except NoSuchFrameException:
            pass

    def testShouldThrowAnExceptionWhenAFrameCannotBeFoundByIndex(self):
        self._loadPage("xhtmlTest")
        try:
            self.driver.switch_to_frame(27)
            self.fail("Should not have been able to switch")
        except NoSuchFrameException:
            pass

    # disabled till we use the Java Webserver
    #def testThatWeStillReferToTheSameFrameOnceItHasBeenSelected(self):
    #    self._loadPage("frameset")
    #    self.driver.switch_to_frame(2)
    #    checkbox = self.driver.find_element_by_xpath("//input[@name='cheeky']")
    #    checkbox.toggle()
    #    checkbox.submit()
    #    self.assertEqual(self.driver.find_element_by_xpath("//p").text, "Success")

    # Disabled till we use the Java WebServer
    #def testThatWeShouldUseTheFirstFrameOnAPage(self):
    #    self._loadPage("frameset")
    #    time.sleep(1)
    #    pageNumber = self.driver.find_element_by_xpath('//span[@id="pageNumber"]')
    #    self.assertEqual(pageNumber.text.strip(), "1")

    # Disabled till we use the Java WebServer
    #def testThatWeFocusOnTheReplacementWhenAFrameFollowsALinkToATopTargettedPage(self):
    #    self._loadPage("frameset")
    #    time.sleep(1)
    #    self.driver.switch_to_frame(0)
    #    self.driver.find_element_by_link_text("top").click()
    #    time.sleep(1)
    #    self.assertEqual("XHTML Test Page", self.driver.title)
    #    self.assertEqual("XHTML Test Page",
    #                     self.driver.find_element_by_xpath("/html/head/title").text)

    def testThatWeShouldNotAutoSwitchFocusToAnIFrameWhenAPageContainingThemIsLoaded(self):
        self._loadPage("iframes")
        time.sleep(0.5)
        self.driver.find_element_by_id("iframe_page_heading")

    def testShouldAllowAUserToSwitchFromAnIframeBackToTheMainContentOfThePage(self):
        self._loadPage("iframes")
        self.driver.switch_to_frame(0)
        self.driver.switch_to_default_content()
        self.driver.find_element_by_id('iframe_page_heading')

    # Disabled till we use the Java WebServer
    #def testShouldAllowTheUserToSwitchToAnIFrameAndRemainFocusedOnIt(self):
    #    self._loadPage("iframes")
    #    self.driver.switch_to_frame(0)
    #    submitButton = self.driver.find_element_by_id("greeting")
    #    submitButton.click()
    #    time.sleep(1)
    #    hello = self.driver.find_element_by_id('greeting')
    #    self.assertEqual(hello.text, "Success!")

    # Disabled till we use the Java Webserver
    #def testShouldBeAbleToClickInAFrame(self):
    #    self._loadPage("frameset")
    #    self.driver.switch_to_frame("third")
    #    time.sleep(1)
    #    submitButton = self.driver.find_element_by_id("greeting")
    #    submitButton.click()
    #    time.sleep(0.5)
    #    hello = self.driver.find_element_by_id('greeting')
    #    self.assertEqual(hello.text, "Success!")
    #    self.driver.switch_to_default_content()

    def testShouldReturnFrameTitleNotWindowTitle(self):
        self._loadPage("frameset")
        self.driver.switch_to_frame("third")
        self.assertEqual("Unique title", self.driver.title)

    def _pageURL(self, name):
        return "http://localhost:%d/%s.html" % (self.webserver.port, name)

    def _loadSimplePage(self):
        self._loadPage("simpleTest")

    def _loadPage(self, name):
        self.driver.get(self._pageURL(name))
avg_line_length: 37.222222 | max_line_length: 90 | alphanum_fraction: 0.707922
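The tests above use the long-removed driver.switch_to_frame(...) spelling. In current Selenium bindings the same switches go through the switch_to attribute; the URL below is a placeholder.

# Modern-Selenium equivalents of the frame switches used above.
from selenium import webdriver
from selenium.webdriver.common.by import By

driver = webdriver.Chrome()  # assumes a local chromedriver
driver.get("http://localhost:8000/frameset.html")  # placeholder URL
driver.switch_to.frame(2)                        # by index
driver.switch_to.default_content()
driver.switch_to.frame("fourth")                 # by name or id
driver.switch_to.default_content()
frame = driver.find_element(By.TAG_NAME, "frame")
driver.switch_to.frame(frame)                    # by previously located element
driver.quit()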
hexsha: f435c1f45144c25135fa5924bbca446862c7a86a
size: 1,261 | ext: py | lang: Python
max_stars:  tests/feature/test_no_scenario.py | veliakiner/pytest-bdd @ dc82652f82a08df3410d51afc30891f8ec0c5d37 | ["MIT"] | count: null | events: null
max_issues: tests/feature/test_no_scenario.py | veliakiner/pytest-bdd @ dc82652f82a08df3410d51afc30891f8ec0c5d37 | ["MIT"] | count: 1 | events: 2019-01-12T13:09:35.000Z .. 2019-01-12T13:09:35.000Z
max_forks:  tests/feature/test_no_scenario.py | veliakiner/pytest-bdd @ dc82652f82a08df3410d51afc30891f8ec0c5d37 | ["MIT"] | count: null | events: null
content:
"""Test no scenarios defined in the feature file.""" import textwrap def test_no_scenarios(testdir): """Test no scenarios defined in the feature file.""" features = testdir.mkdir('features') features.join('test.feature').write_text(textwrap.dedent(u""" Given foo When bar Then baz """), 'utf-8', ensure=True) testdir.makepyfile(textwrap.dedent(""" from pytest_bdd import scenarios scenarios('features') """)) result = testdir.runpytest() result.stdout.fnmatch_lines( [ '*FeatureError: Step definition outside of a Scenario or a Background.*', ], ) def test_only_background_strict_mode(testdir): """Test only wrong background defined in the feature file.""" features = testdir.mkdir('features') features.join('test.feature').write_text(textwrap.dedent(u""" Background: Given foo When bar """), 'utf-8', ensure=True) testdir.makepyfile(textwrap.dedent(""" from pytest_bdd import scenarios scenarios('features') """)) result = testdir.runpytest() result.stdout.fnmatch_lines( [ '*FeatureError: Background section can only contain Given steps.*', ], )
avg_line_length: 26.270833 | max_line_length: 85 | alphanum_fraction: 0.626487
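Both tests above assert parser failures. For contrast, a hedged sketch of a well-formed counterpart that the same scenarios('features') collection would accept; the feature text and step names are illustrative assumptions.

# Hedged sketch: a well-formed counterpart to the broken feature files above.
# features/ok.feature would contain:
#
#   Feature: Example
#     Scenario: Steps inside a scenario
#       Given foo
#       When bar
#       Then baz
#
from pytest_bdd import scenarios, given, when, then

scenarios('features')

@given('foo')
def foo():
    pass

@when('bar')
def bar():
    pass

@then('baz')
def baz():
    pass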
hexsha: b7f2cc77b85a4bbc1915a1b506882079a65acab8
size: 5,877 | ext: py | lang: Python
max_stars:  cloudify_aws/codepipeline/tests/test_pipeline.py | szpotona/cloudify-aws-plugin @ 52c5240aaa6638816e7973742ad8b6f5d1c59e79 | ["Apache-2.0"] | count: null | events: null
max_issues: cloudify_aws/codepipeline/tests/test_pipeline.py | szpotona/cloudify-aws-plugin @ 52c5240aaa6638816e7973742ad8b6f5d1c59e79 | ["Apache-2.0"] | count: null | events: null
max_forks:  cloudify_aws/codepipeline/tests/test_pipeline.py | szpotona/cloudify-aws-plugin @ 52c5240aaa6638816e7973742ad8b6f5d1c59e79 | ["Apache-2.0"] | count: null | events: null
content:
# Copyright (c) 2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Standard imports
import copy
import datetime

# Third party imports
from mock import patch, MagicMock
from cloudify.state import current_ctx

# Local imports
from cloudify_aws.codepipeline.resources import pipeline
from cloudify_aws.common.tests.test_base import TestBase, CLIENT_CONFIG
from cloudify_aws.common.tests.test_base import DELETE_RESPONSE
from cloudify_aws.common.tests.test_base import DEFAULT_RUNTIME_PROPERTIES

# Constants
PIPELINE_NAME = 'Demopipeline'
PIPELINE_TH = ['cloudify.nodes.Root',
               'cloudify.nodes.aws.codepipeline.Pipeline']

NODE_PROPERTIES = {
    'resource_id': 'node_resource_id',
    'use_external_resource': False,
    'resource_config': {
        'kwargs': {'pipeline': {'name': PIPELINE_NAME, 'version': 1}}},
    'client_config': CLIENT_CONFIG
}

RUNTIME_PROPERTIES_AFTER_CREATE = {
    'aws_resource_id': PIPELINE_NAME,
    'resource_config': {},
}

TEST_DATE = datetime.datetime(2020, 1, 1)


class TestCodePipeline(TestBase):

    def setUp(self):
        super(TestCodePipeline, self).setUp()
        self.fake_boto, self.fake_client = self.fake_boto_client(
            'codepipeline')
        self.mock_patch = patch('boto3.client', self.fake_boto)
        self.mock_patch.start()

    def tearDown(self):
        self.mock_patch.stop()
        self.fake_boto = None
        self.fake_client = None
        super(TestCodePipeline, self).tearDown()

    def test_create(self):
        _ctx = self.get_mock_ctx(
            'test_create',
            test_properties=NODE_PROPERTIES,
            test_runtime_properties=DEFAULT_RUNTIME_PROPERTIES,
            type_hierarchy=PIPELINE_TH,
            ctx_operation_name='cloudify.interfaces.lifecycle.create',
        )
        current_ctx.set(_ctx)

        self.fake_client.create_pipeline = MagicMock(
            return_value={'pipeline': {'name': PIPELINE_NAME, 'version': 1}})
        self.fake_client.get_pipeline_state = MagicMock(return_value={
            'pipelineName': PIPELINE_NAME,
            'pipelineVersion': 1,
            'created': TEST_DATE
        })

        pipeline.create(
            ctx=_ctx, iface=None, params=None
        )

        self.fake_boto.assert_called_with('codepipeline', **CLIENT_CONFIG)
        self.fake_client.create_pipeline.assert_called_with(
            pipeline={"name": PIPELINE_NAME, "version": 1}
        )

        updated_runtime_prop = copy.deepcopy(RUNTIME_PROPERTIES_AFTER_CREATE)
        updated_runtime_prop['create_response'] = {
            'pipelineName': PIPELINE_NAME,
            'pipelineVersion': 1,
            'created': str(TEST_DATE)}

        # This is just because I'm not interested in the content
        # of remote_configuration right now.
        # If it doesn't exist, this test will fail, and that's good.
        _ctx.instance.runtime_properties.pop('remote_configuration')
        self.assertEqual(_ctx.instance.runtime_properties,
                         updated_runtime_prop)

    def test_delete(self):
        _ctx = self.get_mock_ctx(
            'test_delete',
            test_properties=NODE_PROPERTIES,
            test_runtime_properties=RUNTIME_PROPERTIES_AFTER_CREATE,
            type_hierarchy=PIPELINE_TH,
            ctx_operation_name='cloudify.interfaces.lifecycle.delete'
        )
        current_ctx.set(_ctx)
        self.fake_client.delete_pipeline = self.mock_return(DELETE_RESPONSE)
        pipeline.delete(ctx=_ctx, resource_config=None, iface=None)
        self.fake_boto.assert_called_with('codepipeline', **CLIENT_CONFIG)
        self.fake_client.delete_pipeline.assert_called_with(
            name=PIPELINE_NAME
        )
        self.assertEqual(
            _ctx.instance.runtime_properties, {
                '__deleted': True,
            }
        )

    def test_execute(self):
        _ctx = self.get_mock_ctx(
            'test_execute_pipeline',
            test_properties=NODE_PROPERTIES,
            test_runtime_properties=RUNTIME_PROPERTIES_AFTER_CREATE,
            type_hierarchy=PIPELINE_TH
        )
        current_ctx.set(_ctx)
        self.fake_client.start_pipeline_execution = MagicMock(
            return_value={'pipelineExecutionId': '12345'})
        pipeline.execute(ctx=_ctx, iface=None, name=PIPELINE_NAME,
                         clientRequestToken=None)
        self.fake_boto.assert_called_with('codepipeline', **CLIENT_CONFIG)
        self.fake_client.start_pipeline_execution.assert_called_with(
            name=PIPELINE_NAME
        )
        pipeline.execute(ctx=_ctx, iface=None, name=PIPELINE_NAME,
                         clientRequestToken='fake-token123')
        self.fake_client.start_pipeline_execution.assert_called_with(
            name=PIPELINE_NAME, clientRequestToken='fake-token123'
        )
        pipeline.execute(ctx=_ctx, iface=None)
        self.fake_client.start_pipeline_execution.assert_called_with(
            name=PIPELINE_NAME
        )

    def test_create_raises_UnknownServiceError(self):
        self._prepare_create_raises_UnknownServiceError(
            type_hierarchy=PIPELINE_TH,
            type_name='codepipeline',
            type_class=pipeline,
            operation_name='cloudify.interfaces.lifecycle.create',
        )
avg_line_length: 32.469613 | max_line_length: 77 | alphanum_fraction: 0.670921
hexsha: 570914b45fb44c7831d88ce26836d3e5793c0a7c
size: 648 | ext: py | lang: Python
max_stars:  payroll/manage.py | the-krafty-koder/pay-3 @ 20426ef00b78c2a65fb4031a7692866bd22a991a | ["MIT"] | count: 1 | events: 2021-03-26T19:39:30.000Z .. 2021-03-26T19:39:30.000Z
max_issues: payroll/manage.py | the-krafty-koder/pay-3 @ 20426ef00b78c2a65fb4031a7692866bd22a991a | ["MIT"] | count: 12 | events: 2020-10-27T22:47:01.000Z .. 2022-03-12T00:42:49.000Z
max_forks:  payroll/manage.py | the-krafty-koder/pay-3 @ 20426ef00b78c2a65fb4031a7692866bd22a991a | ["MIT"] | count: null | events: null
content:
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'payroll.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
avg_line_length: 29.454545 | max_line_length: 74 | alphanum_fraction: 0.660494
hexsha: 01c719c4f24136821389b8e42bb6684219f906c9
size: 4,853 | ext: py | lang: Python
max_stars:  ShadowPager/src/pager_client.py | cPolaris/Toys @ 3aae402144d43813633e39110786b5eab5a906a3 | ["MIT"] | count: 2 | events: 2018-04-02T17:22:58.000Z .. 2018-04-02T17:23:05.000Z
max_issues: ShadowPager/src/pager_client.py | cPolaris/toys @ 3aae402144d43813633e39110786b5eab5a906a3 | ["MIT"] | count: null | events: null
max_forks:  ShadowPager/src/pager_client.py | cPolaris/toys @ 3aae402144d43813633e39110786b5eab5a906a3 | ["MIT"] | count: null | events: null
content:
from __future__ import absolute_import, division, print_function, \
    with_statement

from shadowsocks import shell, daemon, eventloop, tcprelay, udprelay, \
    asyncdns, encrypt
from db import ServersManager

import os
import random
import string
import sys
import json
import logging
import signal
import socket

sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../'))

BUF_SIZE = 2048


class ClientPager:
    def __init__(self, addr, secret, config):
        self.crypto = encrypt.Encryptor(config['pager_secret'],
                                        config['method'])
        self.server_addr = addr
        self.server_secret = secret
        self.config = config
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                  socket.IPPROTO_UDP)

    def send_crypt(self, data, action):
        data = json.dumps(data)
        self.sock.connect(self.server_addr)
        self.sock.send(self.crypto.encrypt(data))
        logging.info('Sent %s to [%s]: ' % (action, self.server_addr,))

    def recv_crypt(self):
        reply = self.sock.recv(BUF_SIZE)
        return self.crypto.decrypt(reply)

    def get_garbage(self):
        # random padding so encrypted control datagrams vary in length
        return ''.join(
            random.choice(string.ascii_uppercase + string.digits)
            for _ in range(random.randint(100, 200)))

    def log_reply(self, reply_dict):
        pretty_str = json.dumps(reply_dict, sort_keys=True, indent=4,
                                separators=(',', ': '))
        logging.info('Reply from [%s]\n%s: ' % ((self.server_addr,),
                                                pretty_str))

    def exec_command(self, command):
        if command == 'poke':
            self.send_crypt({'act': 'POKE', 'discard': self.get_garbage()},
                            'POKE')
            reply = self.recv_crypt()
            self.log_reply(json.loads(reply))
        elif command == 'add':
            logging.warning('Adding a new service port for %s'
                            % (self.server_addr,))
            port = int(raw_input('Enter port:'))
            secret = raw_input('Enter secret:')
            self.send_crypt({'act': 'ADD_SERVICE', 'port': port,
                             'secret': secret}, 'ADD_SERVICE')
            reply = self.recv_crypt()
            self.log_reply(json.loads(reply))
        elif command == 'rm':
            logging.warning('Removing a service port for %s'
                            % (self.server_addr,))
            port = int(raw_input('Enter port:'))
            self.send_crypt({'act': 'RM_SERVICE', 'port': port}, 'RM_SERVICE')
            reply = self.recv_crypt()
            self.log_reply(json.loads(reply))
        elif command == 'ls':
            self.send_crypt({'act': 'LS_SERVICE',
                             'discard': self.get_garbage()}, 'LS_SERVICE')
            reply = self.recv_crypt()
            self.log_reply(json.loads(reply))
        # elif command == 'lsall':
        #     self.send_crypt({'act': 'LS_SERVICE_ALL',
        #                      'discard': self.get_garbage()},
        #                     'LS_SERVICE_ALL')
        #     reply = self.recv_crypt()
        #     self.log_reply(json.loads(reply))
        # elif command == 'join':
        #     logging.error('')
        #     ip = raw_input('Enter IP address:')
        #     port = raw_input('Enter port:')
        #     secret = raw_input('Enter secret:')
        #     self.send_crypt({'act': 'JOIN', 'ip': ip, 'port': port,
        #                      'secret': secret}, 'JOIN')
        #     reply = self.recv_crypt()
        #     self.log_reply(json.loads(reply))
        else:
            logging.error('NOT SUPPORTED')


def start_service(config):
    # start local service
    logging.info("starting local at %s:%d" %
                 (config['local_address'], config['local_port']))

    dns_resolver = asyncdns.DNSResolver()
    tcp_server = tcprelay.TCPRelay(config, dns_resolver, True)
    udp_server = udprelay.UDPRelay(config, dns_resolver, True)
    loop = eventloop.EventLoop()
    dns_resolver.add_to_loop(loop)
    tcp_server.add_to_loop(loop)
    udp_server.add_to_loop(loop)

    def handler(signum, _):
        logging.warn('received SIGQUIT, doing graceful shutting down..')
        tcp_server.close(next_tick=True)
        udp_server.close(next_tick=True)

    signal.signal(getattr(signal, 'SIGQUIT', signal.SIGTERM), handler)

    def int_handler(signum, _):
        sys.exit(1)

    signal.signal(signal.SIGINT, int_handler)

    daemon.set_user(config.get('user', None))
    loop.run()


@shell.exception_handle(self_=False, exit_code=1)
def main():
    config = shell.get_config(True)
    command = config['daemon']
    if not command:
        shell.print_help(True)
        return

    sm = ServersManager(config['db-path'])
    addr = (config['server'], int(config['pager_port']))
    pager = ClientPager(addr, config['pager_secret'], config)

    if command in ['start', 'stop', 'restart']:
        daemon.daemon_exec(config)
    else:
        pager.exec_command(command)

    if config['daemon'] == 'start':
        start_service(config)


if __name__ == '__main__':
    main()
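Below is a minimal, hypothetical driver for the ClientPager defined above, not part of the original file. The host, port, secret, and cipher values are illustrative assumptions; only the config keys mirror what the script actually reads.

# Hypothetical values for illustration only -- not from a real deployment.
config = {
    'server': '127.0.0.1',        # assumed pager host
    'pager_port': 8388,           # assumed pager UDP port
    'pager_secret': 'change-me',  # shared secret for the control channel
    'method': 'aes-256-cfb',      # any cipher shadowsocks' Encryptor supports
}

pager = ClientPager((config['server'], int(config['pager_port'])),
                    config['pager_secret'], config)
pager.exec_command('poke')  # one encrypted JSON datagram out, reply is logged
pager.exec_command('ls')    # lists the service ports known to the server

Since each command is a single Encryptor-wrapped UDP datagram, the server side must be configured with the same pager_secret and method for the round trip to decrypt cleanly.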
34.41844
98
0.612199
4709dff73fca513e2ad364ebe0824fb995cd80d1
25379
py
Python
codes/FA-SRGAN/models/modules/architecture.py
SamirMitha/SuperResolution
233b2736c7187d5c0c214af5eb60dba3f96bb48c
[ "MIT" ]
2
2021-12-21T21:06:23.000Z
2021-12-24T18:56:15.000Z
codes/FA-SRGAN/models/modules/architecture.py
SamirMitha/SuperResolution
233b2736c7187d5c0c214af5eb60dba3f96bb48c
[ "MIT" ]
null
null
null
codes/FA-SRGAN/models/modules/architecture.py
SamirMitha/SuperResolution
233b2736c7187d5c0c214af5eb60dba3f96bb48c
[ "MIT" ]
1
2021-12-21T21:14:05.000Z
2021-12-21T21:14:05.000Z
import math import torch import torch.nn as nn import torchvision from . import block as B from . import spectral_norm as SN #################### # Generator #################### class SRResNet(nn.Module): def __init__(self, in_nc, out_nc, nf, nb, upscale=4, norm_type='batch', act_type='relu', \ mode='NAC', res_scale=1, upsample_mode='upconv'): super(SRResNet, self).__init__() n_upscale = int(math.log(upscale, 2)) if upscale == 3: n_upscale = 1 fea_conv = B.conv_block(in_nc, nf, kernel_size=3, norm_type=None, act_type=None) resnet_blocks = [B.ResNetBlock(nf, nf, nf, norm_type=norm_type, act_type=act_type,\ mode=mode, res_scale=res_scale) for _ in range(nb)] LR_conv = B.conv_block(nf, nf, kernel_size=3, norm_type=norm_type, act_type=None, mode=mode) if upsample_mode == 'upconv': upsample_block = B.upconv_blcok elif upsample_mode == 'pixelshuffle': upsample_block = B.pixelshuffle_block else: raise NotImplementedError('upsample mode [{:s}] is not found'.format(upsample_mode)) if upscale == 3: upsampler = upsample_block(nf, nf, 3, act_type=act_type) else: upsampler = [upsample_block(nf, nf, act_type=act_type) for _ in range(n_upscale)] HR_conv0 = B.conv_block(nf, nf, kernel_size=3, norm_type=None, act_type=act_type) HR_conv1 = B.conv_block(nf, out_nc, kernel_size=3, norm_type=None, act_type=None) self.model = B.sequential(fea_conv, B.ShortcutBlock(B.sequential(*resnet_blocks, LR_conv)),\ *upsampler, HR_conv0, HR_conv1) def forward(self, x): x = self.model(x) return x class RRDBNet(nn.Module): def __init__(self, in_nc, out_nc, nf, nb, gc=32, upscale=4, norm_type=None, \ act_type='leakyrelu', mode='CNA', upsample_mode='upconv'): super(RRDBNet, self).__init__() n_upscale = int(math.log(upscale, 2)) if upscale == 3: n_upscale = 1 fea_conv = B.conv_block(in_nc, nf, kernel_size=3, norm_type=None, act_type=None) rb_blocks = [B.RRDB(nf, kernel_size=3, gc=32, stride=1, bias=True, pad_type='zero', \ norm_type=norm_type, act_type=act_type, mode='CNA') for _ in range(nb)] LR_conv = B.conv_block(nf, nf, kernel_size=3, norm_type=norm_type, act_type=None, mode=mode) if upsample_mode == 'upconv': upsample_block = B.upconv_blcok elif upsample_mode == 'pixelshuffle': upsample_block = B.pixelshuffle_block else: raise NotImplementedError('upsample mode [{:s}] is not found'.format(upsample_mode)) if upscale == 3: upsampler = upsample_block(nf, nf, 3, act_type=act_type) else: upsampler = [upsample_block(nf, nf, act_type=act_type) for _ in range(n_upscale)] HR_conv0 = B.conv_block(nf, nf, kernel_size=3, norm_type=None, act_type=act_type) HR_conv1 = B.conv_block(nf, out_nc, kernel_size=3, norm_type=None, act_type=None) self.model = B.sequential(fea_conv, B.ShortcutBlock(B.sequential(*rb_blocks, LR_conv)),\ *upsampler, HR_conv0, HR_conv1) def forward(self, x): x = self.model(x) return x # ex class model_ex(nn.Module): def __init__(self, in_nc, out_nc, nf, nb, gc=32, upscale=4, norm_type=None, \ act_type='leakyrelu', mode='CNA', upsample_mode='upconv'): super(model_ex, self).__init__() fea_conv = B.conv_block(in_nc, nf, kernel_size=3, norm_type=None, act_type=None) rb_blocks = [B.RRDB(nf, kernel_size=3, gc=32, stride=1, bias=True, pad_type='zero', \ norm_type=norm_type, act_type=act_type, mode='CNA') for _ in range(nb)] # LR_conv = B.conv_block(nf, nf, kernel_size=3, norm_type=norm_type, act_type=None, mode=mode) self.model = B.sequential(fea_conv, B.ShortcutBlock(B.sequential(*rb_blocks))) #, LR_conv def forward(self, x): x = self.model(x) return x class model_ex_rb(nn.Module): def __init__(self, in_nc, out_nc, nf, 
nb, upscale=4, norm_type='batch', act_type='relu', \ mode='NAC', res_scale=1, upsample_mode='upconv'): super(model_ex_rb, self).__init__() fea_conv = B.conv_block(in_nc, nf, kernel_size=3, norm_type=None, act_type=None) rb_blocks = [B.ResNetBlock(nf, nf, nf, norm_type=norm_type, act_type=act_type,\ mode=mode, res_scale=res_scale) for _ in range(nb)] LR_conv = B.conv_block(nf, nf, kernel_size=3, norm_type=norm_type, act_type=None, mode=mode) self.model = B.sequential(fea_conv, B.ShortcutBlock(B.sequential(*rb_blocks, LR_conv))) def forward(self, x): x = self.model(x) return x class SRResNet_G(nn.Module): def __init__(self, in_nc, out_nc, nf, nb, upscale=4, norm_type='batch', act_type='relu', \ mode='NAC', res_scale=1, upsample_mode='upconv'): super(SRResNet_G, self).__init__() n_upscale = int(math.log(upscale, 2)) if upscale == 3: n_upscale = 1 # fea_conv = B.conv_block(in_nc, nf, kernel_size=3, norm_type=None, act_type=None) resnet_blocks = [B.ResNetBlock(nf, nf, nf, norm_type=norm_type, act_type=act_type, \ mode=mode, res_scale=res_scale) for _ in range(nb)] LR_conv = B.conv_block(nf, nf, kernel_size=3, norm_type=norm_type, act_type=None, mode=mode) if upsample_mode == 'upconv': upsample_block = B.upconv_blcok elif upsample_mode == 'pixelshuffle': upsample_block = B.pixelshuffle_block else: raise NotImplementedError('upsample mode [{:s}] is not found'.format(upsample_mode)) if upscale == 3: upsampler = upsample_block(nf, nf, 3, act_type=act_type) else: upsampler = [upsample_block(nf, nf, act_type=act_type) for _ in range(n_upscale)] HR_conv0 = B.conv_block(nf, nf, kernel_size=3, norm_type=None, act_type=act_type) HR_conv1 = B.conv_block(nf, out_nc, kernel_size=3, norm_type=None, act_type=None) self.model = B.sequential(B.ShortcutBlock(B.sequential(*resnet_blocks, LR_conv)),\ *upsampler, HR_conv0, HR_conv1) def forward(self, x): x = self.model(x) return x class RRDBNet_G(nn.Module): def __init__(self, in_nc, out_nc, nf, nb, gc=32, upscale=4, norm_type=None, \ act_type='leakyrelu', mode='CNA', upsample_mode='upconv'): super(RRDBNet_G, self).__init__() n_upscale = int(math.log(upscale, 2)) if upscale == 3: n_upscale = 1 fea_conv = B.conv_block(in_nc, nf, kernel_size=3, norm_type=None, act_type=None) rb_blocks = [B.RRDB(nf, kernel_size=3, gc=32, stride=1, bias=True, pad_type='zero', \ norm_type=norm_type, act_type=act_type, mode='CNA') for _ in range(nb)] LR_conv = B.conv_block(nf, nf, kernel_size=3, norm_type=norm_type, act_type=None, mode=mode) if upsample_mode == 'upconv': upsample_block = B.upconv_blcok elif upsample_mode == 'pixelshuffle': upsample_block = B.pixelshuffle_block else: raise NotImplementedError('upsample mode [{:s}] is not found'.format(upsample_mode)) if upscale == 3: upsampler = upsample_block(nf, nf, 3, act_type=act_type) else: upsampler = [upsample_block(nf, nf, act_type=act_type) for _ in range(n_upscale)] HR_conv0 = B.conv_block(nf, nf, kernel_size=3, norm_type=None, act_type=act_type) HR_conv1 = B.conv_block(nf, out_nc, kernel_size=3, norm_type=None, act_type=None) # self.model = B.sequential(fea_conv, B.ShortcutBlock(B.sequential(*rb_blocks, LR_conv)),\ # *upsampler, HR_conv0, HR_conv1) self.model = B.sequential(fea_conv, B.ShortcutBlock(B.sequential(*rb_blocks))) def forward(self, x): x = self.model(x) return x #################### # Discriminator #################### # VGG style Discriminator with input size 128*128 class Discriminator_VGG_128(nn.Module): def __init__(self, in_nc, base_nf, norm_type='batch', act_type='leakyrelu', mode='CNA'): 
super(Discriminator_VGG_128, self).__init__() # features # hxw, c # 128, 64 conv0 = B.conv_block(in_nc, base_nf, kernel_size=3, norm_type=None, act_type=act_type, \ mode=mode) conv1 = B.conv_block(base_nf, base_nf, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 64, 64 conv2 = B.conv_block(base_nf, base_nf*2, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv3 = B.conv_block(base_nf*2, base_nf*2, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 32, 128 conv4 = B.conv_block(base_nf*2, base_nf*4, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv5 = B.conv_block(base_nf*4, base_nf*4, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 16, 256 conv6 = B.conv_block(base_nf*4, base_nf*8, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv7 = B.conv_block(base_nf*8, base_nf*8, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 8, 512 conv8 = B.conv_block(base_nf*8, base_nf*8, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv9 = B.conv_block(base_nf*8, base_nf*8, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 4, 512 self.features = B.sequential(conv0, conv1, conv2, conv3, conv4, conv5, conv6, conv7, conv8,\ conv9) # classifier self.classifier = nn.Sequential( nn.Linear(512 * 4 * 4, 100), nn.LeakyReLU(0.2, True), nn.Linear(100, 1)) def forward(self, x): x = self.features(x) x = x.view(x.size(0), -1) x = self.classifier(x) return x # VGG style Discriminator with input size 128*128, Spectral Normalization class Discriminator_VGG_128_SN(nn.Module): def __init__(self): super(Discriminator_VGG_128_SN, self).__init__() # features # hxw, c # 128, 64 self.lrelu = nn.LeakyReLU(0.2, True) self.conv0 = SN.spectral_norm(nn.Conv2d(3, 64, 3, 1, 1)) self.conv1 = SN.spectral_norm(nn.Conv2d(64, 64, 4, 2, 1)) # 64, 64 self.conv2 = SN.spectral_norm(nn.Conv2d(64, 128, 3, 1, 1)) self.conv3 = SN.spectral_norm(nn.Conv2d(128, 128, 4, 2, 1)) # 32, 128 self.conv4 = SN.spectral_norm(nn.Conv2d(128, 256, 3, 1, 1)) self.conv5 = SN.spectral_norm(nn.Conv2d(256, 256, 4, 2, 1)) # 16, 256 self.conv6 = SN.spectral_norm(nn.Conv2d(256, 512, 3, 1, 1)) self.conv7 = SN.spectral_norm(nn.Conv2d(512, 512, 4, 2, 1)) # 8, 512 self.conv8 = SN.spectral_norm(nn.Conv2d(512, 512, 3, 1, 1)) self.conv9 = SN.spectral_norm(nn.Conv2d(512, 512, 4, 2, 1)) # 4, 512 # classifier self.linear0 = SN.spectral_norm(nn.Linear(512 * 4 * 4, 100)) self.linear1 = SN.spectral_norm(nn.Linear(100, 1)) def forward(self, x): x = self.lrelu(self.conv0(x)) x = self.lrelu(self.conv1(x)) x = self.lrelu(self.conv2(x)) x = self.lrelu(self.conv3(x)) x = self.lrelu(self.conv4(x)) x = self.lrelu(self.conv5(x)) x = self.lrelu(self.conv6(x)) x = self.lrelu(self.conv7(x)) x = self.lrelu(self.conv8(x)) x = self.lrelu(self.conv9(x)) x = x.view(x.size(0), -1) x = self.lrelu(self.linear0(x)) x = self.linear1(x) return x class Discriminator_VGG_96(nn.Module): def __init__(self, in_nc, base_nf, norm_type='batch', act_type='leakyrelu', mode='CNA'): super(Discriminator_VGG_96, self).__init__() # features # hxw, c # 96, 64 conv0 = B.conv_block(in_nc, base_nf, kernel_size=3, norm_type=None, act_type=act_type, \ mode=mode) conv1 = B.conv_block(base_nf, base_nf, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 48, 64 conv2 = B.conv_block(base_nf, base_nf*2, kernel_size=3, stride=1, 
norm_type=norm_type, \ act_type=act_type, mode=mode) conv3 = B.conv_block(base_nf*2, base_nf*2, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 24, 128 conv4 = B.conv_block(base_nf*2, base_nf*4, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv5 = B.conv_block(base_nf*4, base_nf*4, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 12, 256 conv6 = B.conv_block(base_nf*4, base_nf*8, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv7 = B.conv_block(base_nf*8, base_nf*8, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 6, 512 conv8 = B.conv_block(base_nf*8, base_nf*8, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv9 = B.conv_block(base_nf*8, base_nf*8, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 3, 512 self.features = B.sequential(conv0, conv1, conv2, conv3, conv4, conv5, conv6, conv7, conv8,\ conv9) # classifier self.classifier = nn.Sequential( nn.Linear(512 * 3 * 3, 100), nn.LeakyReLU(0.2, True), nn.Linear(100, 1)) def forward(self, x): x = self.features(x) x = x.view(x.size(0), -1) x = self.classifier(x) return x class Discriminator_VGG_192(nn.Module): def __init__(self, in_nc, base_nf, norm_type='batch', act_type='leakyrelu', mode='CNA'): super(Discriminator_VGG_192, self).__init__() # features # hxw, c # 192, 64 conv0 = B.conv_block(in_nc, base_nf, kernel_size=3, norm_type=None, act_type=act_type, \ mode=mode) conv1 = B.conv_block(base_nf, base_nf, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 96, 64 conv2 = B.conv_block(base_nf, base_nf*2, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv3 = B.conv_block(base_nf*2, base_nf*2, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 48, 128 conv4 = B.conv_block(base_nf*2, base_nf*4, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv5 = B.conv_block(base_nf*4, base_nf*4, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 24, 256 conv6 = B.conv_block(base_nf*4, base_nf*8, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv7 = B.conv_block(base_nf*8, base_nf*8, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 12, 512 conv8 = B.conv_block(base_nf*8, base_nf*8, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv9 = B.conv_block(base_nf*8, base_nf*8, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 6, 512 conv10 = B.conv_block(base_nf*8, base_nf*8, kernel_size=3, stride=1, norm_type=norm_type, \ act_type=act_type, mode=mode) conv11 = B.conv_block(base_nf*8, base_nf*8, kernel_size=4, stride=2, norm_type=norm_type, \ act_type=act_type, mode=mode) # 3, 512 self.features = B.sequential(conv0, conv1, conv2, conv3, conv4, conv5, conv6, conv7, conv8,\ conv9, conv10, conv11) # classifier self.classifier = nn.Sequential( nn.Linear(512 * 3 * 3, 100), nn.LeakyReLU(0.2, True), nn.Linear(100, 1)) def forward(self, x): x = self.features(x) x = x.view(x.size(0), -1) x = self.classifier(x) return x #################### # Perceptual Network #################### # Assume input range is [0, 1] class VGGFeatureExtractor(nn.Module): def __init__(self, feature_layer=34, use_bn=False, use_input_norm=True, device=torch.device('cpu')): super(VGGFeatureExtractor, 
self).__init__() if use_bn: model = torchvision.models.vgg19_bn(pretrained=True) else: model = torchvision.models.vgg19(pretrained=True) self.use_input_norm = use_input_norm if self.use_input_norm: mean = torch.Tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).to(device) # [0.485-1, 0.456-1, 0.406-1] if input in range [-1,1] std = torch.Tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).to(device) # [0.229*2, 0.224*2, 0.225*2] if input in range [-1,1] self.register_buffer('mean', mean) self.register_buffer('std', std) self.features = nn.Sequential(*list(model.features.children())[:(feature_layer + 1)]) # No need to BP to variable for k, v in self.features.named_parameters(): v.requires_grad = False def forward(self, x): if self.use_input_norm: x = (x - self.mean) / self.std output = self.features(x) return output # Assume input range is [0, 1] class ResNet101FeatureExtractor(nn.Module): def __init__(self, use_input_norm=True, device=torch.device('cpu')): super(ResNet101FeatureExtractor, self).__init__() model = torchvision.models.resnet101(pretrained=True) self.use_input_norm = use_input_norm if self.use_input_norm: mean = torch.Tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).to(device) # [0.485-1, 0.456-1, 0.406-1] if input in range [-1,1] std = torch.Tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).to(device) # [0.229*2, 0.224*2, 0.225*2] if input in range [-1,1] self.register_buffer('mean', mean) self.register_buffer('std', std) self.features = nn.Sequential(*list(model.children())[:8]) # No need to BP to variable for k, v in self.features.named_parameters(): v.requires_grad = False def forward(self, x): if self.use_input_norm: x = (x - self.mean) / self.std output = self.features(x) return output class MINCNet(nn.Module): def __init__(self): super(MINCNet, self).__init__() self.ReLU = nn.ReLU(True) self.conv11 = nn.Conv2d(3, 64, 3, 1, 1) self.conv12 = nn.Conv2d(64, 64, 3, 1, 1) self.maxpool1 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True) self.conv21 = nn.Conv2d(64, 128, 3, 1, 1) self.conv22 = nn.Conv2d(128, 128, 3, 1, 1) self.maxpool2 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True) self.conv31 = nn.Conv2d(128, 256, 3, 1, 1) self.conv32 = nn.Conv2d(256, 256, 3, 1, 1) self.conv33 = nn.Conv2d(256, 256, 3, 1, 1) self.maxpool3 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True) self.conv41 = nn.Conv2d(256, 512, 3, 1, 1) self.conv42 = nn.Conv2d(512, 512, 3, 1, 1) self.conv43 = nn.Conv2d(512, 512, 3, 1, 1) self.maxpool4 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True) self.conv51 = nn.Conv2d(512, 512, 3, 1, 1) self.conv52 = nn.Conv2d(512, 512, 3, 1, 1) self.conv53 = nn.Conv2d(512, 512, 3, 1, 1) def forward(self, x): out = self.ReLU(self.conv11(x)) out = self.ReLU(self.conv12(out)) out = self.maxpool1(out) out = self.ReLU(self.conv21(out)) out = self.ReLU(self.conv22(out)) out = self.maxpool2(out) out = self.ReLU(self.conv31(out)) out = self.ReLU(self.conv32(out)) out = self.ReLU(self.conv33(out)) out = self.maxpool3(out) out = self.ReLU(self.conv41(out)) out = self.ReLU(self.conv42(out)) out = self.ReLU(self.conv43(out)) out = self.maxpool4(out) out = self.ReLU(self.conv51(out)) out = self.ReLU(self.conv52(out)) out = self.conv53(out) return out # Assume input range is [0, 1] class MINCFeatureExtractor(nn.Module): def __init__(self, feature_layer=34, use_bn=False, use_input_norm=True, \ device=torch.device('cpu')): super(MINCFeatureExtractor, self).__init__() self.features = MINCNet() self.features.load_state_dict( torch.load('../experiments/pretrained_models/VGG16minc_53.pth'), 
strict=True) self.features.eval() # No need to BP to variable for k, v in self.features.named_parameters(): v.requires_grad = False def forward(self, x): output = self.features(x) return output import torch import torch.nn as nn import torch.nn.functional as F class double_conv(nn.Module): '''(conv => BN => ReLU) * 2''' def __init__(self, in_ch, out_ch): super(double_conv, self).__init__() self.conv = nn.Sequential( nn.Conv2d(in_ch, out_ch, 3, padding=1), nn.BatchNorm2d(out_ch, out_ch), # nn.GroupNorm(int(out_ch/4),out_ch), nn.ReLU(inplace=True), nn.Conv2d(out_ch, out_ch, 3, padding=1), nn.BatchNorm2d(out_ch), # nn.GroupNorm(int(out_ch / 4), out_ch), nn.ReLU(inplace=True) ) def forward(self, x): x = self.conv(x) return x class inconv(nn.Module): def __init__(self, in_ch, out_ch): super(inconv, self).__init__() self.conv = double_conv(in_ch, out_ch) def forward(self, x): x = self.conv(x) return x class down(nn.Module): def __init__(self, in_ch, out_ch): super(down, self).__init__() self.mpconv = nn.Sequential( nn.MaxPool2d(2), double_conv(in_ch, out_ch) ) def forward(self, x): x = self.mpconv(x) return x class up(nn.Module): def __init__(self, in_ch, out_ch, bilinear=True): super(up, self).__init__() # would be a nice idea if the upsampling could be learned too, # but my machine do not have enough memory to handle all those weights if bilinear: self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True) else: self.up = nn.ConvTranspose2d(in_ch // 2, in_ch // 2, 2, stride=2) self.conv = double_conv(in_ch, out_ch) def forward(self, x1, x2): x1 = self.up(x1) # input is CHW diffY = x2.size()[2] - x1.size()[2] diffX = x2.size()[3] - x1.size()[3] x1 = F.pad(x1, (diffX // 2, diffX - diffX // 2, diffY // 2, diffY - diffY // 2)) # for padding issues, see # https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd x = torch.cat([x2, x1], dim=1) x = self.conv(x) return x class outconv(nn.Module): def __init__(self, in_ch, out_ch): super(outconv, self).__init__() self.conv = nn.Conv2d(in_ch, out_ch, 1) def forward(self, x): x = self.conv(x) return x class UNet(nn.Module): def __init__(self): super(UNet, self).__init__() n_channels=3 # 64 n_classes=3 self.inc = inconv(n_channels, 64) self.down1 = down(64, 128) self.down2 = down(128, 256) self.down3 = down(256, 256) # self.down4 = down(512, 512) # self.up1 = up(1024, 256) self.up2 = up(512, 128) self.up3 = up(256, 64) self.up4 = up(128, 64) self.outc = outconv(64, 3) # patch_size = 192 // 8 patch_size = 128 // 8 m_classifier = [ nn.Linear(256 * patch_size ** 2, 1024), nn.LeakyReLU(negative_slope=0.2, inplace=True), nn.Linear(1024, 1) ] # self.features = nn.Sequential(*m_features) self.classifier = nn.Sequential(*m_classifier) def forward(self, x): x1 = self.inc(x) x2 = self.down1(x1) #128 x3 = self.down2(x2) #256 x4 = self.down3(x3) #512 # features = self.features(x) output = self.classifier(x4.view(x4.size(0), -1)) # x5 = self.down4(x4) # x = self.up1(x5, x4) x = self.up2(x4, x3) x = self.up3(x, x2) x = self.up4(x, x1) x = self.outc(x) return output, x
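A short smoke test sketch for the generator classes above, not part of the original file. The channel and block counts are the usual ESRGAN-style defaults, assumed here for illustration rather than taken from this repository's configs.

import torch

# 3-channel RGB in/out, 64 features, 23 RRDB blocks, 4x upscaling --
# typical ESRGAN-style settings, assumed for illustration.
netG = RRDBNet(in_nc=3, out_nc=3, nf=64, nb=23, gc=32, upscale=4)

x = torch.randn(1, 3, 32, 32)  # dummy low-resolution batch
with torch.no_grad():
    y = netG(x)
print(y.shape)  # two upconv stages at upscale=4 -> torch.Size([1, 3, 128, 128])

SRResNet takes the same leading arguments (in_nc, out_nc, nf, nb, upscale), so the identical check works for it as well.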
40.220285
122
0.600812
5d25f0f9bbbf7a06836e92a6dbd6ccf8d0b9ee02
57413
py
Python
main.py
wolf498/fun2
9ee17c803f6818913c9c1b91a729e7866bb57897
[ "MIT" ]
1
2021-10-02T12:51:59.000Z
2021-10-02T12:51:59.000Z
main.py
wolf498/fun2
9ee17c803f6818913c9c1b91a729e7866bb57897
[ "MIT" ]
null
null
null
main.py
wolf498/fun2
9ee17c803f6818913c9c1b91a729e7866bb57897
[ "MIT" ]
1
2022-01-04T13:42:23.000Z
2022-01-04T13:42:23.000Z
import discord from discord.ext import commands from discord.ext import tasks from discord.ext.commands import cooldown, BucketType from discord.ext.commands import (CommandOnCooldown) from PIL import Image, ImageFont, ImageDraw, ImageOps from unidecode import unidecode from io import BytesIO import asyncio import discord.utils import functools from discord.utils import get import discord, datetime, time from discord.ext import commands import math import wiki import os import keep_alive import wikipedia import traceback import random import json import utils from discord.utils import find import datetime from datetime import datetime, timedelta from platform import python_version from replit import db from time import time import discord from discord.ext import commands, tasks from io import BytesIO import datetime from datetime import datetime from discord.ext import commands intents = discord.Intents.default() from PIL import Image from io import BytesIO intents.members = True client = commands.Bot( command_prefix='!', case_insensitive=True, intents=intents) from io import BytesIO def time_encode(sec): time_type, newsec = 'seconds', int(sec) if sec > 60: newsec, time_type = round(sec / 60), 'minutes' if sec > 3600: newsec, time_type = round(sec / 3600), 'hours' if sec > 86400: newsec, time_type = round(sec / 86400), 'days' if sec > 2592000: newsec, time_type = round(sec / 2592000), 'months' if sec > 31536000: newsec, time_type = round(sec / 31536000), 'years' if str(newsec) == '1': return str(str(newsec) + ' ' + time_type[:-1]) return str(str(newsec) + ' ' + time_type) for filename in os.listdir('./Cogs'): if filename.endswith('.py'): client.load_extension(f'Cogs.{filename[:-3]}') @client.command(hidden=True) @commands.is_owner() async def load(ctx, extension): client.load_extension(f'Cogs.{extension}') await ctx.send('Succesfully loaded module') @client.command(hidden=True) @commands.is_owner() async def unload(ctx, extension): client.unload_extension(f'Cogs.{extension}') await ctx.send('Succesfully unloaded module') @client.command(hidden=True) @commands.is_owner() async def reload(ctx, extension): client.reload_extension(f'Cogs.{extension}') await ctx.send('Succesfully reloaded module') class CogName(commands.Cog): def __init__(self, bot): self.bot = bot def setup(bot): bot.add_cog(CogName(bot)) @client.event async def on_message(msg): try: if msg.mentions[0] == client.user: await msg.channel.send(f"My prefix is !") except: pass await client.process_commands(msg) @client.event async def on_guild_join(guild): channel = client.get_channel(789032560500015114) embed = discord.Embed( title="I Joined a new server", description= f"Name: {guild},\nMembers: {guild.member_count}\nid: {guild.id} \nOwner: {guild.owner}" ) embed.set_thumbnail(url=guild.icon_url) embed.set_footer(text=f"{len(client.guilds)} servers.") await channel.send(embed=embed) @client.event async def on_guild_remove(guild): channel = client.get_channel(789032560500015114) embed = discord.Embed( title="I was removed from a server", description= f"Name: {guild},\nMembers: {guild.member_count}id: {guild.id}") embed.set_thumbnail(url=guild.icon_url) embed.set_footer(text=f"{len(client.guilds)} servers.") await channel.send(embed=embed) filtered_words = ["asshole", "wtf", "fuck", "fuck you", "fck", "fuck off"] @client.event async def on_ready(): print('Logged in as') print(client.user.name) print(client.user.id) print(discord.__version__) print('------') print('Servers connected to:') for guild in client.guilds: print(guild.name) 
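# NOTE: @client.event keeps only the LAST handler registered for a given
# event, so the on_message (prefix hint) and on_ready (guild listing)
# defined above are silently replaced by the redefinitions further down.
# A merged handler along these lines would keep both behaviours (a sketch
# only, reusing the client/filtered_words names from this file):
#
#     @client.event
#     async def on_message(msg):
#         if client.user in msg.mentions:              # prefix hint
#             await msg.channel.send("My prefix is !")
#         for word in filtered_words:                  # word filter
#             if word in msg.content:
#                 await msg.delete()
#                 break
#         await client.process_commands(msg)
#
# Note also that `from time import time` above rebinds the name `time` to a
# function, so the ping command's time.monotonic() call below would raise
# AttributeError; dropping that one import (keeping plain `import time`)
# avoids the clash.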
def convert(time): pos = ["s", "m", "h", "d"] time_dict = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24} unit = time[-1] if unit not in pos: return -1 try: val = int(time[:-1]) except: return -2 return val * time_dict[unit] @client.event async def on_message(msg): for word in filtered_words: if word in msg.content: await msg.delete() await client.process_commands(msg) @client.event async def on_ready(): global startdate startdate = datetime.now() @client.event async def on_command_error(ctx, error): embed = discord.Embed(title=f"***{error}***") await ctx.send(embed=embed) print(error) raise error class MemberID(commands.Converter): async def convert(self, ctx, argument): try: m = await commands.MemberConverter().convert(ctx, argument) except commands.BadArgument: try: return int(argument, base=10) except ValueError: raise commands.BadArgument( f"{argument} is not a valid member or member ID." ) from None else: return m.id @client.command() #@client.has_permissions(administrator = True) async def embed(ctx, *, channel: discord.TextChannel): await ctx.send("Send me a title for the embed") def check(message): return (message.author == ctx.author and message.channel == ctx.channel and not message.author.bot) try: msg = await client.wait_for("message", timeout=60.0, check=check) except asyncio.TimeoutError: return await ctx.send("No title was provided time up") else: t = msg.content if t.content.lower() == "none": tit = None else: tit = t await ctx.send("Now send me the text you want to be in the embed") try: msg = await client.wait_for("message", timeout=60.0, check=check) except asyncio.TimeoutError: return await ctx.send("No description was provided") else: desc = msg.content await ctx.send("all set") embed = discord.Embed(title=tit, description=desc, colour=0x00c8ff) if len(msg.attachments) > 0: embed.set_image(url=msg.attachments[0].url) await channel.send(embed=embed) @client.command() async def hey(ctx): await ctx.send("Hello") @client.command() async def good_day(ctx): await ctx.send("Have a really good and a productive day") @client.command() async def hi(ctx): await ctx.send("Hey") @client.command() async def hello(ctx): await ctx.send("Hi wassup") @client.command() async def invite(ctx): embed = discord.Embed( title="Information about the bot", colour=discord.Colour.blue()) embed.set_thumbnail( url= "https://cdn.discordapp.com/avatars/566193564502196235/b624ea7737776938c070f6693c91abc9?size=2048" ) embed.add_field( name="Invite", value= "[Invite the bot](https://discord.com/oauth2/authorize?client_id=760415780176658442&permissions=8&scope=bot)", inline=False) embed.add_field( name="Community Server", value="[Join the Community Server](https://discord.gg/4fNdfNjKd9)", inline=False) await ctx.send(embed=embed) @client.command(aliases=["stats"]) async def info(ctx): embed = discord.Embed( title="Information about the bot", colour=discord.Colour.blue()) embed.set_thumbnail( url= 'https://cdn.discordapp.com/attachments/771998022807584797/785758835066667008/White_Red_and_Orange_Badge_Recess_Food_Festival_Logo_3.gif' ) embed.add_field(name="Name", value="RKS", inline=False) embed.add_field(name="Developing Language", value="Python", inline=False) embed.add_field( name="Developed By", value="<@566193564502196235>", inline=False) embed.add_field( name="Help", value="use !help command to get to know about the other commands", inline=False) embed.add_field( name="Invite", value= "[Invite the bot](https://discord.com/oauth2/authorize?client_id=760415780176658442&permissions=8&scope=bot)", 
inline=False) embed.add_field( name="Community Server", value="[Join the Community Server](https://discord.gg/4fNdfNjKd9)", inline=False) embed.add_field( name="Website", value= "[Bot's Official Website](https://rksbot.netlify.app/) \n [Vote for RKS](https://top.gg/bot/760415780176658442/vote)", inline=False) embed.add_field( name="Servers RKS getting used on", value=f"{len(client.guilds)} servers.", inline=False) embed.add_field( name="RKS Users", value=f"{len(client.users)} Users.", inline=False) await ctx.send(embed=embed) @client.command() async def Bots(ctx): embed = discord.Embed( title="Bots \n \n Spotify Bot - $75 BTC \n Twitch Bot - $85 BTC", colour=discord.Colour.blue()) await ctx.send(embed=embed) @client.command() async def dev_info(ctx): embed = discord.Embed( title="Developing information about the bot", colour=discord.Colour.blue()) embed.set_thumbnail( url= 'https://cdn.discordapp.com/attachments/771998022807584797/785758835066667008/White_Red_and_Orange_Badge_Recess_Food_Festival_Logo_3.gif' ) embed.add_field(name="Name", value="RKS", inline=False) embed.add_field(name="Developing Language", value="Python", inline=False) embed.add_field( name="Developed By", value="Aryaman Srivastava", inline=False) embed.add_field( name="Help", value="use !help command to get to know about the other commands", inline=False) embed.add_field( name="Invite", value= "[Invite the bot](https://discord.com/oauth2/authorize?client_id=760415780176658442&permissions=8&scope=bot)", inline=False) embed.add_field( name="Community Server", value="[Join the Community Server](https://discord.gg/4fNdfNjKd9)", inline=False) embed.add_field( name="Servers RKS getting used on", value=f"{len(client.guilds)} servers.", inline=False) embed.add_field(name="Bot Created On", value="Repl.it", inline=False) embed.add_field( name="Running 24/7 ", value="Google Cloud Console", inline=False) embed.add_field(name="Python Version", value="3.8.2", inline=False) await ctx.send(embed=embed) @client.command() async def gm(ctx): await ctx.send("good morning,have a great day") @client.command() async def wassup(ctx): await ctx.send("I am Bored, what about you?") @client.command(aliases=["Good Morning"]) async def good_morning(ctx): await ctx.send("Good Morning") @client.command() async def bored(ctx): await ctx.send( "Here are somethings you can do https://www.arkadium.com/free-online-games/ or listen to some songs? 
https://www.spotify.com/in/ or maybe some videos https://www.youtube.com/" ) @client.command() async def rule1(ctx): await ctx.send( "Welcome, Witness me peform some tasks,I am RKS, nice to meet you.") @client.command() async def gn(ctx): await ctx.send("Good night, Have sweet dreams ") @client.command() async def games(ctx): await ctx.send("https://www.arkadium.com/free-online-games/") @client.command() async def songs(ctx): await ctx.send("https://www.spotify.com/in/") @client.command() async def Sports(ctx): embed = discord.Embed( title="Sports news and live scored", colour=discord.Colour.blue()) embed.add_field( name="Football", value= "Find all football news and scores on https://www.espn.in/football/scoreboard", inline=False) embed.add_field( name="Cricket", value="Find all cricket news and scores on https://www.cricbuzz.com/") embed.add_field( name="BasketBall", value= "Find all BasketbaLL news and scores on https://in.nba.com/?gr=www") await ctx.send(embed=embed) @client.command() async def talk(ctx): await ctx.send( "Hi i am RKS, i would love to talk to you..so what do you want to talk about" ) @client.command() async def life(ctx): await ctx.send("Ok..how is life going? You happy or sad?") @client.command() async def news(ctx): await ctx.send( "https://news.google.com/topstories?hl=en-IN&gl=IN&ceid=IN:en") @client.command() async def happy(ctx): await ctx.send("Wow, nice to know..i am happy that you are enjoying life") @client.command() async def weather(ctx): await ctx.send("https://www.accuweather.com/") @client.command() async def videos(ctx): await ctx.send("https://www.youtube.com/") @client.command() async def game(ctx): await ctx.send( "Among us (Mobile & PC/Laptop) \nValorant(PC/laptop) \nCall of duty (Mobile/laptop)\nGrand Theft Auto (Laptop),\nGetting over it (Laptop)" ) @tasks.loop(seconds=500) async def status_change(): await client.change_presence( activity=discord.Activity( type=discord.ActivityType.listening, name="@AryamanSri#0001")) await asyncio.sleep(500) await client.change_presence( activity=discord.Activity( type=discord.ActivityType.watching, name="https://rksbot.netlify.app/")) await asyncio.sleep(500) await client.change_presence( activity=discord.Activity( type=discord.ActivityType.Updating, name="RKS Version 1.07")) await asyncio.sleep(500) await client.change_presence( activity=discord.Activity( type=discord.ActivityType.listening, name=f"{len(client.users)} users.")) await asyncio.sleep(500) await client.change_presence( activity=discord.Activity( type=discord.ActivityType.watching, name=f"{len(client.guilds)} servers.")) await asyncio.sleep(500) status_change.before_loop(client.wait_until_ready) status_change.start() client.remove_command("help") @client.group(invoke_without_command=True) async def help(ctx): embed = discord.Embed(title="RKS COMMANDS 🤖", colour=discord.Colour.blue()) embed.add_field( name="Moderation 💂‍♀️", value= "`announcerole, ban, kick, find, massban, mute, unmute, prune, lock, unlock, cooldown, warn, checkwarns, removewarn` ", inline=True) embed.add_field( name="Talking 🤷🏻‍♂️", value="`!Hi !Hey !Hello wassup bored` ") embed.add_field(name="Sports 🏆", value="`!sports`") embed.add_field(name="Greetings ✌️", value="`!gm !gn !good_day`") embed.add_field( name="Information Commands 🔎", value= "`!info !dev_info !server_info !whois/!userinfo !suggest report !unlock` " ) embed.add_field( name="Clock ⏰", value="`!timer{time in seconds} !remind{time in mins,reminder name}`") embed.add_field( name="Important Links :link:", value= "[Invite 
the bot](https://discord.com/oauth2/authorize?client_id=760415780176658442&permissions=8&scope=bot) \n [Join the Community Server](https://discord.gg/4fNdfNjKd9) \n [Bot's Official Website](https://rksbot.netlify.app/) \n [Vote for RKS](https://top.gg/bot/760415780176658442/vote)" ) embed.add_field( name="Total Commands", value=f"{len([x.name for x in client.commands])}", inline=True) embed.add_field( name="Fun Commands 🥳", value= "`!treat` `!ping` `!gayrate` `!quote` `ball` `!shrug` `Tableflip` `UNFLIP`" ) embed.add_field( name="Music 🎶", value= "`!play{songname}` `!stop` `!pause` `!resume` `!np` `!skip` `!queue` `!shuffle`" ) embed.add_field(name="Chatbot 🧍🏻‍♂️🧍🏻", value="`!rks{anything}` ") embed.set_image( url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif' ) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') await ctx.send(embed=embed) @client.command(pass_context=True) async def ping(ctx): """ Pong! """ before = time.monotonic() message = await ctx.send("Pong!") ping = (time.monotonic() - before) * 1000 await message.edit(content=f"Pong!🏓 \n `{int(ping)}ms`") print(f'Ping {int(ping)}ms') @client.command() async def Moderation(ctx): embed = discord.Embed( title="All Moderation Commands", colour=discord.Colour.blue()) embed.add_field(name="!mute", value="Mutes the user{@user}", inline=False) embed.add_field( name="!unmute{@user}", value="Unmutes the user", inline=False) embed.add_field( name="!kick{@user}", value="Kicks the user from the server", inline=False) embed.add_field( name="!ban{@user}", value="Bans the user from the server", inline=False) await ctx.send(embed=embed) @client.command() async def help_greetings(ctx): embed = discord.Embed( title="All Greeting Commands", colour=discord.Colour.blue()) embed.add_field( name="Greeting", value= "!gm: Wishes the user Good Morning \n !gn:Wishes the user Good Night \n !good_day: Wishes the user a good day", inline=False) await ctx.send(embed=embed) @client.command() async def Talking(ctx): embed = discord.Embed( title="All Talking Commands", colour=discord.Colour.blue()) embed.add_field( name="Talking", value= "!Hello: Wishes the user Hello \n !Hey:Wishes the user Hey \n !wassup: Bot will reply with what it is doing \n !info: Inddormation about the bot \n !Bored: Bot will send somethings you can do to pass time \n !Music: Bot plays some awesome english songs \n !Hi:Wishes the user Hi", inline=False) await ctx.send(embed=embed) @client.command() async def Server(ctx): embed = discord.Embed(title="Servers", colour=discord.Colour.blue()) embed.add_field( name="Servers RKS getting used on", value=f"{len(client.guilds)} servers.", inline=False) embed.add_field( name="User's Using RKS", value=f"{len(client.users)} users.", inline=False) await ctx.send(embed=embed) @client.command() async def Mentor(ctx): embed = discord.Embed( title="The Reason why bot is alive", colour=discord.Colour.blue()) embed.add_field(name="Name", value="Krish Kharangra", inline=False) embed.add_field(name="UserName", value="@Kalu#7777", inline=False) embed.add_field(name="User ID", value="457569956079337472", inline=False) await ctx.send(embed=embed) @commands.command(name='rps', aliases=['rockpaperscissors']) async def rps(self, ctx): """Play Rock, Paper, Scissors game""" def check_win(p, b): if p == '🌑': return False if b == '📄' else True elif p == '📄': return False if b == '✂' else True else: # p=='✂' return False if 
b == '🌑' else True async with ctx.typing(): reactions = ['🌑', '📄', '✂'] game_message = await ctx.send( "**Rock Paper Scissors**\nChoose your shape:", delete_after=15.0) for reaction in reactions: await game_message.add_reaction(reaction) bot_emoji = random.choice(reactions) def check(reaction, user): return user != self.bot.user and user == ctx.author and (str( reaction.emoji) == '🌑' or '📄' or '✂') try: reaction, user = await self.bot.wait_for( 'reaction_add', timeout=10.0, check=check) except asyncio.TimeoutError: await ctx.send(f"Time's Up! :stopwatch:") else: await ctx.send( f"**:man_in_tuxedo_tone1:\t{reaction.emoji}\n:robot:\t{bot_emoji}**" ) # if conds if str(reaction.emoji) == bot_emoji: await ctx.send("**It's a Tie :ribbon:**") elif check_win(str(reaction.emoji), bot_emoji): await ctx.send( "**You win :sparkles:\nAs a reward i will be giving you 100 coins**" ) else: await ctx.send("**I win :robot:**") @client.command() async def wanted(ctx, user: discord.Member = None): if user == None: user = ctx.author wanted = Image.Open( "https://cdn.discordapp.com/attachments/771998022807584797/786807994133774346/wanted-vintage-western-poster_176411-3.jpg" ) asset = user.avatar_url_as(size=130) data = BytesIO(await asset.read()) pfp = Image.open(data) pfp = pfp.resize((177, 177)) wanted.paste(pfp, (120, 212)) wanted.save("profile.jpg") await ctx.send(file=discord.File("profile.jpg")) @client.command() @commands.cooldown(1, 3600, commands.BucketType.user) async def suggest(ctx, *, sugg): channel = client.get_channel(788427578608189440) await ctx.channel.purge(limit=1) embed = discord.Embed( title='New Suggestion By {}'.format(ctx.author.display_name)) embed.add_field(name='Suggestion: ', value=sugg) embed.set_footer( text='UserID: ( {} ) | sID: ( {} )'.format(ctx.author.id, ctx.author.display_name), icon_url=ctx.author.avatar_url) await ctx.send('👌| Your Suggestion Has Been Sent To <#{}> !'.format( channel.id)) suggg = await channel.send(embed=embed) await suggg.add_reaction('👍') await suggg.add_reaction('👎') @client.command() @commands.cooldown(1, 3600, commands.BucketType.user) async def report(ctx, *, report): channel = client.get_channel(795182039723933706) await ctx.channel.purge(limit=1) embed = discord.Embed( title='New Report By {}'.format(ctx.author.display_name)) embed.add_field(name='Issue: ', value=report) embed.set_footer( text='UserID: ( {} ) | sID: ( {} )'.format(ctx.author.id, ctx.author.display_name), icon_url=ctx.author.avatar_url) await ctx.send( '👌 | Your report Has Been Sent To <#{}> !, sorry for the inconvenience' .format(channel.id)) report = await channel.send(embed=embed) await report.add_reaction('👍') await report.add_reaction('👎') @client.command(aliases=['8ball']) async def ball(ctx, *, question): responses = [ "It is certain.", "It is decidedly so.", "Without a doubt.", "Yes - definitely.", "You may rely on it.", "As I see it, yes.", "Most likely.", "Outlook good.", "Yes.", "Signs point to yes.", "Reply hazy, try again.", "Ask again later.", "Better not tell you now.", "Cannot predict now.", "Concentrate and ask again.", "Don't count on it.", "My reply is no.", "My sources say no.", "Very doubtful." ] embed = discord.Embed( title="8-ball", description=f"{random.choice(responses)}", color=discord.Colour.blue()) await ctx.send(embed=embed) @client.command(aliases=[ 'quote', ]) async def Quotes(ctx): responses = [ "“The Best Way To Get Started Is To Quit Talking And Begin Doing.”", "“The Pessimist Sees Difficulty In Every Opportunity. 
The Optimist Sees Opportunity In Every Difficulty.”", "“Don’t Let Yesterday Take Up Too Much Of Today.”", "“You Learn More From Failure Than From Success. Don’t Let It Stop You. Failure Builds Character.”", "“It’s Not Whether You Get Knocked Down, It’s Whether You Get Up.”", " “If You Are Working On Something That You Really Care About, You Don’t Have To Be Pushed. The Vision Pulls You.”", "“People Who Are Crazy Enough To Think They Can Change The World, Are The Ones Who Do.”", "“Failure Will Never Overtake Me If My Determination To Succeed Is Strong Enough.”", "“Entrepreneurs Are Great At Dealing With Uncertainty And Also Very Good At Minimizing Risk. That’s The Classic Entrepreneur.”", "“We May Encounter Many Defeats But We Must Not Be Defeated.”", "“Knowing Is Not Enough; We Must Apply. Wishing Is Not Enough; We Must Do.”", "“Imagine Your Life Is Perfect In Every Respect; What Would It Look Like?”", "“We Generate Fears While We Sit. We Overcome Them By Action.”", "“Whether You Think You Can Or Think You Can’t, You’re Right.”", "“Security Is Mostly A Superstition. Life Is Either A Daring Adventure Or Nothing.”", " “The Man Who Has Confidence In Himself Gains The Confidence Of Others.”", "“The Only Limit To Our Realization Of Tomorrow Will Be Our Doubts Of Today.”", "“Creativity Is Intelligence Having Fun.”", "“What You Lack In Talent Can Be Made Up With Desire, Hustle And Giving 110% All The Time.”" ] embed = discord.Embed( title="Daily Quotes", description=f"{random.choice(responses)}", color=discord.Colour.blue()) embed.set_footer(text=f"Have a Great Day {ctx.author}") await ctx.send(embed=embed) @client.command() async def Tableflip(ctx): embed = discord.Embed( title="Requested by You", colour=discord.Colour.blue()) embed.add_field(name="TABLEFLIP", value="(╯°□°)╯︵ ┻━┻", inline=False) embed.set_footer(text=f"Requested by {ctx.author}") await ctx.send(embed=embed) @client.command() async def shrug(ctx): embed = discord.Embed( title="As Requested by You", colour=discord.Colour.blue()) embed.add_field(name="SHRUG", value="¯\_(ツ)_/¯", inline=False) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') await ctx.send(embed=embed) @client.command() async def unflip(ctx): embed = discord.Embed( title="Requested by You", colour=discord.Colour.blue()) embed.add_field(name="UNFLIP", value="┬─┬ ノ( ゜-゜ノ)", inline=False) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') await ctx.send(embed=embed) @client.command(case_insensitive=True) async def treat(ctx, member: discord.Member): if member == ctx.author: await ctx.send("You can't treat youself!") return embed = discord.Embed( description= f'You offered {member.name} a treat!! 
{member.mention} react to the emoji below to accept!', color=0x006400) timeout = int(15.0) message = await ctx.channel.send(embed=embed) await message.add_reaction('🍫') await message.add_reaction('🍕') await message.add_reaction('🍰') await message.add_reaction('🍦') await message.add_reaction('🍔') def check(reaction, user): return user == member and str(reaction.emoji) == '🍫' def check(reaction, user): return user == member and str(reaction.emoji) == '🍕' def check(reaction, user): return user == member and str(reaction.emoji) == '🍰' def check(reaction, user): return user == member and str(reaction.emoji) == '🍦' def check(reaction, user): return user == member and str(reaction.emoji) == '🍔' try: reaction, user = await client.wait_for( 'reaction_add', timeout=timeout, check=check) except asyncio.TimeoutError: msg = (f"{member.mention} didn't accept the treat in time!!") await ctx.channel.send(msg) else: await ctx.channel.send( f"{member.mention} You have accepted {ctx.author.name}'s offer!") @client.command() @commands.has_permissions(kick_members=True) async def quite(ctx, member: discord.Member = None): if not member: await ctx.send("Who do you want me to mute?") return role = discord.utils.get(ctx.guild.roles, name="muted") await member.add_roles(role) await ctx.send("ok I did it") @client.command(aliases=['howgayy']) async def gayrate(ctx): responses = [ 'You are 1 percent gay.', 'You are 2 percent gay.', 'You are 3 percent gay.', 'You are 4 percent gay.', 'You are 5 percent gay.', 'You are 6 percent gay.', 'You are 7 percent gay.', 'You are 8 percent gay.', 'You are 9 percent gay.', 'You are 10 percent gay.', 'You are 11 percent gay.', 'You are 12 percent gay.', 'You are 13 percent gay.', 'You are 14 percent gay.', 'You are 15 percent gay.', 'You are 16 percent gay.', 'You are 17 percent gay.', 'You are 18 percent gay.', 'You are 19 percent gay.', 'You are 20 percent gay.', 'You are 21 percent gay.', 'You are 22 percent gay.', 'You are 23 percent gay.', 'You are 24 percent gay.', 'You are 25 percent gay.', 'You are 26 percent gay.', 'You are 27 percent gay.', 'You are 28 percent gay.', 'You are 29 percent gay.', 'You are 30 percent gay.', 'You are 31 percent gay.', 'You are 32 percent gay.', 'You are 33 percent gay.', 'You are 34 percent gay.', 'You are 35 percent gay.', 'You are 36 percent gay.', 'You are 37 percent gay.', 'You are 38 percent gay.', 'You are 39 percent gay.', 'You are 40 percent gay.', 'You are 41 percent gay.', 'You are 42 percent gay.', 'You are 43 percent gay.', 'You are 44 percent gay.', 'You are 45 percent gay.', 'You are 46 percent gay.', 'You are 47 percent gay.', 'You are 48 percent gay.', 'You are 49 percent gay.', 'You are 50 percent gay.', 'You are 51 percent gay.', 'You are 52 percent gay.', 'You are 53 percent gay.', 'You are 54 percent gay.', 'You are 55 percent gay.', 'You are 56 percent gay.', 'You are 57 percent gay.', 'You are 58 percent gay.', 'You are 59 percent gay.', 'You are 60 percent gay.', 'You are 61 percent gay.', 'You are 62 percent gay.', 'You are 63 percent gay.', 'You are 64 percent gay.', 'You are 65 percent gay.', 'You are 66 percent gay.', 'You are 67 percent gay.', 'You are 68 percent gay.', 'You are 69 percent gay.', 'You are 70 percent gay.', 'You are 71 percent gay.', 'You are 72 percent gay.', 'You are 73 percent gay.', 'You are 74 percent gay.', 'You are 75 percent gay.', 'You are 76 percent gay.', 'You are 77 percent gay.', 'You are 78 percent gay.', 'You are 79 percent gay.', 'You are 80 percent gay.', 'You are 81 percent gay.', 
'You are 82 percent gay.', 'You are 83 percent gay.', 'You are 84 percent gay.', 'You are 85 percent gay.', 'You are 86 percent gay.', 'You are 87 percent gay.', 'You are 88 percent gay.', 'You are 89 percent gay.', 'You are 90 percent gay.', 'You are 91 percent gay.', 'You are 92 percent gay.', 'You are 93 percent gay.', 'You are 94 percent gay.', 'You are 95 percent gay.', 'You are 96 percent gay.', 'You are 97 percent gay.', 'You are 98 percent gay.', 'You are 99 percent gay.', 'You are 100 percent gay.' ] mbed = discord.Embed( title='Gay Rate', description=f'{random.choice(responses)}') await ctx.send(embed=mbed) @client.command(aliases=['n']) async def nuke(ctx, channel: discord.TextChannel): mbed = discord.Embed( title='Success', description=f'{channel} has been nuked.', ) if ctx.author.guild_permissions.manage_channels: await ctx.send(embed=mbed) await channel.delete() @client.command(aliases=['hackk']) async def hack(ctx): await ctx.send('rick rolled') await ctx.send( 'https://tenor.com/view/dance-moves-dancing-singer-groovy-gif-17029825' ) @client.command(aliases=['roll']) async def diceroll(ctx): responses = [ 'You rolled a 1!', 'You rolled a 2!', 'You rolled a 3!', 'You rolled a 4!', 'You rolled a 5!', 'You rolled a 6!', ] mbed = discord.Embed( title='Dice Rolled!', description=f'{random.choice(responses)}') mbed.set_thumbnail( url= 'https://images-ext-2.discordapp.net/external/kAegJWUTO1muMX0U5mEKgKSmpHuNl4it6086g2F3pCw/https/gilkalai.files.wordpress.com/2017/09/dice.png?width=80&height=77' ) await ctx.send(embed=mbed) @client.command() @commands.has_permissions(manage_messages=True) async def announce(ctx, ch: discord.TextChannel = None): if ch == None: await ctx.send('Channel not specified') return def check(m): return m.author == ctx.message.author and m.channel == ctx.message.channel await ctx.send('Enter the title:') t = await client.wait_for('message', check=check, timeout=60) await ctx.send('Enter the message:') msg = await client.wait_for('message', check=check, timeout=120) embed = discord.Embed( title=t.content, description=msg.content, color=0xffff) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') await ch.send(embed=embed) @client.command() async def update( ctx, member: discord.Member, ): if not member: # if member is no mentioned await ctx.send("User isnt Mentioned :grey_question:") def check(m): return m.author == ctx.message.author and m.channel == ctx.message.channel await ctx.send('Enter the title:') t = await client.wait_for('message', check=check, timeout=60) await ctx.send('Enter the message:') msg = await client.wait_for('message', check=check, timeout=120) embed = discord.Embed( title=t.content, description=msg.content, color=0xffff) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') await member.send(embed=embed) @client.command(aliases=['membercount']) async def members(ctx): mbed = discord.Embed( color=discord.Color(0xffff), title=f'{ctx.guild.name}') mbed.set_thumbnail(url=f'{ctx.guild.icon_url}') mbed.add_field(name='Member Count', value=f'{ctx.guild.member_count}') mbed.set_footer( icon_url=f'{ctx.guild.icon_url}', text=f'Guild ID: {ctx.guild.id}') await ctx.send(embed=mbed) client.remove_command("commands") @client.command() async def commands(ctx): embed = discord.Embed( title="Commands Loaded", 
colour=discord.Colour.blue()) embed.set_thumbnail( url= "https://cdn.discordapp.com/avatars/760415780176658442/976c9cc26755a5674b032f8acb0fef8c.png?size=128" ) embed.add_field( name="Total Commands", value=f"{len([x.name for x in client.commands])}", inline=False) await ctx.send(embed=embed) @client.command(aliases=['ticket']) async def createchannel(ctx, channelName): guild = ctx.guild mbed = discord.Embed( title='Success', description="{} has been successfully created.".format(channelName)) if ctx.author.guild_permissions.send_messages: await guild.create_text_channel(name='{}'.format(channelName)) await ctx.send(embed=mbed) @client.command() async def rules(ctx): embed = discord.Embed( title="Discord Terms of Service", colour=discord.Colour.blue()) embed.set_thumbnail( url= "https://cdn.discordapp.com/avatars/566193564502196235/b624ea7737776938c070f6693c91abc9?size=2048" ) embed.add_field( name="Rule of Conduct", value= "Do not organize, participate in, or encourage harassment of others. Disagreements happen and are normal, but continuous, repetitive, or severe negative comments may cross the line into harassment and are not okay. \n Do not organize, promote, or coordinate servers around hate speech. It’s unacceptable to attack a person or a community based on attributes such as their race, ethnicity, national origin, sex, gender, sexual orientation, religious affiliation, or disabilities. \n Do not make threats of violence or threaten to harm others. This includes indirect threats, as well as sharing or threatening to share someone’s private personal information (also known as doxxing) \n Do not evade user blocks or server bans. Do not send unwanted, repeated friend requests or messages, especially after they’ve made it clear they don’t want to talk to you anymore.", inline=False) embed.add_field( name="NSFW", value= "You must apply the NSFW label to channels if there is adult content in that channel. Any content that cannot be placed in an age-gated channel, such as avatars, server banners, and invite splashes, may not contain adult content. \n You may not sexualize minors in any way. This includes sharing content or links which depict minors in a pornographic, sexually suggestive, or violent manner, and includes illustrated or digitally altered pornography that depicts minors \n You may not share sexually explicit content of other people without their consent, or share or promote sharing of non-consensual intimate imagery in an attempt to shame or degrade someone. \n You may not share content that glorifies or promotes suicide or self-harm, including any encouragement to others to cut themselves, or embrace eating disorders such as anorexia or bulimia. \n You may not use Discord for the organization, promotion, or support of violent extremism.", inline=False) embed.add_field( name="Verification", value= "These rules are verified as per discord guidelines and are expected to be followed seriously", inline=False) await ctx.send(embed=embed) @client.command(aliases=[ 'Funfacts', ]) async def facts(ctx): responses = [ "“The idea of RKS was discussed in random discord chatting”", "“RKS was born on 19 November ”", "“RKS is supposed to MEE6”", "“RKS is not developed by a single person. 
Each developer had a little contribution to make the bot success”", ] embed = discord.Embed( title="Fun Fact about RKS", description=f"{random.choice(responses)}", color=discord.Colour.blue()) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS')( text=f"Have a Great Day {ctx.author}") await ctx.send(embed=embed) @client.command() async def Pet(ctx): embed = discord.Embed( title="Ok! Adopt a animals from below", colour=discord.Colour.blue()) embed.add_field( name="Animals List", value= "Cat 🐈 \n Dog 🐕‍🦺 \n Goldfish 🐟 \n Hamster 🐹 \n Kitten 🐈\n Mouse 🐁 \n Parrot 🦜 \n Puppy 🐕‍🦺\n Rabbit 🐇 \n Tropical fish 🐟 \n Turtle 🐢", inline=False) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS')( text="Use !{animalname} to adopt") await ctx.send(embed=embed) @client.command() async def dog(ctx): embed = discord.Embed( title="Ok! You want a dog!", colour=discord.Colour.blue()) embed.add_field( name="Animals List", value= "Cat 🐈 \n Dog 🐕‍🦺 \n Goldfish 🐟 \n Hamster 🐹 \n Kitten 🐈\n Mouse 🐁 \n Parrot 🦜 \n Puppy 🐕‍🦺\n Rabbit 🐇 \n Tropical fish 🐟 \n Turtle 🐢", inline=False) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS')( text="Use !{animalname} to adopt") await ctx.send(embed=embed) @client.command() async def remind(ctx, mins: int, reminder): embed = discord.Embed( title=f'Reminder set for {mins} Minute named {reminder}', ) embed.timestamp = ctx.message.created_at await ctx.send(embed=embed) counter = 0 while counter <= int(mins): counter += 1 await asyncio.sleep(60) if counter == int(mins): embed = discord.Embed( title='Reminder!!', description= f"⏰{ctx.author.mention}, I have set a reminder for {mins} minutes with the reminder being for {reminder} has completed", colour=discord.Colour.blurple()) embed.timestamp = ctx.message.created_at await ctx.send(embed=embed) @client.command() async def rps1v1(ctx, user: discord.Member): try: def check(message) -> bool: return user == message.author await ctx.send( f"Okay! It\'s gonna be a game between {ctx.author.mention} and {user.mention}" ) await ctx.send( f"{user.mention}, Reply with a ``yes`` or ``no`` to confirm your participation" ) message = await client.wait_for("message", timeout=20, check=check) except asyncio.TimeoutError: await ctx.send(f"{user.mention} doesn't wanna have a game") else: if message.content == "no": await ctx.send("Alright let's just pretend that never happened") if message.content == "yes": await ctx.send("alright") player1 = ctx.author player2 = user await ctx.send( "alright , DM me your choices, **ONLY WHEN I ASK FOR IT**, within 30 seconds" ) await player1.send( "Choose now, ``stone`` or ``paper`` or ``scissors``?") try: def player1_check(message) -> bool: return player1 == message.author player1_choice = await client.wait_for( "message", timeout=30, check=player1_check) await player1.send( f"ok u chose {player1_choice.content}, I am now waiting for {player2} to choose" ) except asyncio.TimeoutError: await player1.send("OK u dont wanna play U LOSE") await ctx.send( f"{player1.mention} DIDNT REPLY SO HE IS A LOSER!, CONGRATS {player2.mention}, YOU WON!" 
) else: try: def player2_check(message) -> bool: return player2 == message.author await player2.send( f"Choose now, ``stone`` or ``paper`` or ``scissors``?") player2_choice = await client.wait_for( "message", timeout=30, check=player2_check) await player2.send(f"ok u chose {player2_choice.content} ") except asyncio.TimeoutError: await ctx.send( f"{player2.mention} DIDNT REPLY SO HE IS A LOSER!, CONGRATS {player1.mention}, YOU WON!" ) await player2.send("LOSER") else: if player1_choice.content == player2_choice.content: await ctx.send( f"ITS A TIE!!! {player1.mention} chose {player1_choice.content} and {player2.mention} chose {player2_choice.content}!!!" ) if player1_choice == "stone": if player2_choice.content == "scissors": await ctx.send( f"GG! {player1.mention} chose {player1_choice.content}, which broke {player2.mention}'s {player2_choice.content}" ) if player2_choice.content == "paper": await ctx.send( f"GG! {player2.mention} chose {player2_choice.content}, which wrapped itself and defeated {player1.mention}'s {player1_choice.content}" ) if player1_choice.content == "paper": if player2_choice.content == "scissors": await ctx.send( f"GG! {player2.mention}'s {player2_choice.content} cut {player1.mention}'s {player1_choice.content}!" ) elif player2_choice.content == "stone": await ctx.send( f"GG! {player2.mention} chose {player2_choice.content}, which wrapped itself and defeated {player1.mention}'s {player1_choice.content}" ) elif player1_choice.content == "scissors": if player2_choice.content == "stone": await ctx.send( f"GG! {player2.mention} chose {player2_choice.content}, which CRUSHED {player1.mention}'s {player1_choice.content}" ) elif player2_choice.content == "paper": await ctx.send( f"GG! {player1.mention} chose scissors which cut up {player2.mention}'s papers!" ) @client.command() async def timer(ctx, *, seconds, reason=None): try: secondint = int(seconds) if secondint > 300: await ctx.send( "I dont think im allowed to do go above 300 seconds.") raise BaseException if secondint < 0 or secondint == 0: await ctx.send("I dont think im allowed to do negatives") raise BaseException message = await ctx.send( f" 🕒 {ctx.author.mention}, Your Timer Named {reason} has been Set For {seconds} seconds" ) while True: secondint = secondint - 1 if secondint == 0: await message.edit(new_content=("Ended!")) break await message.edit(new_content=("Timer: {0}".format(secondint))) await asyncio.sleep(1) await ctx.send(ctx.message.author.mention + f"Your countdown for {seconds} Has ended!") except ValueError: await ctx.send("Must be a number!") @client.command(pass_context=True) async def broadcast(ctx, *, msg): for server in client.servers: for channel in server.channels: try: await client.send_message(channel, msg) except Exception: continue else: break @client.command(aliases=["ty", "thank"]) async def thankyou( ctx, member: discord.Member, ): if not member: # if member is no mentioned await ctx.send("User isnt Mentioned :grey_question:") embed = discord.Embed( title=f"Thank you** {member.name}** from {ctx.author.name}. 
T", description=':kissing_heart: :partying_face:', color=0xea7938) embed.set_image( url= 'https://cdn.discordapp.com/attachments/772665263463464970/795688394909941770/giphy.gif' ) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS')( text=f"Sent by {ctx.author} from {ctx.guild}", icon_url=ctx.author.avatar_url) await member.send(embed=embed) await ctx.send(f'Your thanks has been sent to{member.mention}') @client.command(aliases=["wc", "welcs"]) async def welcome( ctx, member: discord.Member, ): if not member: # if member is no mentioned await ctx.send("User isnt Mentioned :grey_question:") embed = discord.Embed( title=f"Welcome** {member.name}** To {ctx.guild.name}.", description=':kissing_heart: :partying_face:', color=0xea7938) embed.set_image( url= 'https://cdn.discordapp.com/attachments/771998022807584797/798781439864864768/unnamed.gif' ) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') await ctx.send(embed=embed) @client.command() async def website(ctx): embed = discord.Embed( title=f"**RKS NOW HAS A WEBSITE** \n {ctx.author} did you check it?", colour=discord.Colour.blue()) embed.add_field( name="[click here for the website](https://rksbot.netlify.app/)", value= "After almost a year of RKS, AramanSri has launched the official RKS Website for all your needs and info. Check it out now", inline=False) embed.set_image( url= 'https://cdn.discordapp.com/attachments/772665263463464970/793452624909303818/rksbot.netlify.app.png' ) await ctx.send(embed=embed) @client.command(aliases=["userinfo", "aboutuser"]) async def whois(ctx, member: discord.Member = None): user = ctx.author if not member else member if user is None: user = ctx.author date_format = "%a, %d %b %Y %I:%M %p" embed = discord.Embed(color=0xdfa3ff, description=user.mention) embed.set_author(name=str(user), icon_url=user.avatar_url) embed.set_thumbnail(url=user.avatar_url) embed.add_field(name="ID:", value=user.id) embed.add_field(name="Display Name:", value=user.display_name) embed.add_field(name="Joined", value=user.joined_at.strftime(date_format)) members = sorted(ctx.guild.members, key=lambda m: m.joined_at) embed.add_field(name="Join position", value=str(members.index(user) + 1)) embed.add_field( name="Registered", value=user.created_at.strftime(date_format)) if len(user.roles) > 1: role_string = ' '.join([r.mention for r in user.roles][1:]) embed.add_field( name="Roles [{}]".format(len(user.roles) - 1), value=role_string, inline=False) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') return await ctx.send(embed=embed) @client.command(aliases=["avatar"]) async def pfp(ctx, member: discord.Member): embed = discord.Embed(title=f"{member.name}'s avatar") embed.set_image(url=member.avatar_url) await ctx.send(embed=embed) @client.command() async def poll(ctx, ch: discord.TextChannel = None): if ch == None: await ctx.send('Channel not specified') return def check(m): return m.author == ctx.message.author and m.channel == ctx.message.channel await ctx.send('Enter the Poll title') t = await client.wait_for('message', check=check, timeout=60) await ctx.send('Enter the Poll Option 1') poll1 = await client.wait_for('message', check=check, timeout=120) await ctx.send('Enter the Poll Option 2') poll2 = 
await client.wait_for('message', check=check, timeout=120) embed = discord.Embed( title=t.content, description=f"1️⃣ {poll1.content} \n\n 2️⃣ {poll2.content}", color=0xffff) embed.set_footer( icon_url= 'https://cdn.discordapp.com/attachments/772665263463464970/801704591058010152/R.gif', text='Bot ID:760415780176658442 , Bot Name: RKS') poll1 = await ch.send(embed=embed) await poll1.add_reaction('1️⃣') await poll1.add_reaction('2️⃣') @client.command() async def sinfo(ctx): name = str(ctx.guild.name) description = str(ctx.guild.description) guild = ctx.guild owner = str(ctx.guild.owner) id = str(ctx.guild.id) region = str(ctx.guild.region) memberCount = str(ctx.guild.member_count) icon = str(ctx.guild.icon_url) total_text_channels = len(guild.text_channels) total_voice_channels = len(guild.voice_channels) total_channels = total_text_channels + total_voice_channels role_count = len(ctx.guild.roles) list_of_bots = [bot.mention for bot in ctx.guild.members if bot.bot] staff_roles = [ "Owner", "Head Dev", "Dev", "Head Admin", "Admins", "Moderators", "Community Helpers", "Members" ] embed2 = discord.Embed( timestamp=ctx.message.created_at, color=ctx.author.color) embed2.add_field(name='Name', value=f"{ctx.guild.name}", inline=True) embed2.add_field(name='Owner', value=f"{ctx.guild.owner}", inline=True) embed2.add_field( name='Verification Level', value=str(ctx.guild.verification_level), inline=True) embed2.add_field( name='Highest role', value=ctx.guild.roles[-2], inline=True) embed2.add_field(name='Contributers:', value="None") embed2.add_field(name="Server ID", value=id, inline=True) embed2.add_field(name="Region", value=region, inline=True) embed2.add_field( name="Server Channels: ", value=total_channels, inline=True) embed2.add_field( name="Server Text Channels: ", value=total_text_channels, inline=True) embed2.add_field( name="Server Voice Channels: ", value=total_voice_channels, inline=True) for r in staff_roles: role = discord.utils.get(ctx.guild.roles, name=r) if role: members = '\n'.join([member.name for member in role.members]) or "None" embed2.add_field(name=role.name, value=members) embed2.add_field( name='Number of roles', value=str(role_count), inline=True) embed2.add_field( name='Number Of Members', value=ctx.guild.member_count, inline=True) embed2.add_field(name='Bots:', value=(', '.join(list_of_bots))) embed2.add_field( name='Created At', value=ctx.guild.created_at.__format__('%A, %d. 
%B %Y @ %H:%M:%S'), inline=True) embed2.set_thumbnail(url=ctx.guild.icon_url) embed2.set_author(name=f"{ctx.guild.name} Information") await ctx.send(embed=embed2) @client.command(aliases=['lvl', 'rank']) @cooldown(1, 5, BucketType.member) async def level(ctx, member: discord.Member = None): if member == None: member = ctx.author with open('./Data/levels.json', 'r', encoding='utf8') as f: user = json.load(f) guild = user[str(ctx.guild.id)] level = user[str(ctx.guild.id)][str(member.id)]['level'] ranks = [] rank = 1 lvl = user[str(ctx.guild.id)][str(member.id)]['level'] + 1 exps=[60] if lvl == 696969696969696969696970: return while lvl > len(exps): exps.append(int(exps[-1] + exps[-1]/10)) if lvl == len(exps): break lvl_end = exps[-1] for i in guild: if i.isdigit(): if i == "774136203548557333": pass else: l = guild[str(i)]['level'] ranks.append(l) ranks = sorted(ranks, reverse=True) for q in ranks: if q == level: break rank += 1 if str(member.id) in guild: lvl = user[str(ctx.guild.id)][str(member.id)]['level'] exp = user[str(ctx.guild.id)][str(member.id)]['exp'] else: lvl = 0 exp = 0 lvl_end = 0 if lvl == 0: rank = 0 avatar = member.avatar_url_as(size=128) Level = lvl exp = exp exp_limit = lvl_end rank = rank name = member ## Template = Image.open("./Images/Rank_card_template/Template.png") Full_bar = Image.open("./Images/Rank_card_template/Full_bar.png") R_bar = Image.open("./Images/Rank_card_template/R_bar.png") mask = Image.open("./Images/Rank_card_template/mask.png") font1 = ImageFont.truetype("./Fonts/Prototype.ttf", 30) font2 = ImageFont.truetype("./Fonts/Prototype.ttf", 35) draw = ImageDraw.Draw(Template) #Avatar Icon avatar_data = BytesIO(await avatar.read()) pfp = Image.open(avatar_data) pfp.thumbnail((100,100)) try: Template.paste(pfp, (10,10) , pfp) except: Template.paste(pfp, (10,10)) Template.paste(mask, (10,10), mask) #Name Name_text = unidecode(str(name)) draw.text((120,30),Name_text,(225,225,225),font=font2) #Level Level_text = "Level:" + str(Level) draw.text((120,80),Level_text,(225,225,225),font=font1) #Exp Exp_text = "Exp:" + str(round(exp/100 , 1)) + "K/" + str(round(exp_limit/100 , 1)) + "K" draw.text((320,80),Exp_text,(225,225,225),font=font1) #Rank Rank_card_templateext = "Rank:" + str(rank) draw.text((520,80),Rank_card_templateext,(225,225,225),font=font1) #Exp Progress Bar exp_per = int((exp/exp_limit)*100) R_bar_position = 28 if exp_per != 0 : bar_width = int(638*(exp_per/100)) R_bar_position += bar_width Full_bar = ImageOps.fit(Full_bar, (bar_width,35)) Template.paste(Full_bar, (28, 142), Full_bar) Template.paste(R_bar, (R_bar_position, 142), R_bar) Template.save( f"./Images/Output/{ctx.author.id}.png", "png" ) await ctx.send(file = discord.File(f"./Images/Output/{ctx.author.id}.png")) @level.error async def level_error(ctx, error): if isinstance(error, CommandOnCooldown): msg = 'please try again in **`{:.2f} s`**'.format(error.retry_after) await ctx.send( f'{ctx.author.mention} Please don\'t spam the chat, {msg}') else: ctx.command.reset_cooldown(ctx) raise error keep_alive.keep_alive() client.run(os.getenv("TOKEN"))
35.287646
958
0.633376
314eebdf65caa8e224f4c6b1da964410e7b905e7
371
py
Python
contacts_searcher/domain/contact_repository.py
sabkaryan/contacts-searcher
b9a1682d72ac517a970a78e7fd960ced156a0adb
[ "MIT" ]
null
null
null
contacts_searcher/domain/contact_repository.py
sabkaryan/contacts-searcher
b9a1682d72ac517a970a78e7fd960ced156a0adb
[ "MIT" ]
null
null
null
contacts_searcher/domain/contact_repository.py
sabkaryan/contacts-searcher
b9a1682d72ac517a970a78e7fd960ced156a0adb
[ "MIT" ]
null
null
null
from typing import List

from contacts_searcher.domain.contact import Contact


class ContactRepository:

    def __init__(self, datasource):
        self.collection = datasource.get_collection('contacts')

    def write_all(self, contacts: List[Contact]):
        contacts_dict = [vars(contact) for contact in contacts]
        self.collection.insert_many(contacts_dict)
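
# --------------------------------------------------------------------------
# Usage sketch: the repository only requires a datasource exposing
# get_collection(), so a thin pymongo adapter is enough to drive it. The
# MongoDatasource class, the connection URI, and the Contact constructor
# arguments below are illustrative assumptions, not part of the project.
# --------------------------------------------------------------------------
from pymongo import MongoClient


class MongoDatasource:
    """Minimal adapter exposing the get_collection() hook the repository expects."""

    def __init__(self, uri: str, db_name: str):
        self._db = MongoClient(uri)[db_name]

    def get_collection(self, name: str):
        return self._db[name]


if __name__ == '__main__':
    # write_all() serializes each Contact to a plain dict via vars(), so any
    # attribute-bearing Contact instance works here.
    repo = ContactRepository(MongoDatasource('mongodb://localhost:27017', 'demo'))
    repo.write_all([Contact(name='Ada Lovelace'), Contact(name='Alan Turing')])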
28.538462
63
0.746631
5b24c5147c2abf137999514b97fd4957ea841269
9,243
py
Python
src/m2e_iterating.py
Freedom4501/11-Sequences
4762ced7538de7c115503b99827d7c55423013c3
[ "MIT" ]
null
null
null
src/m2e_iterating.py
Freedom4501/11-Sequences
4762ced7538de7c115503b99827d7c55423013c3
[ "MIT" ]
null
null
null
src/m2e_iterating.py
Freedom4501/11-Sequences
4762ced7538de7c115503b99827d7c55423013c3
[ "MIT" ]
null
null
null
""" This module shows how to ITERATE (i.e. loop) through a SEQUENCE: -- list -- string -- tuple It shows two ways to do so: -- using RANGE -- using just IN (no RANGE) Authors: David Mutchler, Valerie Galluzzi, Mark Hays, Amanda Stouder, and their colleagues. """ import rosegraphics as rg def main(): """ Calls the TEST functions in this module. """ run_test_sum_abs_of_all() run_test_sum_abs_of_all_without_range() run_test_fill_from_colors() run_test_print_letters() # ---------------------------------------------------------------------- # The TEST functions are further down in the file, # so that you can focus on the following examples. # ---------------------------------------------------------------------- def sum_abs_of_all(sequence): """ What comes in: -- A sequence of numbers. What goes out: Returns the sum of the absolute values of the numbers. Side effects: None. Examples: sum_all([5, -1, 10, 4, -33]) would return 5 + 1 + 10 + 4 + 33, which is 53 sum_all([10, -30, -20]) would return 10 + 30 + 20, which is 60 Type hints: :type sequence: list or tuple (of numbers) """ # ------------------------------------------------------------------ # EXAMPLE 1. Iterates through a sequence of numbers, summing them. # ------------------------------------------------------------------ total = 0 for k in range(len(sequence)): total = total + abs(sequence[k]) return total def sum_abs_of_all_without_range(sequence): """ Same specification as sum_abs_of_all above, but with a different implementation. """ # ------------------------------------------------------------------ # EXAMPLE 2. Iterates through a sequence of numbers, summing them. # Same as Example 1 above, but uses the "no range" form. # ------------------------------------------------------------------ total = 0 for number in sequence: total = total + abs(number) return total # ------------------------------------------------------------------ # The above example shows how you can iterate through a sequence # WITHOUT using a RANGE expression. This works ONLY # ** IF you do NOT need the index variable. ** # # You can ALWAYS use the form in Example 1 that uses RANGE; # this form in Example 2 is just "syntactic sugar." # Use this form if you like, but: # -- Don't let it keep you from understanding the critical # concept of an INDEX. # -- Be aware of the limitation of this form. # -- Don't confuse the two forms! # ------------------------------------------------------------------ def fill_from_colors(window, graphics_object, colors): """ What comes in: -- An rg.RoseWindow -- A rosegraphics object that can be attached to a RoseWindow and has a fill color (e.g. an rg.Circle or rg.Rectangle) -- A sequence of rosegraphics colors. What goes out: Nothing (i.e., None). Side effects: -- Attaches the given graphics object to the given RoseWindow. -- Then iterates through the given sequence of colors, using those colors to set the given graphics object's fill color. -- At each iteration, renders the window with a brief pause after doing so, to create a "flashing" display. Type hints: :type window: rg.RoseWindow :type graphics_object: rg._Shape :type colors: list or tuple str """ # ------------------------------------------------------------------ # EXAMPLE 3. Iterates through a sequence of colors. 
# ------------------------------------------------------------------ graphics_object.attach_to(window) for k in range(len(colors)): graphics_object.fill_color = colors[k] window.render() def print_letters(string): """ Prints the characters in the given string, one character per line. """ # ------------------------------------------------------------------ # EXAMPLE 4. Iterates through a STRING. # ------------------------------------------------------------------ for k in range(len(string)): print(string[k]) # ---------------------------------------------------------------------- # Just TEST functions below here. # ---------------------------------------------------------------------- def run_test_sum_abs_of_all(): """ Tests the sum_abs_of_all function. """ print() print('--------------------------------------------------') print('Testing the sum_abs_of_all function:') print('--------------------------------------------------') total1 = sum_abs_of_all([8, 13, 7, 5]) print('Returned, expected:', total1, 33) total2 = sum_abs_of_all([10, -30, -20]) print('Returned, expected:', total2, 60) total3 = sum_abs_of_all([]) print('Returned, expected:', total3, 0) def run_test_sum_abs_of_all_without_range(): """ Tests the sum_abs_of_all_without_range function. """ print() print('--------------------------------------------------') print('Testing the sum_abs_of_all_without_range function:') print('--------------------------------------------------') total1 = sum_abs_of_all_without_range([8, 13, 7, 5]) print('Returned, expected:', total1, 33) total2 = sum_abs_of_all_without_range([10, -30, -20]) print('Returned, expected:', total2, 60) total3 = sum_abs_of_all_without_range([]) print('Returned, expected:', total3, 0) def run_test_fill_from_colors(): """ Tests the fill_from_colors function. """ print('--------------------------------------------------') print('Testing the fill_from_colors function:') print('See the two graphics windows that pop up.') print('--------------------------------------------------') # ------------------------------------------------------------------ # Test 1: Flashes red, white, blue -- 5 times. # ------------------------------------------------------------------ title = 'Red, white and blue, repeated 5 times!' window = rg.RoseWindow(400, 180, title, canvas_color='dark gray') circle = rg.Circle(rg.Point(150, 100), 40) circle.attach_to(window.initial_canvas) number_of_cycles = 5 window.continue_on_mouse_click('Click anywhere in here to start') for _ in range(number_of_cycles): fill_from_colors(window, circle, ['red', 'white', 'blue']) window.close_on_mouse_click() # ------------------------------------------------------------------ # Test 2: Flashes through a bunch of colors, looping through the # list forwards in a rectangle, then backwards in an ellipse. # ------------------------------------------------------------------ colors = ['red', 'white', 'blue', 'chartreuse', 'chocolate', 'DodgerBlue', 'LightPink', 'maroon', 'yellow', 'green', 'SteelBlue', 'black'] title = 'Loop through 12 colors, forwards then backwards!' 
window = rg.RoseWindow(450, 250, title, canvas_color='yellow') rect_width = 100 rect_height = 40 rect_center = rg.Point(125, 100) rectangle = rg.Rectangle(rg.Point(rect_center.x - (rect_width / 2), rect_center.y - (rect_height / 2)), rg.Point(rect_center.x + (rect_width / 2), rect_center.y + (rect_height / 2))) oval_width = 70 oval_height = 160 oval_center = rg.Point(300, 100) ellipse = rg.Ellipse(rg.Point(oval_center.x - (oval_width / 2), oval_center.y - (oval_height / 2)), rg.Point(oval_center.x + (oval_width / 2), oval_center.y + (oval_height / 2))) rectangle.attach_to(window) ellipse.attach_to(window) window.render() window.continue_on_mouse_click('Click anywhere in here to start') # This function call iterates through the colors, # filling the rectangle with those colors: fill_from_colors(window, rectangle, colors) # The reverse method reverses its list IN PLACE # (i.e., it "mutates" its list -- more on that in future sessions). colors.reverse() window.continue_on_mouse_click() # This function call iterates through the colors, # filling the ellipse (oval) with those colors: fill_from_colors(window, ellipse, colors) window.close_on_mouse_click() def run_test_print_letters(): """ Tests the print_letters function. """ print() print('--------------------------------------------------') print('Testing the print_letters function:') print('--------------------------------------------------') print() print('Test 1: Print the letters in "Eric Clapton"') print_letters('Eric Clapton') print() print('Test 2: Print the letters in "Layla"') print_letters('Layla') # ---------------------------------------------------------------------- # Calls main to start the ball rolling. # ---------------------------------------------------------------------- main()
35.964981
73
0.509358
055e131954dc75e227fb42d97bc3312ab8c7b53a
8,212
py
Python
readthedocs/api/v2/utils.py
mehrdad-khojastefar/readthedocs.org
b958bb8d04c454324d612345890b13af54a19eb6
[ "MIT" ]
2,092
2019-06-29T07:47:30.000Z
2022-03-31T14:54:59.000Z
readthedocs/api/v2/utils.py
mehrdad-khojastefar/readthedocs.org
b958bb8d04c454324d612345890b13af54a19eb6
[ "MIT" ]
2,389
2019-06-29T04:22:55.000Z
2022-03-31T22:57:49.000Z
readthedocs/api/v2/utils.py
mehrdad-khojastefar/readthedocs.org
b958bb8d04c454324d612345890b13af54a19eb6
[ "MIT" ]
1,185
2019-06-29T21:49:31.000Z
2022-03-30T09:57:15.000Z
"""Utility functions that are used by both views and celery tasks.""" import itertools import structlog from rest_framework.pagination import PageNumberPagination from readthedocs.builds.constants import ( BRANCH, INTERNAL, LATEST, LATEST_VERBOSE_NAME, NON_REPOSITORY_VERSIONS, STABLE, STABLE_VERBOSE_NAME, TAG, ) from readthedocs.builds.models import RegexAutomationRule, Version log = structlog.get_logger(__name__) def sync_versions_to_db(project, versions, type): # pylint: disable=redefined-builtin """ Update the database with the current versions from the repository. - check if user has a ``stable`` / ``latest`` version and disable ours - update old versions with newer configs (identifier, type, machine) - create new versions that do not exist on DB (in bulk) - it does not delete versions :param project: project to update versions :param versions: list of VCSVersion fetched from the repository :param type: internal or external version :returns: set of versions' slug added """ old_version_values = project.versions.filter(type=type).values_list( 'verbose_name', 'identifier', ) old_versions = dict(old_version_values) # Add new versions versions_to_create = [] added = set() has_user_stable = False has_user_latest = False for version in versions: version_id = version['identifier'] version_name = version['verbose_name'] if version_name == STABLE_VERBOSE_NAME: has_user_stable = True created_version, created = _set_or_create_version( project=project, slug=STABLE, version_id=version_id, verbose_name=version_name, type_=type, ) if created: added.add(created_version.slug) elif version_name == LATEST_VERBOSE_NAME: has_user_latest = True created_version, created = _set_or_create_version( project=project, slug=LATEST, version_id=version_id, verbose_name=version_name, type_=type, ) if created: added.add(created_version.slug) elif version_name in old_versions: if version_id == old_versions[version_name]: # Version is correct continue # Update slug with new identifier Version.objects.filter( project=project, verbose_name=version_name, ).update( identifier=version_id, type=type, machine=False, ) log.info( 'Re-syncing versions: version updated.', version_verbose_name=version_name, version_id=version_id, ) else: # New Version versions_to_create.append((version_id, version_name)) added.update(_create_versions(project, type, versions_to_create)) if not has_user_stable: stable_version = ( project.versions.filter(slug=STABLE, type=type).first() ) if stable_version: # Put back the RTD's stable version stable_version.machine = True stable_version.save() if not has_user_latest: latest_version = ( project.versions.filter(slug=LATEST, type=type).first() ) if latest_version: # Put back the RTD's latest version latest_version.machine = True latest_version.identifier = project.get_default_branch() latest_version.verbose_name = LATEST_VERBOSE_NAME latest_version.save() if added: log.info( 'Re-syncing versions: versions added.', count=len(added), versions=','.join(itertools.islice(added, 100)), ) return added def _create_versions(project, type, versions): """ Create versions (tuple of version_id and version_name). Returns the slug of all added versions. .. note:: ``Version.slug`` relies on the post_save signal, so we can't use bulk_create. 
""" versions_objs = ( Version( project=project, type=type, identifier=version_id, verbose_name=version_name, ) for version_id, version_name in versions ) added = set() for version in versions_objs: version.save() added.add(version.slug) return added def _set_or_create_version(project, slug, version_id, verbose_name, type_): """Search or create a version and set its machine attribute to false.""" version = (project.versions.filter(slug=slug).first()) if version: version.identifier = version_id version.machine = False version.type = type_ version.save() else: created_version = Version.objects.create( project=project, type=type_, identifier=version_id, verbose_name=verbose_name, ) return created_version, True return version, False def _get_deleted_versions_qs(project, tags_data, branches_data): # We use verbose_name for tags # because several tags can point to the same identifier. versions_tags = [ version['verbose_name'] for version in tags_data ] versions_branches = [ version['identifier'] for version in branches_data ] to_delete_qs = ( project.versions(manager=INTERNAL) .exclude(uploaded=True) .exclude(slug__in=NON_REPOSITORY_VERSIONS) ) to_delete_qs = to_delete_qs.exclude( type=TAG, verbose_name__in=versions_tags, ) to_delete_qs = to_delete_qs.exclude( type=BRANCH, identifier__in=versions_branches, ) return to_delete_qs def delete_versions_from_db(project, tags_data, branches_data): """ Delete all versions not in the current repo. :returns: The slug of the deleted versions from the database. """ to_delete_qs = ( _get_deleted_versions_qs( project=project, tags_data=tags_data, branches_data=branches_data, ) .exclude(active=True) ) _, deleted = to_delete_qs.delete() versions_count = deleted.get('builds.Version', 0) log.info( 'Re-syncing versions: versions deleted.', project_slug=project.slug, count=versions_count, ) def get_deleted_active_versions(project, tags_data, branches_data): """Return the slug of active versions that were deleted from the repository.""" to_delete_qs = ( _get_deleted_versions_qs( project=project, tags_data=tags_data, branches_data=branches_data, ) .filter(active=True) ) return set(to_delete_qs.values_list('slug', flat=True)) def run_automation_rules(project, added_versions, deleted_active_versions): """ Runs the automation rules on each version. The rules are sorted by priority. :param added_versions: Slugs of versions that were added. :param deleted_active_versions: Slugs of active versions that were deleted from the repository. .. note:: Currently the versions aren't sorted in any way, the same order is keeped. """ class_ = RegexAutomationRule actions = [ (added_versions, class_.allowed_actions_on_create), (deleted_active_versions, class_.allowed_actions_on_delete), ] for versions_slug, allowed_actions in actions: versions = project.versions.filter(slug__in=versions_slug) rules = project.automation_rules.filter(action__in=allowed_actions) for version, rule in itertools.product(versions, rules): rule.run(version) class RemoteOrganizationPagination(PageNumberPagination): page_size = 25 class RemoteProjectPagination(PageNumberPagination): page_size = 15 class ProjectPagination(PageNumberPagination): page_size = 100 max_page_size = 1000
30.191176
99
0.63529
e6d68efc79f1dabb5ee889082f9089f49a143604
5,195
py
Python
python2/koans/about_class_attributes.py
rameshugar/koans
35f2407dac045040bfd54ebe9f95ce77fd8a1b23
[ "MIT" ]
null
null
null
python2/koans/about_class_attributes.py
rameshugar/koans
35f2407dac045040bfd54ebe9f95ce77fd8a1b23
[ "MIT" ]
null
null
null
python2/koans/about_class_attributes.py
rameshugar/koans
35f2407dac045040bfd54ebe9f95ce77fd8a1b23
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutClassMethods in the Ruby Koans
#

from runner.koan import *


class AboutClassAttributes(Koan):
    class Dog(object):
        pass

    def test_new_style_class_objects_are_objects(self):
        # Note: Old style class instances are not objects but they are being
        # phased out in Python 3.
        fido = self.Dog()
        self.assertEqual(True, isinstance(fido, object))

    def test_classes_are_types(self):
        self.assertEqual(True, self.Dog.__class__ == type)

    def test_classes_are_objects_too(self):
        self.assertEqual(True, issubclass(self.Dog, object))

    def test_objects_have_methods(self):
        fido = self.Dog()
        self.assertEqual(18, len(dir(fido)))

    def test_classes_have_methods(self):
        self.assertEqual(18, len(dir(self.Dog)))

    def test_creating_objects_without_defining_a_class(self):
        singularity = object()
        self.assertEqual(15, len(dir(singularity)))

    def test_defining_attributes_on_individual_objects(self):
        fido = self.Dog()
        fido.legs = 4
        self.assertEqual(4, fido.legs)

    def test_defining_functions_on_individual_objects(self):
        fido = self.Dog()
        fido.wag = lambda: 'fidos wag'
        self.assertEqual("fidos wag", fido.wag())

    def test_other_objects_are_not_affected_by_these_singleton_functions(self):
        fido = self.Dog()
        rover = self.Dog()

        def wag():
            return 'fidos wag'
        fido.wag = wag

        try:
            rover.wag()
        except Exception as ex:
            self.assertMatch("'Dog' object has no attribute 'wag'", ex[0])

    # ------------------------------------------------------------------

    class Dog2(object):
        def wag(self):
            return 'instance wag'

        def bark(self):
            return "instance bark"

        def growl(self):
            return "instance growl"

        @staticmethod
        def bark():
            return "staticmethod bark, arg: None"

        @classmethod
        def growl(cls):
            return "classmethod growl, arg: cls=" + cls.__name__

    def test_like_all_objects_classes_can_have_singleton_methods(self):
        self.assertMatch("classmethod growl, arg: cls=Dog2",
                         self.Dog2.growl())

    def test_classmethods_are_not_independent_of_instance_methods(self):
        fido = self.Dog2()
        self.assertMatch("classmethod growl, arg: cls=Dog2", fido.growl())
        self.assertMatch("classmethod growl, arg: cls=Dog2",
                         self.Dog2.growl())

    def test_staticmethods_are_unbound_functions_housed_in_a_class(self):
        self.assertMatch("staticmethod bark, arg: None", self.Dog2.bark())

    def test_staticmethods_also_overshadow_instance_methods(self):
        fido = self.Dog2()
        self.assertMatch("staticmethod bark, arg: None", fido.bark())

    # ------------------------------------------------------------------

    class Dog3(object):
        def __init__(self):
            self._name = None

        def get_name_from_instance(self):
            return self._name

        def set_name_from_instance(self, name):
            self._name = name

        @classmethod
        def get_name(cls):
            return cls._name

        @classmethod
        def set_name(cls, name):
            cls._name = name

        name = property(get_name, set_name)
        name_from_instance = property(
            get_name_from_instance, set_name_from_instance)

    def test_classmethods_can_not_be_used_as_properties(self):
        fido = self.Dog3()
        try:
            fido.name = "Fido"
        except Exception as ex:
            self.assertMatch("'classmethod' object is not callable", ex[0])

    def test_classes_and_instances_do_not_share_instance_attributes(self):
        fido = self.Dog3()
        fido.set_name_from_instance("Fido")
        fido.set_name("Rover")
        self.assertEqual("Fido", fido.get_name_from_instance())
        self.assertEqual("Rover", self.Dog3.get_name())

    def test_classes_and_instances_do_share_class_attributes(self):
        fido = self.Dog3()
        fido.set_name("Fido")
        self.assertEqual("Fido", fido.get_name())
        self.assertEqual("Fido", self.Dog3.get_name())

    # ------------------------------------------------------------------

    class Dog4(object):
        def a_class_method(cls):
            return 'dogs class method'

        def a_static_method():
            return 'dogs static method'

        a_class_method = classmethod(a_class_method)
        a_static_method = staticmethod(a_static_method)

    def test_you_can_define_class_methods_without_using_a_decorator(self):
        self.assertEqual("dogs class method", self.Dog4.a_class_method())

    def test_you_can_define_static_methods_without_using_a_decorator(self):
        self.assertEqual("dogs static method", self.Dog4.a_static_method())

    # ------------------------------------------------------------------

    def test_you_can_explicitly_call_class_methods_from_instance_methods(self):
        fido = self.Dog4()
        self.assertEqual("dogs class method",
                         fido.__class__.a_class_method())
31.295181
79
0.616362
2587b451e7fccef4725bc0f4eac39826fa783d97
869
py
Python
qiskit/test/mock/backends/valencia/fake_valencia.py
ajavadia/qiskit-sdk-py
a59e8e6be1793197e19998c1f7dcfc45e6f2f3af
[ "Apache-2.0" ]
11
2019-06-27T09:53:29.000Z
2021-03-02T04:40:30.000Z
qiskit/test/mock/backends/valencia/fake_valencia.py
ajavadia/qiskit-sdk-py
a59e8e6be1793197e19998c1f7dcfc45e6f2f3af
[ "Apache-2.0" ]
24
2021-01-27T08:20:27.000Z
2021-07-06T09:42:28.000Z
qiskit/test/mock/backends/valencia/fake_valencia.py
ajavadia/qiskit-sdk-py
a59e8e6be1793197e19998c1f7dcfc45e6f2f3af
[ "Apache-2.0" ]
4
2019-08-05T15:35:33.000Z
2020-09-18T18:55:02.000Z
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""
Fake Valencia device (5 qubit).
"""

import os

from qiskit.test.mock.fake_pulse_backend import FakePulseBackend


class FakeValencia(FakePulseBackend):
    """A fake 5 qubit backend."""

    dirname = os.path.dirname(__file__)
    conf_filename = "conf_valencia.json"
    props_filename = "props_valencia.json"
    defs_filename = "defs_valencia.json"
    backend_name = "fake_valencia"
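
# --------------------------------------------------------------------------
# Usage sketch, assuming a qiskit-terra version where qiskit.execute and
# these mock backends are available, with qiskit-aer installed to back the
# simulation. The Bell-state circuit is illustrative only.
# --------------------------------------------------------------------------
if __name__ == "__main__":
    from qiskit import QuantumCircuit, execute

    backend = FakeValencia()

    # Prepare and measure a 2-qubit Bell state on the simulated device.
    bell = QuantumCircuit(2, 2)
    bell.h(0)
    bell.cx(0, 1)
    bell.measure([0, 1], [0, 1])

    # Counts should concentrate on '00' and '11', with some spread from the
    # device noise model baked into the fake backend.
    print(execute(bell, backend=backend, shots=1024).result().get_counts())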
29.965517
77
0.745685
cd51d8958e972b16f5e67b96a52f74c87b5654ae
17,898
py
Python
tests/web/classes/test_objects.py
priya1puresoftware/python-slack-sdk
3503182feaaf4d41b57fd8bf10038ebc99f1f3c7
[ "MIT" ]
2,486
2016-11-03T14:31:43.000Z
2020-10-26T23:07:44.000Z
tests/web/classes/test_objects.py
priya1puresoftware/python-slack-sdk
3503182feaaf4d41b57fd8bf10038ebc99f1f3c7
[ "MIT" ]
721
2016-11-03T21:26:56.000Z
2020-10-26T12:41:29.000Z
tests/web/classes/test_objects.py
priya1puresoftware/python-slack-sdk
3503182feaaf4d41b57fd8bf10038ebc99f1f3c7
[ "MIT" ]
627
2016-11-02T19:04:19.000Z
2020-10-25T19:21:13.000Z
import copy import unittest from typing import Optional, List, Union from slack.errors import SlackObjectFormationError from slack.web.classes import JsonObject, JsonValidator from slack.web.classes.objects import ( ChannelLink, ConfirmObject, DateLink, EveryoneLink, HereLink, Link, MarkdownTextObject, ObjectLink, Option, OptionGroup, PlainTextObject, ) from . import STRING_301_CHARS, STRING_51_CHARS class SimpleJsonObject(JsonObject): attributes = {"some", "test", "keys"} def __init__(self): self.some = "this is" self.test = "a test" self.keys = "object" @JsonValidator("some validation message") def test_valid(self): return len(self.test) <= 10 @JsonValidator("this should never fail") def always_valid_test(self): return True class KeyValueObject(JsonObject): attributes = {"name", "value"} def __init__( self, *, name: Optional[str] = None, value: Optional[str] = None, ): self.name = name self.value = value class NestedObject(JsonObject): attributes = {"initial", "options"} def __init__( self, *, initial: Union[dict, KeyValueObject], options: List[Union[dict, KeyValueObject]], ): self.initial = ( KeyValueObject(**initial) if isinstance(initial, dict) else initial ) self.options = [ KeyValueObject(**o) if isinstance(o, dict) else o for o in options ] class JsonObjectTests(unittest.TestCase): def setUp(self) -> None: self.good_test_object = SimpleJsonObject() obj = SimpleJsonObject() obj.test = STRING_51_CHARS self.bad_test_object = obj def test_json_formation(self): self.assertDictEqual( self.good_test_object.to_dict(), {"some": "this is", "test": "a test", "keys": "object"}, ) def test_validate_json_fails(self): with self.assertRaises(SlackObjectFormationError): self.bad_test_object.validate_json() def test_to_dict_performs_validation(self): with self.assertRaises(SlackObjectFormationError): self.bad_test_object.to_dict() def test_get_non_null_attributes(self): expected = {"name": "something"} obj = KeyValueObject(name="something", value=None) obj2 = copy.deepcopy(obj) self.assertDictEqual(expected, obj.get_non_null_attributes()) self.assertEqual(str(obj2), str(obj)) def test_get_non_null_attributes_nested(self): expected = { "initial": {"name": "something"}, "options": [ {"name": "something"}, {"name": "message", "value": "That's great!"}, ], } obj1 = KeyValueObject(name="something", value=None) obj2 = KeyValueObject(name="message", value="That's great!") options = [obj1, obj2] nested = NestedObject(initial=obj1, options=options) self.assertEqual(type(obj1), KeyValueObject) self.assertTrue(hasattr(obj1, "value")) self.assertEqual(type(nested.initial), KeyValueObject) self.assertEqual(type(options[0]), KeyValueObject) self.assertTrue(hasattr(options[0], "value")) self.assertEqual(type(nested.options[0]), KeyValueObject) self.assertTrue(hasattr(nested.options[0], "value")) dict_value = nested.get_non_null_attributes() self.assertDictEqual(expected, dict_value) self.assertEqual(type(obj1), KeyValueObject) self.assertTrue(hasattr(obj1, "value")) self.assertEqual(type(nested.initial), KeyValueObject) self.assertEqual(type(options[0]), KeyValueObject) self.assertTrue(hasattr(options[0], "value")) self.assertEqual(type(nested.options[0]), KeyValueObject) self.assertTrue(hasattr(nested.options[0], "value")) def test_get_non_null_attributes_nested_2(self): expected = { "initial": {"name": "something"}, "options": [ {"name": "something"}, {"name": "message", "value": "That's great!"}, ], } nested = NestedObject( initial={"name": "something"}, options=[ {"name": "something"}, {"name": "message", 
"value": "That's great!"}, ], ) self.assertDictEqual(expected, nested.get_non_null_attributes()) class JsonValidatorTests(unittest.TestCase): def setUp(self) -> None: self.validator_instance = JsonValidator("message") self.class_instance = SimpleJsonObject() def test_isolated_class(self): def does_nothing(): return False wrapped = self.validator_instance(does_nothing) # noinspection PyUnresolvedReferences self.assertTrue(wrapped.validator) def test_wrapped_class(self): for attribute in dir(self.class_instance): attr = getattr(self.class_instance, attribute, None) if attribute in ("test_valid", "always_valid_test"): self.assertTrue(attr.validator) else: with self.assertRaises(AttributeError): # noinspection PyStatementEffect attr.validator class LinkTests(unittest.TestCase): def test_without_text(self): link = Link(url="http://google.com", text="") self.assertEqual(f"{link}", "<http://google.com>") def test_with_text(self): link = Link(url="http://google.com", text="google") self.assertEqual(f"{link}", "<http://google.com|google>") class DateLinkTests(unittest.TestCase): def setUp(self) -> None: self.epoch = 1234567890 def test_simple_formation(self): datelink = DateLink( date=self.epoch, date_format="{date_long}", fallback=f"{self.epoch}" ) self.assertEqual( f"{datelink}", f"<!date^{self.epoch}^{{date_long}}|{self.epoch}>" ) def test_with_url(self): datelink = DateLink( date=self.epoch, date_format="{date_long}", link="http://google.com", fallback=f"{self.epoch}", ) self.assertEqual( f"{datelink}", f"<!date^{self.epoch}^{{date_long}}^http://google.com|{self.epoch}>", ) class ObjectLinkTests(unittest.TestCase): def test_channel(self): objlink = ObjectLink(object_id="C12345") self.assertEqual(f"{objlink}", "<#C12345>") def test_group_message(self): objlink = ObjectLink(object_id="G12345") self.assertEqual(f"{objlink}", "<#G12345>") def test_subteam_message(self): objlink = ObjectLink(object_id="S12345") self.assertEqual(f"{objlink}", "<!subteam^S12345>") def test_with_label(self): objlink = ObjectLink(object_id="C12345", text="abc") self.assertEqual(f"{objlink}", "<#C12345|abc>") def test_unknown_prefix(self): objlink = ObjectLink(object_id="Z12345") self.assertEqual(f"{objlink}", "<@Z12345>") class SpecialLinkTests(unittest.TestCase): def test_channel_link(self): self.assertEqual(f"{ChannelLink()}", "<!channel|channel>") def test_here_link(self): self.assertEqual(f"{HereLink()}", "<!here|here>") def test_everyone_link(self): self.assertEqual(f"{EveryoneLink()}", "<!everyone|everyone>") class PlainTextObjectTests(unittest.TestCase): def test_basic_json(self): self.assertDictEqual( {"text": "some text", "type": "plain_text"}, PlainTextObject(text="some text").to_dict(), ) self.assertDictEqual( {"text": "some text", "emoji": False, "type": "plain_text"}, PlainTextObject(text="some text", emoji=False).to_dict(), ) def test_from_string(self): plaintext = PlainTextObject(text="some text", emoji=True) self.assertDictEqual( plaintext.to_dict(), PlainTextObject.direct_from_string("some text") ) class MarkdownTextObjectTests(unittest.TestCase): def test_basic_json(self): self.assertDictEqual( {"text": "some text", "type": "mrkdwn"}, MarkdownTextObject(text="some text").to_dict(), ) self.assertDictEqual( {"text": "some text", "verbatim": True, "type": "mrkdwn"}, MarkdownTextObject(text="some text", verbatim=True).to_dict(), ) def test_from_string(self): markdown = MarkdownTextObject(text="some text") self.assertDictEqual( markdown.to_dict(), MarkdownTextObject.direct_from_string("some text") ) class 
ConfirmObjectTests(unittest.TestCase): def test_basic_json(self): expected = { "confirm": {"emoji": True, "text": "Yes", "type": "plain_text"}, "deny": {"emoji": True, "text": "No", "type": "plain_text"}, "text": {"text": "are you sure?", "type": "mrkdwn"}, "title": {"emoji": True, "text": "some title", "type": "plain_text"}, } simple_object = ConfirmObject(title="some title", text="are you sure?") self.assertDictEqual(expected, simple_object.to_dict()) self.assertDictEqual(expected, simple_object.to_dict("block")) self.assertDictEqual( { "text": "are you sure?", "title": "some title", "ok_text": "Okay", "dismiss_text": "Cancel", }, simple_object.to_dict("action"), ) def test_confirm_overrides(self): confirm = ConfirmObject( title="some title", text="are you sure?", confirm="I'm really sure", deny="Nevermind", ) expected = { "confirm": {"text": "I'm really sure", "type": "plain_text", "emoji": True}, "deny": {"text": "Nevermind", "type": "plain_text", "emoji": True}, "text": {"text": "are you sure?", "type": "mrkdwn"}, "title": {"text": "some title", "type": "plain_text", "emoji": True}, } self.assertDictEqual(expected, confirm.to_dict()) self.assertDictEqual(expected, confirm.to_dict("block")) self.assertDictEqual( { "text": "are you sure?", "title": "some title", "ok_text": "I'm really sure", "dismiss_text": "Nevermind", }, confirm.to_dict("action"), ) def test_passing_text_objects(self): direct_construction = ConfirmObject(title="title", text="Are you sure?") mrkdwn = MarkdownTextObject(text="Are you sure?") preconstructed = ConfirmObject(title="title", text=mrkdwn) self.assertDictEqual(direct_construction.to_dict(), preconstructed.to_dict()) plaintext = PlainTextObject(text="Are you sure?", emoji=False) passed_plaintext = ConfirmObject(title="title", text=plaintext) self.assertDictEqual( { "confirm": {"emoji": True, "text": "Yes", "type": "plain_text"}, "deny": {"emoji": True, "text": "No", "type": "plain_text"}, "text": {"emoji": False, "text": "Are you sure?", "type": "plain_text"}, "title": {"emoji": True, "text": "title", "type": "plain_text"}, }, passed_plaintext.to_dict(), ) def test_title_length(self): with self.assertRaises(SlackObjectFormationError): ConfirmObject(title=STRING_301_CHARS, text="Are you sure?").to_dict() def test_text_length(self): with self.assertRaises(SlackObjectFormationError): ConfirmObject(title="title", text=STRING_301_CHARS).to_dict() def test_text_length_with_object(self): with self.assertRaises(SlackObjectFormationError): plaintext = PlainTextObject(text=STRING_301_CHARS) ConfirmObject(title="title", text=plaintext).to_dict() with self.assertRaises(SlackObjectFormationError): markdown = MarkdownTextObject(text=STRING_301_CHARS) ConfirmObject(title="title", text=markdown).to_dict() def test_confirm_length(self): with self.assertRaises(SlackObjectFormationError): ConfirmObject( title="title", text="Are you sure?", confirm=STRING_51_CHARS ).to_dict() def test_deny_length(self): with self.assertRaises(SlackObjectFormationError): ConfirmObject( title="title", text="Are you sure?", deny=STRING_51_CHARS ).to_dict() class OptionTests(unittest.TestCase): def setUp(self) -> None: self.common = Option(label="an option", value="option_1") def test_block_style_json(self): expected = { "text": {"type": "plain_text", "text": "an option", "emoji": True}, "value": "option_1", } self.assertDictEqual(expected, self.common.to_dict("block")) self.assertDictEqual(expected, self.common.to_dict()) def test_dialog_style_json(self): expected = {"label": "an option", "value": 
"option_1"} self.assertDictEqual(expected, self.common.to_dict("dialog")) def test_action_style_json(self): expected = {"text": "an option", "value": "option_1"} self.assertDictEqual(expected, self.common.to_dict("action")) def test_from_single_value(self): option = Option(label="option_1", value="option_1") self.assertDictEqual( option.to_dict("text"), option.from_single_value("option_1").to_dict("text"), ) def test_label_length(self): with self.assertRaises(SlackObjectFormationError): Option(label=STRING_301_CHARS, value="option_1").to_dict("text") def test_value_length(self): with self.assertRaises(SlackObjectFormationError): Option(label="option_1", value=STRING_301_CHARS).to_dict("text") class OptionGroupTests(unittest.TestCase): maxDiff = None def setUp(self) -> None: self.common_options = [ Option.from_single_value("one"), Option.from_single_value("two"), Option.from_single_value("three"), ] self.common = OptionGroup(label="an option", options=self.common_options) def test_block_style_json(self): expected = { "label": {"emoji": True, "text": "an option", "type": "plain_text"}, "options": [ { "text": {"emoji": True, "text": "one", "type": "plain_text"}, "value": "one", }, { "text": {"emoji": True, "text": "two", "type": "plain_text"}, "value": "two", }, { "text": {"emoji": True, "text": "three", "type": "plain_text"}, "value": "three", }, ], } self.assertDictEqual(expected, self.common.to_dict("block")) self.assertDictEqual(expected, self.common.to_dict()) def test_dialog_style_json(self): self.assertDictEqual( { "label": "an option", "options": [ {"label": "one", "value": "one"}, {"label": "two", "value": "two"}, {"label": "three", "value": "three"}, ], }, self.common.to_dict("dialog"), ) def test_action_style_json(self): self.assertDictEqual( { "text": "an option", "options": [ {"text": "one", "value": "one"}, {"text": "two", "value": "two"}, {"text": "three", "value": "three"}, ], }, self.common.to_dict("action"), ) def test_label_length(self): with self.assertRaises(SlackObjectFormationError): OptionGroup(label=STRING_301_CHARS, options=self.common_options).to_dict( "text" ) def test_options_length(self): with self.assertRaises(SlackObjectFormationError): OptionGroup(label="option_group", options=self.common_options * 34).to_dict( "text" ) def test_confirm_style(self): obj = ConfirmObject.parse( { "title": {"type": "plain_text", "text": "Are you sure?"}, "text": { "type": "mrkdwn", "text": "Wouldn't you prefer a good game of _chess_?", }, "confirm": {"type": "plain_text", "text": "Do it"}, "deny": {"type": "plain_text", "text": "Stop, I've changed my mind!"}, "style": "primary", } ) obj.validate_json() self.assertEqual("primary", obj.style) def test_confirm_style_validation(self): with self.assertRaises(SlackObjectFormationError): ConfirmObject.parse( { "title": {"type": "plain_text", "text": "Are you sure?"}, "text": { "type": "mrkdwn", "text": "Wouldn't you prefer a good game of _chess_?", }, "confirm": {"type": "plain_text", "text": "Do it"}, "deny": { "type": "plain_text", "text": "Stop, I've changed my mind!", }, "style": "something-wrong", } ).validate_json()
34.419231
88
0.568052
9f51d718e882de96ff7abf8365476dc6cd21d4d5
2,091
py
Python
listings/migrations/0001_initial.py
jubayer-hossain/real-estate-app
b2999f5b0d6311aeaa87275f7c979a9573f2b4ed
[ "MIT" ]
null
null
null
listings/migrations/0001_initial.py
jubayer-hossain/real-estate-app
b2999f5b0d6311aeaa87275f7c979a9573f2b4ed
[ "MIT" ]
null
null
null
listings/migrations/0001_initial.py
jubayer-hossain/real-estate-app
b2999f5b0d6311aeaa87275f7c979a9573f2b4ed
[ "MIT" ]
null
null
null
# Generated by Django 2.1.7 on 2019-02-15 17:43

import datetime
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('realtors', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Listing',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('city', models.CharField(max_length=200)),
                ('state', models.CharField(max_length=20)),
                ('zipcode', models.CharField(max_length=200)),
                ('description', models.TextField(blank=True)),
                ('price', models.IntegerField()),
                ('bedrooms', models.IntegerField()),
                ('bathrooms', models.DecimalField(decimal_places=1, max_digits=2)),
                ('garage', models.IntegerField(default=0)),
                ('sqft', models.IntegerField()),
                ('lot_size', models.DecimalField(decimal_places=1, max_digits=5)),
                ('photo_main', models.ImageField(upload_to='photos/%Y%m%d/')),
                ('photo_1', models.ImageField(blank=True, upload_to='photos/%Y%m%d/')),
                ('photo_2', models.ImageField(blank=True, upload_to='photos/%Y%m%d/')),
                ('photo_3', models.ImageField(blank=True, upload_to='photos/%Y%m%d/')),
                ('photo_4', models.ImageField(blank=True, upload_to='photos/%Y%m%d/')),
                ('photo_5', models.ImageField(blank=True, upload_to='photos/%Y%m%d/')),
                ('photo_6', models.ImageField(blank=True, upload_to='photos/%Y%m%d/')),
                ('is_published', models.BooleanField(default=True)),
                ('list_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('realtor', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='realtors.Realtor')),
            ],
        ),
    ]
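
# --------------------------------------------------------------------------
# For orientation: a models.py that would generate the migration above.
# Field names and options are taken directly from the CreateModel operation;
# everything else (the import of Realtor, the __str__ method) is a sketch,
# and the real app's file may differ, so it is left commented out here.
# --------------------------------------------------------------------------
# from datetime import datetime
# from django.db import models
# from realtors.models import Realtor
#
#
# class Listing(models.Model):
#     realtor = models.ForeignKey(Realtor, on_delete=models.DO_NOTHING)
#     title = models.CharField(max_length=200)
#     city = models.CharField(max_length=200)
#     state = models.CharField(max_length=20)
#     zipcode = models.CharField(max_length=200)
#     description = models.TextField(blank=True)
#     price = models.IntegerField()
#     bedrooms = models.IntegerField()
#     bathrooms = models.DecimalField(max_digits=2, decimal_places=1)
#     garage = models.IntegerField(default=0)
#     sqft = models.IntegerField()
#     lot_size = models.DecimalField(max_digits=5, decimal_places=1)
#     photo_main = models.ImageField(upload_to='photos/%Y%m%d/')
#     photo_1 = models.ImageField(upload_to='photos/%Y%m%d/', blank=True)
#     # ... photo_2 through photo_6 follow the same pattern ...
#     is_published = models.BooleanField(default=True)
#     list_date = models.DateTimeField(default=datetime.now, blank=True)
#
#     def __str__(self):
#         return self.title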
46.466667
118
0.580583
8687ff23eb5a3eff89e3a7976106be26334cf164
966
py
Python
tests/datasets/loaders/test_criteo_uplift_prediction.py
duketemon/pyuplift
33daa0768ff333387cb8223ebfaedaffa57de335
[ "MIT" ]
26
2019-02-24T07:41:59.000Z
2022-01-03T05:07:26.000Z
tests/datasets/loaders/test_criteo_uplift_prediction.py
duketemon/pyuplift
33daa0768ff333387cb8223ebfaedaffa57de335
[ "MIT" ]
8
2019-03-17T07:57:16.000Z
2019-08-02T19:55:49.000Z
tests/datasets/loaders/test_criteo_uplift_prediction.py
duketemon/pyuplift
33daa0768ff333387cb8223ebfaedaffa57de335
[ "MIT" ]
4
2019-07-17T12:36:37.000Z
2020-07-16T11:36:35.000Z
import os
import shutil

import pytest

from pyuplift.datasets import load_criteo_uplift_prediction
from pyuplift.datasets import download_criteo_uplift_prediction


data_home = os.path.join(os.sep.join(__file__.split(os.sep)[:-1]), 'data')


def test_load_criteo_uplift_prediction__do_not_download_if_missing():
    with pytest.raises(FileNotFoundError):
        load_criteo_uplift_prediction(data_home=data_home, download_if_missing=False)


def test_download_criteo_uplift_prediction__wrong_url():
    with pytest.raises(Exception):
        download_criteo_uplift_prediction(url='https://s3.us-east-2.amazonaws.com/criteo-uplift/criteo-uplift.csv.gz')


def test_download_criteo_uplift_prediction():
    download_criteo_uplift_prediction(data_home=data_home)
    # shutil.rmtree(data_home)


def test_load_criteo_uplift_prediction():
    df = load_criteo_uplift_prediction(data_home=data_home)
    assert len(df['feature_names']) != 11
    shutil.rmtree(data_home)
32.2
118
0.807453
fb2d1d12098312c270d4a6e2a50ba1bd24bba051
48,085
py
Python
edb/pgsql/compiler/pathctx.py
sfermigier/edgedb
13aff7004aa682777287157dea52642c374967e8
[ "Apache-2.0" ]
null
null
null
edb/pgsql/compiler/pathctx.py
sfermigier/edgedb
13aff7004aa682777287157dea52642c374967e8
[ "Apache-2.0" ]
null
null
null
edb/pgsql/compiler/pathctx.py
sfermigier/edgedb
13aff7004aa682777287157dea52642c374967e8
[ "Apache-2.0" ]
null
null
null
# # This source file is part of the EdgeDB open source project. # # Copyright 2008-present MagicStack Inc. and the EdgeDB authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Helpers to manage statement path contexts.""" from __future__ import annotations import functools from typing import * from edb.common import enum as s_enum from edb.ir import ast as irast from edb.ir import typeutils as irtyputils from edb.schema import pointers as s_pointers from edb.pgsql import ast as pgast from edb.pgsql import types as pg_types from . import astutils from . import context from . import output class PathAspect(s_enum.StrEnum): IDENTITY = 'identity' VALUE = 'value' SOURCE = 'source' SERIALIZED = 'serialized' # A mapping of more specific aspect -> less specific aspect for objects OBJECT_ASPECT_SPECIFICITY_MAP = { PathAspect.IDENTITY: PathAspect.VALUE, PathAspect.VALUE: PathAspect.SOURCE, PathAspect.SERIALIZED: PathAspect.SOURCE, } # A mapping of more specific aspect -> less specific aspect for primitives PRIMITIVE_ASPECT_SPECIFICITY_MAP = { PathAspect.SERIALIZED: PathAspect.VALUE, } def get_less_specific_aspect( path_id: irast.PathId, aspect: str, ) -> Optional[str]: if path_id.is_objtype_path(): mapping = OBJECT_ASPECT_SPECIFICITY_MAP else: mapping = PRIMITIVE_ASPECT_SPECIFICITY_MAP less_specific_aspect = mapping.get(PathAspect(aspect)) if less_specific_aspect is not None: return str(less_specific_aspect) else: return None def map_path_id( path_id: irast.PathId, path_id_map: Dict[irast.PathId, irast.PathId]) -> irast.PathId: sorted_map = sorted( path_id_map.items(), key=lambda kv: len(kv[0]), reverse=True) for outer_id, inner_id in sorted_map: new_path_id = path_id.replace_prefix(outer_id, inner_id) if new_path_id != path_id: path_id = new_path_id break return path_id def reverse_map_path_id( path_id: irast.PathId, path_id_map: Dict[irast.PathId, irast.PathId]) -> irast.PathId: for outer_id, inner_id in path_id_map.items(): new_path_id = path_id.replace_prefix(inner_id, outer_id) if new_path_id != path_id: path_id = new_path_id break return path_id def put_path_id_map( rel: pgast.Query, outer_path_id: irast.PathId, inner_path_id: irast.PathId, ) -> None: inner_path_id = map_path_id(inner_path_id, rel.view_path_id_map) rel.view_path_id_map[outer_path_id] = inner_path_id def get_path_var( rel: pgast.Query, path_id: irast.PathId, *, aspect: str, env: context.Environment) -> pgast.BaseExpr: """Return a value expression for a given *path_id* in a given *rel*.""" if isinstance(rel, pgast.CommonTableExpr): rel = rel.query # Check if we already have a var, before remapping the path_id. # This is useful for serialized aspect disambiguation in tuples, # since process_set_as_tuple() records serialized vars with # original path_id. 
if (path_id, aspect) in rel.path_namespace: return rel.path_namespace[path_id, aspect] if rel.view_path_id_map: path_id = map_path_id(path_id, rel.view_path_id_map) if (path_id, aspect) in rel.path_namespace: return rel.path_namespace[path_id, aspect] if astutils.is_set_op_query(rel): return _get_path_var_in_setop(rel, path_id, aspect=aspect, env=env) ptrref = path_id.rptr() ptrref_dir = path_id.rptr_dir() is_type_intersection = path_id.is_type_intersection_path() src_path_id: Optional[irast.PathId] = None if ptrref is not None and not is_type_intersection: ptr_info = pg_types.get_ptrref_storage_info( ptrref, resolve_type=False, link_bias=False, allow_missing=True) ptr_dir = path_id.rptr_dir() is_inbound = ptr_dir == s_pointers.PointerDirection.Inbound if is_inbound: src_path_id = path_id else: src_path_id = path_id.src_path() assert src_path_id is not None src_rptr = src_path_id.rptr() if ( irtyputils.is_id_ptrref(ptrref) and ( src_rptr is None or ptrref_dir is not s_pointers.PointerDirection.Inbound ) ): # When there is a reference to the id property of # an object which is linked to by a link stored # inline, we want to route the reference to the # inline attribute. For example, # Foo.__type__.id gets resolved to the Foo.__type__ # column. This can only be done if Foo is visible # in scope, and Foo.__type__ is not a computable. pid = src_path_id while pid.is_type_intersection_path(): # Skip type intersection step(s). src_pid = pid.src_path() if src_pid is not None: src_rptr = src_pid.rptr() pid = src_pid else: break if (src_rptr is not None and not irtyputils.is_computable_ptrref(src_rptr) and env.ptrref_source_visibility.get(src_rptr)): src_ptr_info = pg_types.get_ptrref_storage_info( src_rptr, resolve_type=False, link_bias=False, allow_missing=True) if (src_ptr_info and src_ptr_info.table_type == 'ObjectType'): src_path_id = src_path_id.src_path() ptr_info = src_ptr_info else: ptr_info = None ptr_dir = None var: Optional[pgast.BaseExpr] if ptrref is None: if len(path_id) == 1: # This is an scalar set derived from an expression. src_path_id = path_id elif ptrref.source_ptr is not None: if ptr_info and ptr_info.table_type != 'link' and not is_inbound: # This is a link prop that is stored in source rel, # step back to link source rvar. _prefix_pid = path_id.src_path() assert _prefix_pid is not None src_path_id = _prefix_pid.src_path() elif is_type_intersection: src_path_id = path_id assert src_path_id is not None # Find which rvar will have path_id as an output src_aspect, rel_rvar, found_path_var = _find_rel_rvar( rel, path_id, src_path_id, aspect=aspect, env=env) if found_path_var: return found_path_var if rel_rvar is None: raise LookupError( f'there is no range var for ' f'{src_path_id} {src_aspect} in {rel}') if isinstance(rel_rvar, pgast.IntersectionRangeVar): if ( (path_id.is_objtype_path() and src_path_id == path_id) or (ptrref is not None and irtyputils.is_id_ptrref(ptrref)) ): rel_rvar = rel_rvar.component_rvars[-1] else: # Intersection rvars are basically JOINs of the relevant # parts of the type intersection, and so we need to make # sure we pick the correct component relation of that JOIN. 
rel_rvar = _find_rvar_in_intersection_by_typeref( path_id, rel_rvar.component_rvars, ) source_rel = rel_rvar.query if isinstance(ptrref, irast.PointerRef) and rel_rvar.typeref is not None: assert ptrref_dir actual_ptrref = irtyputils.maybe_find_actual_ptrref( rel_rvar.typeref, ptrref, dir=ptrref_dir) if actual_ptrref is not None: ptr_info = pg_types.get_ptrref_storage_info( actual_ptrref, resolve_type=False, link_bias=False) outvar = get_path_output( source_rel, path_id, ptr_info=ptr_info, aspect=aspect, env=env) var = astutils.get_rvar_var(rel_rvar, outvar) put_path_var(rel, path_id, var, aspect=aspect, env=env) if isinstance(var, pgast.TupleVar): for element in var.elements: put_path_var_if_not_exists(rel, element.path_id, element.val, aspect=aspect, env=env) return var def _find_rel_rvar( rel: pgast.Query, path_id: irast.PathId, src_path_id: irast.PathId, *, aspect: str, env: context.Environment, ) -> Tuple[str, Optional[pgast.PathRangeVar], Optional[pgast.BaseExpr]]: """Rummage around rel looking for an appropriate rvar for path_id. Somewhat unfortunately, some checks to find the actual path var (in a particular tuple case) need to occur in the middle of the rvar rel search, so we can also find the actual path var in passing. """ src_aspect = aspect rel_rvar = maybe_get_path_rvar(rel, path_id, aspect=aspect, env=env) if rel_rvar is None: alt_aspect = get_less_specific_aspect(path_id, aspect) if alt_aspect is not None: rel_rvar = maybe_get_path_rvar( rel, path_id, aspect=alt_aspect, env=env) else: alt_aspect = None if rel_rvar is None: if src_path_id.is_objtype_path(): src_aspect = 'source' else: src_aspect = aspect if src_path_id.is_tuple_path(): if src_aspect == 'identity': src_aspect = 'value' if (var := _find_in_output_tuple( rel, path_id, src_aspect, env=env)): return src_aspect, None, var rel_rvar = maybe_get_path_rvar( rel, src_path_id, aspect=src_aspect, env=env) if rel_rvar is None: _src_path_id_prefix = src_path_id.src_path() if _src_path_id_prefix is not None: rel_rvar = maybe_get_path_rvar( rel, _src_path_id_prefix, aspect=src_aspect, env=env) else: rel_rvar = maybe_get_path_rvar( rel, src_path_id, aspect=src_aspect, env=env) if (rel_rvar is None and src_aspect != 'source' and path_id != src_path_id): rel_rvar = maybe_get_path_rvar( rel, src_path_id, aspect='source', env=env) if rel_rvar is None and alt_aspect is not None: # There is no source range var for the requested aspect, # check if there is a cached var with less specificity. var = rel.path_namespace.get((path_id, alt_aspect)) if var is not None: put_path_var(rel, path_id, var, aspect=aspect, env=env) return src_aspect, None, var return src_aspect, rel_rvar, None def _get_path_var_in_setop( rel: pgast.Query, path_id: irast.PathId, *, aspect: str, env: context.Environment, ) -> pgast.BaseExpr: test_vals = [] if aspect in ('value', 'serialized'): test_cb = functools.partial( maybe_get_path_var, env=env, path_id=path_id, aspect=aspect) test_vals = astutils.for_each_query_in_set(rel, test_cb) # In order to ensure output balance, we only want to output # a TupleVar if *every* subquery outputs a TupleVar. # If some but not all output TupleVars, we need to fix up # the output TupleVars by outputting them as a real tuple. # This is needed for cases like `(Foo.bar UNION (1,2))`. 
if ( any(isinstance(x, pgast.TupleVarBase) for x in test_vals) and not all(isinstance(x, pgast.TupleVarBase) for x in test_vals) ): def fixup(subrel: pgast.Query) -> None: cur = get_path_var_and_fix_tuple( subrel, env=env, path_id=path_id, aspect=aspect) if isinstance(cur, pgast.TupleVarBase): new = output.output_as_value(cur, env=env) new_path_id = map_path_id(path_id, subrel.view_path_id_map) put_path_var( subrel, new_path_id, new, force=True, env=env, aspect=aspect) astutils.for_each_query_in_set(rel, fixup) # We disable the find_path_output optimization when doing # UNIONs to avoid situations where they have different numbers # of columns. cb = functools.partial( get_path_output_or_null, env=env, disable_output_fusion=True, path_id=path_id, aspect=aspect) outputs = astutils.for_each_query_in_set(rel, cb) counts = astutils.for_each_query_in_set( rel, lambda x: len(x.target_list)) assert counts == [counts[0]] * len(counts) first: Optional[pgast.OutputVar] = None optional = False all_null = True nullable = False for colref, is_null in outputs: if colref.nullable: nullable = True if first is None: first = colref if is_null: optional = True else: all_null = False if all_null: raise LookupError( f'cannot find refs for ' f'path {path_id} {aspect} in {rel}') if first is None: raise AssertionError( f'union did not produce any outputs') # Path vars produced by UNION expressions can be "optional", # i.e the record is accepted as-is when such var is NULL. # This is necessary to correctly join heterogeneous UNIONs. var = astutils.strip_output_var( first, optional=optional, nullable=optional or nullable) put_path_var(rel, path_id, var, aspect=aspect, env=env) return var def _find_rvar_in_intersection_by_typeref( path_id: irast.PathId, component_rvars: Sequence[pgast.PathRangeVar], ) -> pgast.PathRangeVar: assert component_rvars pid_rptr = path_id.rptr() if pid_rptr is not None: if pid_rptr.material_ptr is not None: pid_rptr = pid_rptr.material_ptr tref = pid_rptr.out_source else: tref = path_id.target for component_rvar in component_rvars: if ( component_rvar.typeref is not None and irtyputils.type_contains(tref, component_rvar.typeref) ): rel_rvar = component_rvar break else: raise AssertionError( f'no rvar in intersection matches path id {path_id}' ) return rel_rvar def _find_in_output_tuple( rel: pgast.Query, path_id: irast.PathId, aspect: str, env: context.Environment) -> Optional[pgast.BaseExpr]: """Try indirecting a source tuple already present as an output. Normally tuple indirections are handled by process_set_as_tuple_indirection, but UNIONing an explicit tuple with a tuple coming from a base relation (like `(Foo.bar UNION (1,2)).0`) can lead to us looking for a tuple path in relations that only have the actual full tuple. (See test_edgeql_coalesce_tuple_{08,09}). We handle this by checking whether some prefix of the tuple path is present in the path_outputs. This is sufficient because the relevant cases are all caused by set ops, and the "fixup" done in set op cases ensures that the tuple will be already present. 
""" steps = [] src_path_id = path_id.src_path() ptrref = path_id.rptr() while ( src_path_id and src_path_id.is_tuple_path() and isinstance(ptrref, irast.TupleIndirectionPointerRef) ): steps.append((ptrref.shortname.name, src_path_id)) if ( (var := rel.path_namespace.get((src_path_id, aspect))) and not isinstance(var, pgast.TupleVarBase) ): for name, src in reversed(steps): var = astutils.tuple_getattr(var, src.target, name) put_path_var(rel, path_id, var, aspect=aspect, env=env) return var ptrref = src_path_id.rptr() src_path_id = src_path_id.src_path() return None def get_path_identity_var( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> pgast.BaseExpr: return get_path_var(rel, path_id, aspect='identity', env=env) def get_path_value_var( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> pgast.BaseExpr: return get_path_var(rel, path_id, aspect='value', env=env) def is_relation_rvar( rvar: pgast.BaseRangeVar) -> bool: return ( isinstance(rvar, pgast.RelRangeVar) and is_terminal_relation(rvar.query) ) def is_terminal_relation( rel: pgast.BaseRelation) -> bool: return isinstance(rel, (pgast.Relation, pgast.NullRelation)) def is_values_relation( rel: pgast.BaseRelation) -> bool: return bool(getattr(rel, 'values', None)) def maybe_get_path_var( rel: pgast.Query, path_id: irast.PathId, *, aspect: str, env: context.Environment) -> Optional[pgast.BaseExpr]: try: return get_path_var(rel, path_id, aspect=aspect, env=env) except LookupError: return None def maybe_get_path_identity_var( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> Optional[pgast.BaseExpr]: try: return get_path_var(rel, path_id, aspect='identity', env=env) except LookupError: return None def maybe_get_path_value_var( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> Optional[pgast.BaseExpr]: try: return get_path_var(rel, path_id, aspect='value', env=env) except LookupError: return None def maybe_get_path_serialized_var( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> Optional[pgast.BaseExpr]: try: return get_path_var(rel, path_id, aspect='serialized', env=env) except LookupError: return None def put_path_var( rel: pgast.BaseRelation, path_id: irast.PathId, var: pgast.BaseExpr, *, aspect: str, force: bool=False, env: context.Environment) -> None: if (path_id, aspect) in rel.path_namespace and not force: raise KeyError( f'{aspect} of {path_id} is already present in {rel}') rel.path_namespace[path_id, aspect] = var def put_path_var_if_not_exists( rel: pgast.Query, path_id: irast.PathId, var: pgast.BaseExpr, *, aspect: str, env: context.Environment) -> None: try: put_path_var(rel, path_id, var, aspect=aspect, env=env) except KeyError: pass def put_path_identity_var( rel: pgast.BaseRelation, path_id: irast.PathId, var: pgast.BaseExpr, *, force: bool=False, env: context.Environment) -> None: put_path_var(rel, path_id, var, aspect='identity', force=force, env=env) def put_path_value_var( rel: pgast.BaseRelation, path_id: irast.PathId, var: pgast.BaseExpr, *, force: bool = False, env: context.Environment) -> None: put_path_var(rel, path_id, var, aspect='value', force=force, env=env) def put_path_serialized_var( rel: pgast.BaseRelation, path_id: irast.PathId, var: pgast.BaseExpr, *, force: bool = False, env: context.Environment) -> None: put_path_var(rel, path_id, var, aspect='serialized', force=force, env=env) def put_path_value_var_if_not_exists( rel: pgast.BaseRelation, path_id: irast.PathId, var: pgast.BaseExpr, *, 
force: bool = False, env: context.Environment) -> None: try: put_path_var(rel, path_id, var, aspect='value', force=force, env=env) except KeyError: pass def put_path_serialized_var_if_not_exists( rel: pgast.BaseRelation, path_id: irast.PathId, var: pgast.BaseExpr, *, force: bool = False, env: context.Environment) -> None: try: put_path_var(rel, path_id, var, aspect='serialized', force=force, env=env) except KeyError: pass def put_path_bond( stmt: pgast.BaseRelation, path_id: irast.PathId) -> None: stmt.path_scope.add(path_id) def put_rvar_path_bond( rvar: pgast.PathRangeVar, path_id: irast.PathId) -> None: put_path_bond(rvar.query, path_id) def get_path_output_alias( path_id: irast.PathId, aspect: str, *, env: context.Environment) -> str: rptr = path_id.rptr() if rptr is not None: alias_base = rptr.shortname.name elif path_id.is_collection_path(): assert path_id.target.collection is not None alias_base = path_id.target.collection else: alias_base = path_id.target_name_hint.name return env.aliases.get(f'{alias_base}_{aspect}') def get_rvar_path_var( rvar: pgast.PathRangeVar, path_id: irast.PathId, aspect: str, *, env: context.Environment) -> pgast.OutputVar: """Return ColumnRef for a given *path_id* in a given *range var*.""" if (path_id, aspect) in rvar.query.path_outputs: outvar = rvar.query.path_outputs[path_id, aspect] elif is_relation_rvar(rvar): ptr_si: Optional[pg_types.PointerStorageInfo] if ( (rptr := path_id.rptr()) is not None and rvar.typeref is not None and rvar.query.path_id and rvar.query.path_id != path_id and (not rvar.query.path_id.is_type_intersection_path() or rvar.query.path_id.src_path() != path_id) ): ptrref_dir = path_id.rptr_dir() assert ptrref_dir actual_rptr = irtyputils.maybe_find_actual_ptrref( rvar.typeref, rptr, dir=ptrref_dir ) if actual_rptr is not None: ptr_si = pg_types.get_ptrref_storage_info(actual_rptr) else: ptr_si = None else: ptr_si = None outvar = _get_rel_path_output( rvar.query, path_id, ptr_info=ptr_si, aspect=aspect, env=env) else: # Range is another query. 
outvar = get_path_output(rvar.query, path_id, aspect=aspect, env=env) return astutils.get_rvar_var(rvar, outvar) def put_rvar_path_output( rvar: pgast.PathRangeVar, path_id: irast.PathId, aspect: str, var: pgast.OutputVar, *, env: context.Environment) -> None: _put_path_output_var(rvar.query, path_id, aspect, var, env=env) def get_rvar_path_identity_var( rvar: pgast.PathRangeVar, path_id: irast.PathId, *, env: context.Environment) -> pgast.OutputVar: return get_rvar_path_var(rvar, path_id, aspect='identity', env=env) def maybe_get_rvar_path_identity_var( rvar: pgast.PathRangeVar, path_id: irast.PathId, *, env: context.Environment) -> Optional[pgast.OutputVar]: try: return get_rvar_path_var(rvar, path_id, aspect='identity', env=env) except LookupError: return None def get_rvar_path_value_var( rvar: pgast.PathRangeVar, path_id: irast.PathId, *, env: context.Environment) -> pgast.OutputVar: return get_rvar_path_var(rvar, path_id, aspect='value', env=env) def maybe_get_rvar_path_value_var( rvar: pgast.PathRangeVar, path_id: irast.PathId, *, env: context.Environment) -> Optional[pgast.OutputVar]: try: return get_rvar_path_var(rvar, path_id, aspect='value', env=env) except LookupError: return None def get_rvar_output_var_as_col_list( rvar: pgast.PathRangeVar, outvar: pgast.OutputVar, aspect: str, *, env: context.Environment) -> List[pgast.OutputVar]: cols: List[pgast.OutputVar] if isinstance(outvar, pgast.ColumnRef): cols = [outvar] elif isinstance(outvar, pgast.TupleVarBase): cols = [] for el in outvar.elements: col = get_rvar_path_var(rvar, el.path_id, aspect=aspect, env=env) cols.extend(get_rvar_output_var_as_col_list( rvar, col, aspect=aspect, env=env)) else: raise RuntimeError(f'unexpected OutputVar: {outvar!r}') return cols def put_path_packed_output( rel: pgast.EdgeQLPathInfo, path_id: irast.PathId, val: pgast.OutputVar, *, multi: bool) -> None: if rel.packed_path_outputs is None: rel.packed_path_outputs = {} rel.packed_path_outputs[path_id, 'value'] = (val, multi) def get_rvar_path_packed_output( rvar: pgast.PathRangeVar, path_id: irast.PathId, aspect: str, *, env: context.Environment) -> Tuple[pgast.OutputVar, bool]: """Return ColumnRef for a given *path_id* in a given *range var*.""" outvar = ( rvar.query.packed_path_outputs and rvar.query.packed_path_outputs.get((path_id, aspect)) ) if not outvar: raise LookupError( f'there is no packed var for {path_id} {aspect} in {rvar.query}') return astutils.get_rvar_var(rvar, outvar[0]), outvar[1] def maybe_get_rvar_path_packed_output( rvar: pgast.PathRangeVar, path_id: irast.PathId, aspect: str, *, env: context.Environment) -> Optional[Tuple[pgast.OutputVar, bool]]: try: return get_rvar_path_packed_output(rvar, path_id, aspect, env=env) except LookupError: return None def get_packed_path_var( rvar: pgast.PathRangeVar, path_id: irast.PathId, aspect: str, *, env: context.Environment) -> Tuple[pgast.OutputVar, bool]: res = maybe_get_rvar_path_packed_output( rvar, path_id, aspect, env=env) if res: return res query = rvar.query assert isinstance(query, pgast.Query) top_query = query # Handle the optional case in a hacky way, since it is the only # one that should come up currently. Eventually this should be # handled properly. 
    if astutils.is_set_op_query(query):
        alias = env.aliases.get('null')
        restarget = pgast.ResTarget(
            name=alias,
            val=pgast.NullConstant())
        assert query.larg and query.rarg
        assert (
            isinstance(query.rarg, pgast.SelectStmt)
            and isinstance(query.rarg.from_clause[0], pgast.RelRangeVar)
            and isinstance(
                query.rarg.from_clause[0].relation, pgast.NullRelation)
        )
        query.rarg.target_list.append(restarget)
        nullref = pgast.ColumnRef(name=[alias], nullable=True)
        _put_path_output_var(query.rarg, path_id, aspect, nullref, env=env)
        query = query.larg

    rel_rvar = get_path_rvar(
        query, path_id, flavor='packed', aspect=aspect, env=env)

    # XXX: some duplication of path_output
    ref, multi = get_packed_path_var(rel_rvar, path_id, aspect, env=env)
    alias = get_path_output_alias(path_id, aspect, env=env)

    restarget = pgast.ResTarget(
        name=alias, val=ref, ser_safe=getattr(ref, 'ser_safe', False))
    query.target_list.append(restarget)

    nullable = is_nullable(ref, env=env)
    optional = None
    if isinstance(ref, pgast.ColumnRef):
        optional = ref.optional
    result = pgast.ColumnRef(
        name=[alias], nullable=nullable, optional=optional)

    _put_path_output_var(query, path_id, aspect, result, env=env)
    if query is not top_query:
        _put_path_output_var(top_query, path_id, aspect, result, env=env)

    return result, multi


def put_path_rvar(
        stmt: pgast.Query, path_id: irast.PathId, rvar: pgast.PathRangeVar, *,
        flavor: str='normal',
        aspect: str, env: context.Environment) -> None:
    assert isinstance(path_id, irast.PathId)
    stmt.get_rvar_map(flavor)[path_id, aspect] = rvar

    # Normally, masked paths (i.e. paths that are only behind a fence below)
    # will not be exposed in a query namespace. However, when the masked
    # path is the *main* path of a set, it must still be exposed, but no
    # further than the immediate parent query.
try: query = rvar.query except NotImplementedError: pass else: if path_id in query.path_id_mask: stmt.path_id_mask.add(path_id) def put_path_value_rvar( stmt: pgast.Query, path_id: irast.PathId, rvar: pgast.PathRangeVar, *, flavor: str='normal', env: context.Environment) -> None: put_path_rvar(stmt, path_id, rvar, aspect='value', flavor=flavor, env=env) def put_path_source_rvar( stmt: pgast.Query, path_id: irast.PathId, rvar: pgast.PathRangeVar, *, flavor: str='normal', env: context.Environment) -> None: put_path_rvar(stmt, path_id, rvar, aspect='source', flavor=flavor, env=env) def has_rvar( stmt: pgast.Query, rvar: pgast.PathRangeVar, *, env: context.Environment) -> bool: return any( rvar in set(stmt.get_rvar_map(flavor).values()) for flavor in ('normal', 'packed') ) def put_path_rvar_if_not_exists( stmt: pgast.Query, path_id: irast.PathId, rvar: pgast.PathRangeVar, *, flavor: str='normal', aspect: str, env: context.Environment) -> None: if (path_id, aspect) not in stmt.get_rvar_map(flavor): put_path_rvar( stmt, path_id, rvar, aspect=aspect, flavor=flavor, env=env) def get_path_rvar( stmt: pgast.Query, path_id: irast.PathId, *, flavor: str='normal', aspect: str, env: context.Environment) -> pgast.PathRangeVar: rvar = maybe_get_path_rvar( stmt, path_id, aspect=aspect, flavor=flavor, env=env) if rvar is None: raise LookupError( f'there is no range var for {path_id} {aspect} in {stmt}') return rvar def maybe_get_path_rvar( stmt: pgast.Query, path_id: irast.PathId, *, aspect: str, flavor: str='normal', env: context.Environment) -> Optional[pgast.PathRangeVar]: rvar = env.external_rvars.get((path_id, aspect)) path_rvar_map = stmt.maybe_get_rvar_map(flavor) if path_rvar_map is not None: if rvar is None and path_rvar_map: rvar = path_rvar_map.get((path_id, aspect)) if rvar is None and aspect == 'identity': rvar = path_rvar_map.get((path_id, 'value')) return rvar def list_path_aspects( stmt: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> Set[str]: path_id = map_path_id(path_id, stmt.view_path_id_map) aspects = set() for rvar_path_id, aspect in stmt.path_rvar_map: if path_id == rvar_path_id: aspects.add(aspect) for ns_path_id, aspect in stmt.path_namespace: if path_id == ns_path_id: aspects.add(aspect) for ns_path_id, aspect in stmt.path_outputs: if path_id == ns_path_id: aspects.add(aspect) return aspects def maybe_get_path_value_rvar( stmt: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> Optional[pgast.BaseRangeVar]: return maybe_get_path_rvar(stmt, path_id, aspect='value', env=env) def _same_expr(expr1: pgast.BaseExpr, expr2: pgast.BaseExpr) -> bool: if (isinstance(expr1, pgast.ColumnRef) and isinstance(expr2, pgast.ColumnRef)): return expr1.name == expr2.name else: return expr1 == expr2 def _put_path_output_var( rel: pgast.BaseRelation, path_id: irast.PathId, aspect: str, var: pgast.OutputVar, *, env: context.Environment) -> None: rel.path_outputs[path_id, aspect] = var def _get_rel_object_id_output( rel: pgast.BaseRelation, path_id: irast.PathId, *, aspect: str, ptr_info: Optional[pg_types.PointerStorageInfo]=None, env: context.Environment) -> pgast.OutputVar: var = rel.path_outputs.get((path_id, aspect)) if var is not None: return var if isinstance(rel, pgast.NullRelation): name = env.aliases.get('id') val = pgast.TypeCast( arg=pgast.NullConstant(), type_name=pgast.TypeName( name=('uuid',), ) ) rel.target_list.append(pgast.ResTarget(name=name, val=val)) result = pgast.ColumnRef(name=[name], nullable=True) else: result = pgast.ColumnRef(name=['id'], 
nullable=False) _put_path_output_var(rel, path_id, aspect, result, env=env) return result def _get_rel_path_output( rel: pgast.BaseRelation, path_id: irast.PathId, *, aspect: str, ptr_info: Optional[pg_types.PointerStorageInfo]=None, env: context.Environment) -> pgast.OutputVar: if path_id.is_objtype_path(): if aspect == 'identity': aspect = 'value' if aspect != 'value': raise LookupError( f'invalid request for non-scalar path {path_id} {aspect}') if (path_id == rel.path_id or (rel.path_id and rel.path_id.is_type_intersection_path() and path_id == rel.path_id.src_path())): return _get_rel_object_id_output( rel, path_id, aspect=aspect, env=env) else: if aspect == 'identity': raise LookupError( f'invalid request for scalar path {path_id} {aspect}') elif aspect == 'serialized': aspect = 'value' var = rel.path_outputs.get((path_id, aspect)) if var is not None: return var ptrref = path_id.rptr() rptr_dir = path_id.rptr_dir() if (rptr_dir is not None and rptr_dir != s_pointers.PointerDirection.Outbound): raise LookupError( f'{path_id} is an inbound pointer and cannot be resolved ' f'on a base relation') if isinstance(rel, pgast.NullRelation): if ptrref is not None: target = ptrref.out_target else: target = path_id.target pg_type = pg_types.pg_type_from_ir_typeref(target) if ptr_info is not None: name = env.aliases.get(ptr_info.column_name) else: name = env.aliases.get('v') val = pgast.TypeCast( arg=pgast.NullConstant(), type_name=pgast.TypeName( name=pg_type, ) ) rel.target_list.append(pgast.ResTarget(name=name, val=val)) result = pgast.ColumnRef(name=[name], nullable=True) else: if ptrref is None: raise ValueError( f'could not resolve trailing pointer class for {path_id}') assert not ptrref.is_computable if ptr_info is None: ptr_info = pg_types.get_ptrref_storage_info( ptrref, resolve_type=False, link_bias=False) result = pgast.ColumnRef( name=[ptr_info.column_name], nullable=not ptrref.required) _put_path_output_var(rel, path_id, aspect, result, env=env) return result def find_path_output( rel: pgast.BaseRelation, path_id: irast.PathId, ref: pgast.BaseExpr, *, env: context.Environment) -> Optional[pgast.OutputVar]: if isinstance(ref, pgast.TupleVarBase): return None for key, other_ref in rel.path_namespace.items(): if _same_expr(other_ref, ref) and key in rel.path_outputs: return rel.path_outputs.get(key) else: return None def get_path_output( rel: pgast.BaseRelation, path_id: irast.PathId, *, aspect: str, allow_nullable: bool=True, disable_output_fusion: bool=False, ptr_info: Optional[pg_types.PointerStorageInfo]=None, env: context.Environment) -> pgast.OutputVar: if isinstance(rel, pgast.Query): path_id = map_path_id(path_id, rel.view_path_id_map) return _get_path_output(rel, path_id=path_id, aspect=aspect, disable_output_fusion=disable_output_fusion, ptr_info=ptr_info, allow_nullable=allow_nullable, env=env) def _get_path_output( rel: pgast.BaseRelation, path_id: irast.PathId, *, aspect: str, allow_nullable: bool=True, disable_output_fusion: bool=False, ptr_info: Optional[pg_types.PointerStorageInfo]=None, env: context.Environment) -> pgast.OutputVar: result = rel.path_outputs.get((path_id, aspect)) if result is not None: return result ref: pgast.BaseExpr alias = None rptr = path_id.rptr() if rptr is not None and irtyputils.is_id_ptrref(rptr) and not ( (src_path_id := path_id.src_path()) and (src_rptr := src_path_id.rptr()) and ( src_rptr.is_computable or src_rptr.out_cardinality.is_multi() ) ): # A value reference to Object.id is the same as a value # reference to the Object itself. 
        # (Though we want to only
        # apply this in the cases that process_set_as_path does this
        # optimization, which means not for multi props.)
        src_path_id = path_id.src_path()
        assert src_path_id is not None
        id_output = maybe_get_path_output(rel, src_path_id,
                                          aspect='value',
                                          allow_nullable=allow_nullable,
                                          ptr_info=ptr_info, env=env)
        if id_output is not None:
            _put_path_output_var(rel, path_id, aspect, id_output, env=env)
            return id_output

    if is_terminal_relation(rel):
        return _get_rel_path_output(rel, path_id, aspect=aspect,
                                    ptr_info=ptr_info, env=env)

    assert isinstance(rel, pgast.Query)

    if is_values_relation(rel):
        # The VALUES() construct seems to always expose its
        # value as "column1".
        alias = 'column1'
        ref = pgast.ColumnRef(name=[alias])
    else:
        ref = get_path_var(rel, path_id, aspect=aspect, env=env)

    # As an optimization, look to see if the same expression is being
    # output on a different aspect. This can save us needing to do the
    # work twice in the query.
    other_output = find_path_output(rel, path_id, ref, env=env)
    if other_output is not None and not disable_output_fusion:
        _put_path_output_var(rel, path_id, aspect, other_output, env=env)
        return other_output

    if isinstance(ref, pgast.TupleVarBase):
        elements = []
        for el in ref.elements:
            el_path_id = reverse_map_path_id(
                el.path_id, rel.view_path_id_map)

            try:
                # Similarly to get_path_var(), check for outer path_id
                # first for tuple serialized var disambiguation.
                element = _get_path_output(
                    rel, el_path_id, aspect=aspect,
                    disable_output_fusion=disable_output_fusion,
                    allow_nullable=allow_nullable, env=env)
            except LookupError:
                element = get_path_output(
                    rel, el_path_id, aspect=aspect,
                    disable_output_fusion=disable_output_fusion,
                    allow_nullable=allow_nullable, env=env)

            elements.append(pgast.TupleElementBase(
                path_id=el_path_id, name=element))

        result = pgast.TupleVarBase(
            elements=elements,
            named=ref.named,
            typeref=ref.typeref,
        )

    else:
        if astutils.is_set_op_query(rel):
            assert isinstance(ref, pgast.OutputVar)
            result = astutils.strip_output_var(ref)
        else:
            assert isinstance(rel, pgast.ReturningQuery), \
                "expected ReturningQuery"

            if alias is None:
                alias = get_path_output_alias(path_id, aspect, env=env)

            restarget = pgast.ResTarget(
                name=alias, val=ref,
                ser_safe=getattr(ref, 'ser_safe', False))
            rel.target_list.append(restarget)

            nullable = is_nullable(ref, env=env)

            optional = None
            if isinstance(ref, pgast.ColumnRef):
                optional = ref.optional

            if nullable and not allow_nullable:
                assert isinstance(rel, pgast.SelectStmt), \
                    "expected SelectStmt"
                var = get_path_var(rel, path_id, aspect=aspect, env=env)
                rel.where_clause = astutils.extend_binop(
                    rel.where_clause,
                    pgast.NullTest(arg=var, negated=True)
                )
                nullable = False

            result = pgast.ColumnRef(
                name=[alias], nullable=nullable, optional=optional)

    _put_path_output_var(rel, path_id, aspect, result, env=env)

    if (path_id.is_objtype_path()
            and not isinstance(result, pgast.TupleVarBase)):
        equiv_aspect = None
        if aspect == 'identity':
            equiv_aspect = 'value'
        elif aspect == 'value':
            equiv_aspect = 'identity'

        if (equiv_aspect is not None
                and (path_id, equiv_aspect) not in rel.path_outputs):
            _put_path_output_var(rel, path_id, equiv_aspect, result, env=env)

    return result


def maybe_get_path_output(
        rel: pgast.BaseRelation,
        path_id: irast.PathId, *,
        aspect: str,
        allow_nullable: bool=True,
        disable_output_fusion: bool=False,
        ptr_info: Optional[pg_types.PointerStorageInfo]=None,
        env: context.Environment) -> Optional[pgast.OutputVar]:
    try:
        return get_path_output(rel, path_id=path_id, aspect=aspect,
                               allow_nullable=allow_nullable,
disable_output_fusion=disable_output_fusion, ptr_info=ptr_info, env=env) except LookupError: return None def get_path_identity_output( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> pgast.OutputVar: return get_path_output(rel, path_id, aspect='identity', env=env) def get_path_value_output( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> pgast.OutputVar: return get_path_output(rel, path_id, aspect='value', env=env) def get_path_serialized_or_value_var( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> pgast.BaseExpr: ref = maybe_get_path_serialized_var(rel, path_id, env=env) if ref is None: ref = get_path_value_var(rel, path_id, env=env) return ref def fix_tuple( rel: pgast.Query, ref: pgast.BaseExpr, *, aspect: str, output: bool=False, env: context.Environment) -> pgast.BaseExpr: if ( isinstance(ref, pgast.TupleVarBase) and not isinstance(ref, pgast.TupleVar) ): elements = [] for el in ref.elements: assert el.path_id is not None val = _get_and_fix_tuple( rel, el.path_id, aspect=aspect, output=output, env=env) elements.append( pgast.TupleElement( path_id=el.path_id, name=el.name, val=val)) ref = pgast.TupleVar( elements, named=ref.named, typeref=ref.typeref, ) return ref def _get_and_fix_tuple( rel: pgast.Query, path_id: irast.PathId, *, output: bool=False, aspect: str, env: context.Environment) -> pgast.BaseExpr: ref = ( get_path_output(rel, path_id, aspect=aspect, env=env) if output else get_path_var(rel, path_id, aspect=aspect, env=env) ) return fix_tuple(rel, ref, aspect=aspect, output=output, env=env) def get_path_var_and_fix_tuple( rel: pgast.Query, path_id: irast.PathId, *, aspect: str, env: context.Environment) -> pgast.BaseExpr: return _get_and_fix_tuple( rel, path_id, output=False, aspect=aspect, env=env) def get_path_output_and_fix_tuple( rel: pgast.Query, path_id: irast.PathId, *, aspect: str, env: context.Environment) -> pgast.BaseExpr: return _get_and_fix_tuple( rel, path_id, output=True, aspect=aspect, env=env) def get_path_serialized_output( rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment) -> pgast.OutputVar: # Serialized output is a special case, we don't # want this behaviour to be recursive, so it # must be kept outside of get_path_output() generic. 
aspect = 'serialized' path_id = map_path_id(path_id, rel.view_path_id_map) result = rel.path_outputs.get((path_id, aspect)) if result is not None: return result ref = get_path_serialized_or_value_var(rel, path_id, env=env) if ( isinstance(ref, pgast.TupleVarBase) and not isinstance(ref, pgast.TupleVar) ): elements = [] for el in ref.elements: assert el.path_id is not None val = get_path_serialized_or_value_var(rel, el.path_id, env=env) elements.append( pgast.TupleElement( path_id=el.path_id, name=el.name, val=val)) ref = pgast.TupleVar( elements, named=ref.named, typeref=ref.typeref, ) refexpr = output.serialize_expr(ref, path_id=path_id, env=env) alias = get_path_output_alias(path_id, aspect, env=env) restarget = pgast.ResTarget(name=alias, val=refexpr, ser_safe=True) rel.target_list.append(restarget) result = pgast.ColumnRef( name=[alias], nullable=refexpr.nullable, ser_safe=True) _put_path_output_var(rel, path_id, aspect, result, env=env) return result def get_path_output_or_null( rel: pgast.Query, path_id: irast.PathId, *, disable_output_fusion: bool=False, aspect: str, env: context.Environment) -> \ Tuple[pgast.OutputVar, bool]: path_id = map_path_id(path_id, rel.view_path_id_map) ref = maybe_get_path_output( rel, path_id, disable_output_fusion=disable_output_fusion, aspect=aspect, env=env) if ref is not None: return ref, False alt_aspect = get_less_specific_aspect(path_id, aspect) if alt_aspect is not None: # If disable_output_fusion is true, we need to be careful # to not reuse an existing column if disable_output_fusion: preexisting = rel.path_outputs.pop((path_id, alt_aspect), None) ref = maybe_get_path_output( rel, path_id, disable_output_fusion=disable_output_fusion, aspect=alt_aspect, env=env) if disable_output_fusion: # Put back the path_output to whatever it was before if not preexisting: rel.path_outputs.pop((path_id, alt_aspect), None) else: rel.path_outputs[(path_id, alt_aspect)] = preexisting if ref is not None: _put_path_output_var(rel, path_id, aspect, ref, env=env) return ref, False alias = env.aliases.get('null') restarget = pgast.ResTarget( name=alias, val=pgast.NullConstant()) rel.target_list.append(restarget) ref = pgast.ColumnRef(name=[alias], nullable=True) _put_path_output_var(rel, path_id, aspect, ref, env=env) return ref, True def is_nullable( expr: pgast.BaseExpr, *, env: context.Environment) -> Optional[bool]: try: return expr.nullable except AttributeError: if isinstance(expr, pgast.ReturningQuery): tl_len = len(expr.target_list) if tl_len != 1: raise RuntimeError( f'subquery used as a value returns {tl_len} columns') return is_nullable(expr.target_list[0].val, env=env) else: raise
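# An illustrative summary of the aspect fallback (an addition for clarity,
# not upstream code): get_less_specific_aspect() takes one step through the
# maps defined at the top of this module, so for object paths the chain is
#     identity -> value -> source      (and serialized -> source),
# while for primitive paths it is
#     serialized -> value.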
33.767556
79
0.634938
e685f866c61ff91e0452538442db45529d01cf6b
856
py
Python
sa/profiles/Eltex/MES5448/get_config.py
xUndero/noc
9fb34627721149fcf7064860bd63887e38849131
[ "BSD-3-Clause" ]
1
2019-09-20T09:36:48.000Z
2019-09-20T09:36:48.000Z
sa/profiles/Eltex/MES5448/get_config.py
ewwwcha/noc
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
[ "BSD-3-Clause" ]
null
null
null
sa/profiles/Eltex/MES5448/get_config.py
ewwwcha/noc
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # --------------------------------------------------------------------- # Eltex.MES5448.get_config # --------------------------------------------------------------------- # Copyright (C) 2007-2019 The NOC Project # See LICENSE for details # --------------------------------------------------------------------- # NOC modules from noc.core.script.base import BaseScript from noc.sa.interfaces.igetconfig import IGetConfig class Script(BaseScript): name = "Eltex.MES5448.get_config" interface = IGetConfig def execute_cli(self, policy="r"): assert policy in ("r", "s") if policy == "s": config = self.cli("show startup-config") else: config = self.cli("show running-config") config = self.strip_first_lines(config, 10) return self.cleaned_config(config)
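# A usage sketch (an assumption, not part of the original module): inside
# NOC this script runs through the profile machinery, roughly equivalent to
#   script.execute_cli()             # "show running-config" (policy "r")
#   script.execute_cli(policy="s")   # "show startup-config"
# where `script` is a hypothetical instantiated Script object.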
32.923077
71
0.490654
773e4908e88299d8b803998ce0a395959e683cea
2,068
py
Python
scripts/generate_buffet.py
evolutics/code-cleaner-buffet
73ba3f8e322b24b019d9e85e7e6704338109da67
[ "MIT" ]
null
null
null
scripts/generate_buffet.py
evolutics/code-cleaner-buffet
73ba3f8e322b24b019d9e85e7e6704338109da67
[ "MIT" ]
null
null
null
scripts/generate_buffet.py
evolutics/code-cleaner-buffet
73ba3f8e322b24b019d9e85e7e6704338109da67
[ "MIT" ]
1
2020-07-17T15:45:19.000Z
2020-07-17T15:45:19.000Z
#!/usr/bin/env python3 import argparse import json import os import pathlib import subprocess def main(): os.chdir(pathlib.Path(os.path.realpath(__file__)).parent.parent) parser = argparse.ArgumentParser() parser.add_argument("--tag", default=_get_latest_tag()) arguments = parser.parse_args() _generate_dockerfile() _generate_readme(arguments.tag) _commit() def _get_latest_tag(): return subprocess.run( ["git", "describe", "--abbrev=0"], capture_output=True, check=True, text=True, ).stdout.rstrip() def _generate_dockerfile(): with pathlib.Path("Dockerfile").open("w") as dockerfile: subprocess.run(["buffet", "assemble", "dishes"], check=True, stdout=dockerfile) def _generate_readme(tag): _generate_template_partials(tag) with pathlib.Path("README.md").open("w") as readme: subprocess.run( [ "buffet", "document", "--template", "docs/readme/README.md.mustache", "dishes", ], check=True, stdout=readme, ) def _generate_template_partials(tag): intermediate = json.loads( subprocess.run( ["buffet", "parse", "dishes"], check=True, stdout=subprocess.PIPE ).stdout ) dish_example_versions = { option: dish["metadata"]["tags"]["info.evolutics.buffet.version-example"][-1] for option, dish in intermediate["option_to_dish"].items() } generated_partials = { "black.md.mustache": dish_example_versions["black"], "prettier.md.mustache": dish_example_versions["prettier"], "tag.md.mustache": tag, } for filename, content in generated_partials.items(): path = pathlib.Path("docs") / "readme" / filename path.write_text(content) def _commit(): subprocess.run( ["git", "commit", "--all", "--message", "Generate buffet with documentation"], check=True, ) if __name__ == "__main__": main()
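# Example invocations (hypothetical, derived from the argparse setup above):
#   python scripts/generate_buffet.py              # tag from `git describe --abbrev=0`
#   python scripts/generate_buffet.py --tag 1.2.3  # document an explicit tag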
25.530864
87
0.605899
1aa0c8474d06397e109a0d0bac4ad9e40b2256e8
8,257
py
Python
model/norm_module.py
ZejianLi/LAMA
f8737926e47b197c94c1254eb900fb6d56fb7ddd
[ "MIT" ]
2
2021-08-23T13:01:39.000Z
2021-12-14T07:22:19.000Z
model/norm_module.py
ZejianLi/LAMA
f8737926e47b197c94c1254eb900fb6d56fb7ddd
[ "MIT" ]
null
null
null
model/norm_module.py
ZejianLi/LAMA
f8737926e47b197c94c1254eb900fb6d56fb7ddd
[ "MIT" ]
1
2022-02-17T05:51:23.000Z
2022-02-17T05:51:23.000Z
import torch import torch.nn as nn import torch.nn.functional as F # Adaptive instance normalization # modified from https://github.com/NVlabs/MUNIT/blob/d79d62d99b588ae341f9826799980ae7298da553/networks.py#L453-L482 class AdaptiveInstanceNorm2d(nn.Module): def __init__(self, num_features, num_w=512, eps=1e-5, momentum=0.1): super(AdaptiveInstanceNorm2d, self).__init__() self.num_features = num_features self.eps = eps self.momentum = momentum # just dummy buffers, not used self.register_buffer('running_mean', torch.zeros(num_features)) self.register_buffer('running_var', torch.ones(num_features)) # projection layer self.weight_proj = nn.Linear(num_w, num_features) self.bias_proj = nn.Linear(num_w, num_features) def forward(self, x, w): b, c = x.size(0), x.size(1) running_mean = self.running_mean.repeat(b) running_var = self.running_var.repeat(b) weight, bias = self.weight_proj(w).contiguous().view(-1) + 1, self.bias_proj(w).contiguous().view(-1) # Apply instance norm x_reshaped = x.contiguous().view(1, b * c, *x.size()[2:]) out = F.batch_norm( x_reshaped, running_mean, running_var, weight, bias, True, self.momentum, self.eps) return out.view(b, c, *x.size()[2:]) def __repr__(self): return self.__class__.__name__ + '(' + str(self.num_features) + ')' class SpatialAdaptiveInstanceNorm2d(nn.Module): def __init__(self, num_features, num_w=512, eps=1e-5, momentum=0.1): super(SpatialAdaptiveInstanceNorm2d, self).__init__() self.num_features = num_features self.eps = eps self.momentum = momentum # just dummy buffers, not used self.register_buffer('running_mean', torch.zeros(num_features)) self.register_buffer('running_var', torch.ones(num_features)) # projection layer self.weight_proj = nn.Linear(num_w, num_features) self.bias_proj = nn.Linear(num_w, num_features) def forward(self, x, w, bbox): b, c, h, w = x.size() running_mean = self.running_mean.repeat(b) running_var = self.running_var.repeat(b) return x class AdaptiveBatchNorm2d(nn.BatchNorm2d): def __init__(self, num_features, num_w=512, eps=1e-5, momentum=0.1, affine=False, track_running_stats=True): super(AdaptiveBatchNorm2d, self).__init__( num_features, eps, momentum, affine, track_running_stats ) # projection layer self.weight_proj = nn.Linear(num_w, num_features) self.bias_proj = nn.Linear(num_w, num_features) def forward(self, x, w): self._check_input_dim(x) exponential_average_factor = 0.0 if self.training and self.track_running_stats: self.num_batches_tracked += 1 if self.momentum is None: # use cumulative moving average exponential_average_factor = 1.0 / self.num_batches_tracked.item() else: # use exponential moving average exponential_average_factor = self.momentum output = F.batch_norm(x, self.running_mean, self.running_var, self.weight, self.bias, self.training or not self.track_running_stats, exponential_average_factor, self.eps) size = output.size() weight, bias = self.weight_proj(w) + 1, self.bias_proj(w) weight = weight.unsqueeze(-1).unsqueeze(-1).expand(size) bias = bias.unsqueeze(-1).unsqueeze(-1).expand(size) return weight * output + bias def __repr__(self): return self.__class__.__name__ + '(' + str(self.num_features) + ')' class SpatialAdaptiveBatchNorm2d(nn.BatchNorm2d): def __init__(self, num_features, num_w=512, eps=1e-5, momentum=0.1, affine=False, track_running_stats=True): super(SpatialAdaptiveBatchNorm2d, self).__init__( num_features, eps, momentum, affine, track_running_stats ) # projection layer self.weight_proj = nn.Linear(num_w, num_features) self.bias_proj = nn.Linear(num_w, num_features) def forward(self, x, 
vector, bbox): """ :param x: input feature map (b, c, h, w) :param vector: latent vector (b*o, dim_w) :param bbox: bbox map (b, o, h, w) :return: """ self._check_input_dim(x) exponential_average_factor = 0.0 if self.training and self.track_running_stats: self.num_batches_tracked += 1 if self.momentum is None: # use cumulative moving average exponential_average_factor = 1.0 / self.num_batches_tracked.item() else: # use exponential moving average exponential_average_factor = self.momentum output = F.batch_norm(x, self.running_mean, self.running_var, self.weight, self.bias, self.training or not self.track_running_stats, exponential_average_factor, self.eps) b, o, _, _ = bbox.size() _, _, h, w = x.size() bbox = F.interpolate(bbox, size=(h, w), mode='bilinear', align_corners=False) # calculate weight and bias weight, bias = self.weight_proj(vector), self.bias_proj(vector) weight, bias = weight.view(b, o, -1), bias.view(b, o, -1) weight = torch.sum(bbox.unsqueeze(2) * weight.unsqueeze(-1).unsqueeze(-1), dim=1, keepdim=False) / \ (torch.sum(bbox.unsqueeze(2), dim=1, keepdim=False) + 1e-6) + 1 bias = torch.sum(bbox.unsqueeze(2) * bias.unsqueeze(-1).unsqueeze(-1), dim=1, keepdim=False) / \ (torch.sum(bbox.unsqueeze(2), dim=1, keepdim=False) + 1e-6) return weight * output + bias def __repr__(self): return self.__class__.__name__ + '(' + str(self.num_features) + ')' from .sync_batchnorm import SynchronizedBatchNorm2d class SpatialAdaptiveSynBatchNorm2d(nn.BatchNorm2d): def __init__(self, num_features, num_w=512, eps=1e-5, momentum=0.1, affine=False, track_running_stats=True): super(SpatialAdaptiveSynBatchNorm2d, self).__init__( num_features, eps, momentum, affine, track_running_stats ) # projection layer self.weight_proj = nn.utils.spectral_norm(nn.Linear(num_w, num_features)) self.bias_proj = nn.utils.spectral_norm(nn.Linear(num_w, num_features)) # self.weight_proj = nn.Linear(num_w, num_features) # self.bias_proj = nn.Linear(num_w, num_features) self.batch_norm2d = SynchronizedBatchNorm2d(num_features, eps=self.eps, affine=False) self.alpha = nn.Parameter(torch.tensor(0.0)) def forward(self, x, vector, bbox): """ :param x: input feature map (b, c, h, w) :param vector: latent vector (b*o, dim_w) :param bbox: bbox map (b, o, h, w) :return: """ self._check_input_dim(x) output = self.batch_norm2d(x) b, o, _, _ = bbox.size() _, _, h, w = x.size() bbox = F.interpolate(bbox, size=(h, w), mode='bilinear', align_corners=False) # calculate weight and bias weight, bias = self.weight_proj(vector), self.bias_proj(vector) weight, bias = weight.view(b, o, -1), bias.view(b, o, -1) # bbox.unsqueeze(2) -> b, o, 1, h, w # weight.unsqueeze(-1).unsqueeze(-1) -> b, o, num_features, 1, 1 # torch.sum() -> b, num_features, h, w # torch.sum(bbox.unsqueeze(2)) -> b, 1, h, w weight = torch.sum(bbox.unsqueeze(2) * weight.unsqueeze(-1).unsqueeze(-1), dim=1, keepdim=False) / \ (torch.sum(bbox.unsqueeze(2), dim=1, keepdim=False) + 1e-6) bias = torch.sum(bbox.unsqueeze(2) * bias.unsqueeze(-1).unsqueeze(-1), dim=1, keepdim=False) / \ (torch.sum(bbox.unsqueeze(2), dim=1, keepdim=False) + 1e-6) affined = weight * output + bias return output + self.alpha.clamp(-1,1) * affined def __repr__(self): return self.__class__.__name__ + '(' + str(self.num_features) + ')'
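# A minimal shape smoke test (an illustrative addition, not part of the
# original module); the tensor shapes follow the forward() docstrings above.
if __name__ == '__main__':
    norm = SpatialAdaptiveBatchNorm2d(num_features=8, num_w=512)
    b, o, h, w = 2, 3, 16, 16
    x = torch.randn(b, 8, h, w)        # input feature map (b, c, h, w)
    vector = torch.randn(b * o, 512)   # per-object latent vectors (b*o, dim_w)
    bbox = torch.rand(b, o, h, w)      # soft bbox maps (b, o, h, w)
    out = norm(x, vector, bbox)
    assert out.shape == x.shape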
41.70202
115
0.622502
a26716b6c1f216f12b6509cd9ddf2e307d447192
619
py
Python
laboratorios/Laboratorio_9/A -Jesse and Cookies.py
yeisonbarreto/ayed-2019-1
4d65c52b60b0b9b98860e323b9514edd580aae8d
[ "MIT" ]
null
null
null
laboratorios/Laboratorio_9/A -Jesse and Cookies.py
yeisonbarreto/ayed-2019-1
4d65c52b60b0b9b98860e323b9514edd580aae8d
[ "MIT" ]
null
null
null
laboratorios/Laboratorio_9/A -Jesse and Cookies.py
yeisonbarreto/ayed-2019-1
4d65c52b60b0b9b98860e323b9514edd580aae8d
[ "MIT" ]
null
null
null
from sys import stdin
from heapq import heapify, heappop, heappush


def Jesse(arr, y):
    # Repeatedly combine the two least sweet cookies as
    #     new = (least sweet) + 2 * (second least sweet)
    # until every cookie has sweetness >= y; a min-heap keeps the two
    # smallest cookies on top. Prints the number of operations, or -1
    # if the target is unreachable.
    heap = list(arr)
    heapify(heap)
    cont = 0
    while heap[0] < y:
        if len(heap) < 2:
            print(-1)  # only one cookie left and it is still below y
            return
        least = heappop(heap)
        second = heappop(heap)
        heappush(heap, least + 2 * second)
        cont += 1
    print(cont)


def main():
    x, y = stdin.readline().strip().split()  # x is the cookie count; unused
    arr = [int(i) for i in stdin.readline().strip().split()]
    Jesse(arr, int(y))


main()
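# Worked example (an illustrative addition): for the input
#   6 7
#   1 2 3 9 10 12
# the combines are 1 + 2*2 = 5 and then 3 + 2*5 = 13, after which every
# cookie has sweetness >= 7, so the program prints 2.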
24.76
60
0.529887
7c18b0453ac53e188b2dfdae3a9dda21a73246a0
4,857
py
Python
build_img.py
teeks99/boost-cpp-docker
96e4b8a245ecb2fea330c693a15002aeeb72156f
[ "MIT" ]
8
2018-05-04T18:06:52.000Z
2022-02-16T06:38:50.000Z
build_img.py
teeks99/boost-cpp-docker
96e4b8a245ecb2fea330c693a15002aeeb72156f
[ "MIT" ]
4
2018-02-08T03:59:01.000Z
2019-06-03T11:43:29.000Z
build_img.py
teeks99/boost-cpp-docker
96e4b8a245ecb2fea330c693a15002aeeb72156f
[ "MIT" ]
3
2018-02-10T19:03:35.000Z
2020-03-20T15:48:01.000Z
import sys import subprocess import datetime import argparse import re options = None versions = [ # Precise # "gcc-4.4", "gcc-4.5", # Trusty # "clang-2.9", "clang-3.0", "clang-3.1", "clang-3.2", "clang-3.3", # "clang-3.4", "clang-3.5", "clang-3.6", "clang-3.7", "clang-3.8", "gcc-4.6", "gcc-4.7", "gcc-4.8", "gcc-4.9", "gcc-5", "gcc-6", # Xenial "clang-3.9", "clang-4", "clang-5", "clang-6", "gcc-7", # Bionic "clang-7", "clang-8", "clang-9", "clang-10", "gcc-8", # Focal "clang-11", "clang-12", "clang-13", "clang-14", "gcc-9", "gcc-10", "gcc-11" ] test_versions = {} def update_base_images(): if not options.no_update_base: # subprocess.check_call("docker pull ubuntu:precise", shell=True) subprocess.check_call("docker pull ubuntu:trusty", shell=True) subprocess.check_call("docker pull ubuntu:xenial", shell=True) subprocess.check_call("docker pull ubuntu:bionic", shell=True) subprocess.check_call("docker pull ubuntu:focal", shell=True) def build(version): tag = f"{options.repo}:{version}" force = "--no-cache" if options.no_force: force = "" cmd = f"docker build {force} --tag {tag} {version}" print(cmd) try: subprocess.check_call(cmd, shell=True) except Exception: print("Failure in command: " + cmd) raise return tag def test(tag, test_version): pass def tag_timestamp(base_tag, version): timestamp = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M") tag = f"{options.repo}:{version}_{timestamp}" cmd = f"docker tag {base_tag} {tag}" try: print(cmd) subprocess.check_call(cmd, shell=True) except Exception: print("Failure in command: " + cmd) raise return tag def tag_latest(base_tag): tag = f"{options.repo}:latest" cmd = f"docker tag {base_tag} {tag}" try: print(cmd) subprocess.check_call(cmd, shell=True) except Exception: print("Failure in command: " + cmd) raise return tag def push_tag(tag): cmd = f"docker push {tag}" try: print(cmd) subprocess.check_call(cmd, shell=True) except Exception: print("Failure in command: " + cmd) raise def remove_tag(tag): cmd = f"docker rmi {tag}" try: print(cmd) subprocess.check_call(cmd, shell=True) except Exception: print("Failure in command: " + cmd) raise def all(): for version in versions: build_one(version) def build_one(version): tags = [] base_tag = None time_tag = None latest_tag = None if not options.no_build: base_tag = build(version) if not options.no_test: tv = version if version in test_versions: tv = test_versions[version] test(base_tag, tv) if not options.no_tag_timestamp: time_tag = tag_timestamp(base_tag, version) if options.latest: latest_tag = tag_latest(base_tag) if options.push: for tag in (base_tag, time_tag, latest_tag): if tag: push_tag(tag) if options.delete_timestamp_tag: remove_tag(time_tag) def set_options(): parser = argparse.ArgumentParser( description="Build one or more docker images for boost-cpp-docker") parser.add_argument( "-v", "--version", action="append", help="Use one of more times to specify the versions to run, skip" + " for all") parser.add_argument( "--no-update-base", action="store_true", help="Don't update the base images") parser.add_argument( "--no-build", action="store_true", help="skip build step") parser.add_argument( "--no-force", action="store_true", help="don't force an update, use existing layers") parser.add_argument( "--no-test", action="store_true", help="skip the test step") parser.add_argument( "--no-tag-timestamp", action="store_true", help="only version tag") parser.add_argument( "--latest", action="store_true", help="update each to latest tag, whichever version is" + " specified last will win") 
parser.add_argument( "-r", "--repo", default="test/boost-cpp", help="repo to build for and push to") parser.add_argument( "-p", "--push", action="store_true", help="push to dockerhub") parser.add_argument( "-d", "--delete-timestamp-tag", action="store_true", help="remove the timestamp tag from the local machine") global options options = parser.parse_args() def run(): set_options() if options.version: global versions versions = options.version update_base_images() all() if __name__ == "__main__": run()
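# Example invocations (hypothetical, derived from the options defined above):
#   python build_img.py                              # build every listed version
#   python build_img.py -v gcc-11 --latest           # one version, also tag :latest
#   python build_img.py -v clang-14 -r me/boost -p   # build and push to a custom repo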
25.429319
75
0.603871
acd2093b1fcfb83e17ca776dd0a8197c7c4a16a5
11,520
py
Python
build/android/install_emulator_deps.py
TwistedCore/external_v8
c6725dab9be251fbfc6fd7d53c3513a23e78c36c
[ "BSD-3-Clause" ]
777
2017-08-29T15:15:32.000Z
2022-03-21T05:29:41.000Z
build/android/install_emulator_deps.py
TwistedCore/external_v8
c6725dab9be251fbfc6fd7d53c3513a23e78c36c
[ "BSD-3-Clause" ]
66
2017-08-30T18:31:18.000Z
2021-08-02T10:59:35.000Z
build/android/install_emulator_deps.py
TwistedCore/external_v8
c6725dab9be251fbfc6fd7d53c3513a23e78c36c
[ "BSD-3-Clause" ]
123
2017-08-30T01:19:34.000Z
2022-03-17T22:55:31.000Z
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Installs deps for using SDK emulator for testing. The script will download the SDK and system images, if they are not present, and install and enable KVM, if virtualization has been enabled in the BIOS. """ import logging import optparse import os import re import sys import devil_chromium from devil.utils import cmd_helper from devil.utils import run_tests_helper from pylib import constants from pylib import pexpect # Android API level DEFAULT_ANDROID_API_LEVEL = constants.ANDROID_SDK_VERSION # Android ABI/Arch DEFAULT_ABI = 'x86' # Default Time out for downloading SDK component DOWNLOAD_SYSTEM_IMAGE_TIMEOUT = 30 DOWNLOAD_SDK_PLATFORM_TIMEOUT = 60 def CheckSDK(): """Check if SDK is already installed. Returns: True if the emulator SDK directory (src/android_emulator_sdk/) exists. """ return os.path.exists(constants.ANDROID_SDK_ROOT) def CheckSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL, google=False): """Check if the "SDK Platform" for the specified API level is installed. This is necessary in order for the emulator to run when the target is specified. Args: abi: target abi, x86 or arm api_level: the Android API level to check; defaults to the latest API. google: use Google build system image instead of AOSP build Returns: True if the platform is already installed. """ android_binary = os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android') if google: pattern = re.compile('id: [0-9]+ or "Google Inc.:Google APIs:%s"' % api_level) else: pattern = re.compile('id: [0-9]+ or "android-%d"' % api_level) try: exit_code, stdout = cmd_helper.GetCmdStatusAndOutput( [android_binary, 'list']) if exit_code != 0: raise Exception('\'android list\' command failed') for line in stdout.split('\n'): if pattern.match(line): return True return False except OSError: logging.exception('Unable to execute \'android list\'') return False def CheckSystemImage(abi, api_level=DEFAULT_ANDROID_API_LEVEL, google=False): """Check if Android system images have been installed. Args: abi: target abi, x86 or arm api_level: the Android API level to check for; defaults to the latest API. google: use Google build system image instead of AOSP build Returns: True if x86 image has been previously downloaded. """ api_target = 'android-%d' % api_level system_image_root = os.path.join(constants.ANDROID_SDK_ROOT, 'system-images', api_target) if abi == 'x86': if google: return os.path.exists(os.path.join(system_image_root, 'google_apis', 'x86')) else: return os.path.exists(os.path.join(system_image_root, 'default', 'x86')) elif abi == 'arm': if google: return os.path.exists(os.path.join(system_image_root, 'google_apis', 'armeabi-v7a')) else: return os.path.exists(os.path.join(system_image_root, 'default', 'armeabi-v7a')) else: raise Exception("abi option invalid") def CheckKVM(): """Quickly check whether KVM is enabled. Returns: True iff /dev/kvm exists (Linux only). """ return os.path.exists('/dev/kvm') def RunKvmOk(): """Run kvm-ok as root to check that KVM is properly enabled after installation of the required packages. Returns: True iff KVM is enabled (/dev/kvm exists). On failure, returns False but also print detailed information explaining why KVM isn't enabled (e.g. CPU doesn't support it, or BIOS disabled it). """ try: # Note: kvm-ok is in /usr/sbin, so always use 'sudo' to run it. 
    return not cmd_helper.RunCmd(['sudo', 'kvm-ok'])
  except OSError:
    logging.info('kvm-ok not installed')
    return False


def InstallKVM():
  """Installs KVM packages."""
  rc = cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm'])
  if rc:
    logging.critical('ERROR: Did not install KVM. Make sure hardware '
                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
                     'AMD SVM).')
  # TODO(navabi): Use modprobe kvm-amd on AMD processors.
  rc = cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel'])
  if rc:
    logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make sure '
                     'hardware virtualization is enabled in BIOS.')
  # Now check to ensure KVM acceleration can be used.
  if not RunKvmOk():
    logging.critical('ERROR: Can not use KVM acceleration. Make sure hardware '
                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
                     'AMD SVM).')


def UpdateSDK(api_level, package_name, package_pattern, timeout):
  """This function update SDK with a filter index.

  Args:
    api_level: the Android API level to download for.
    package_name: logging name of package that is being updated.
    package_pattern: the pattern to match the filter index from.
    timeout: the amount of time wait for update command.
  """
  android_binary = os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android')
  list_sdk_repo_command = [android_binary, 'list', 'sdk', '--all']
  exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(list_sdk_repo_command)
  if exit_code != 0:
    raise Exception('\'android list sdk --all\' command return %d' % exit_code)
  for line in stdout.split('\n'):
    match = package_pattern.match(line)
    if match:
      index = match.group(1)
      logging.info('package %s corresponds to %s with api level %d',
                   index, package_name, api_level)
      update_command = [android_binary, 'update', 'sdk', '--no-ui', '--all',
                        '--filter', index]
      update_command_str = ' '.join(update_command)
      logging.info('running update command: %s', update_command_str)
      update_process = pexpect.spawn(update_command_str)
      if update_process.expect('Do you accept the license') != 0:
        raise Exception('License agreement check failed')
      update_process.sendline('y')
      if update_process.expect(
          'Done. 1 package installed.', timeout=timeout) == 0:
        logging.info('Successfully installed %s for API level %d',
                     package_name, api_level)
        return
      else:
        raise Exception('Failed to install platform update')
  raise Exception('Could not find android-%d update for the SDK!' % api_level)


def GetSystemImage(abi, api_level=DEFAULT_ANDROID_API_LEVEL, google=False):
  """Download system image files

  Args:
    abi: target abi, x86 or arm
    api_level: the Android API level to download for.
    google: use Google build system image instead of AOSP build
  """
  logging.info('Download x86 system image directory into sdk directory.')
  if abi == 'x86':
    if google:
      package_name = 'Google Intel x86 Atom System Image'
      pattern = re.compile(
          r'\s*([0-9]+)- Google APIs Intel x86 Atom System Image, Google Inc.'
          ' API %d.*' % api_level)
    else:
      package_name = 'Intel x86 system image'
      pattern = re.compile(
          r'\s*([0-9]+)- Intel x86 Atom System Image, Android API %d.*'
          % api_level)
  elif abi == 'arm':
    if google:
      package_name = 'Google arm system image'
      pattern = re.compile(
          r'\s*([0-9]+)- Google APIs ARM EABI v7a System Image, Google Inc.'
          ' API %d.*' % api_level)
    else:
      package_name = 'Android arm system image'
      pattern = re.compile(
          r'\s*([0-9]+)- ARM EABI v7a System Image, Android API %d.*'
          % api_level)
  else:
    raise Exception('abi option is invalid')
  UpdateSDK(api_level, package_name, pattern, DOWNLOAD_SYSTEM_IMAGE_TIMEOUT)


def GetSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL, google=False):
  """Update the SDK to include the platform specified.

  Args:
    api_level: the Android API level to download
    google: use Google build system image instead of AOSP build
  """
  logging.info('Download SDK Platform directory into sdk directory.')
  platform_package_pattern = re.compile(
      r'\s*([0-9]+)- SDK Platform Android [\.,0-9]+, API %d.*' % api_level)
  UpdateSDK(api_level, 'SDK Platform', platform_package_pattern,
            DOWNLOAD_SDK_PLATFORM_TIMEOUT)
  if google:
    google_api_package_pattern = re.compile(
        r'\s*([0-9]+)- Google APIs, Android API %d.*' % api_level)
    UpdateSDK(api_level, 'Google APIs', google_api_package_pattern,
              DOWNLOAD_SDK_PLATFORM_TIMEOUT)


def main(argv):
  opt_parser = optparse.OptionParser(
      description='Install dependencies for running the Android emulator')
  opt_parser.add_option('--abi',
                        dest='abi',
                        help='The targeted abi for emulator system image',
                        type='string',
                        default=DEFAULT_ABI)
  opt_parser.add_option('--api-level',
                        dest='api_level',
                        help=('The API level (e.g., 19 for Android 4.4) to '
                              'ensure is available'),
                        type='int',
                        default=DEFAULT_ANDROID_API_LEVEL)
  opt_parser.add_option('-v',
                        dest='verbosity',
                        default=1,
                        action='count',
                        help='Verbose level (multiple times for more)')
  opt_parser.add_option('--google',
                        dest='google',
                        action='store_true',
                        default=False,
                        help='Install Google System Image instead of AOSP')
  options, _ = opt_parser.parse_args(argv[1:])

  run_tests_helper.SetLogLevel(verbose_count=options.verbosity)

  devil_chromium.Initialize()

  # Calls below will download emulator SDK and/or system images only if needed.
  if CheckSDK():
    logging.info('android_emulator_sdk/ exists')
  else:
    logging.critical('ERROR: Emulator SDK not installed in %s',
                     constants.ANDROID_SDK_ROOT)
    return 1

  # Check target. The target has to be installed in order to run the emulator.
  if CheckSDKPlatform(options.api_level, options.google):
    logging.info('SDK platform %s %s android-%d already present, skipping.',
                 'Google' if options.google else 'AOSP',
                 options.abi, options.api_level)
  else:
    logging.info('SDK platform %s %s android-%d not present, installing.',
                 'Google' if options.google else 'AOSP',
                 options.abi, options.api_level)
    GetSDKPlatform(options.api_level, options.google)

  # Download the system image needed
  if CheckSystemImage(options.abi, options.api_level, options.google):
    logging.info('system image for %s %s android-%d already present, '
                 'skipping.',
                 'Google' if options.google else 'AOSP',
                 options.abi, options.api_level)
  else:
    GetSystemImage(options.abi, options.api_level, options.google)

  # Make sure KVM packages are installed and enabled.
  if options.abi == 'x86':
    if CheckKVM():
      logging.info('KVM already installed and enabled.')
    else:
      logging.warning('KVM is not installed or enabled.')


if __name__ == '__main__':
  sys.exit(main(sys.argv))
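# Example invocations (hypothetical, derived from the optparse options above):
#   python build/android/install_emulator_deps.py
#   python build/android/install_emulator_deps.py --abi arm --api-level 19
#   python build/android/install_emulator_deps.py --google -v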
36.112853
80
0.650347
1edef073fe9d763fe6fba3791183321ead5c2857
517
py
Python
Basics/main.py
miku/batchdata
25446f7d9c6baad24de7ee8b964c62726bf27ea5
[ "MIT" ]
8
2018-11-17T17:17:29.000Z
2019-09-23T17:31:09.000Z
Basics/main.py
miku/batchdata
25446f7d9c6baad24de7ee8b964c62726bf27ea5
[ "MIT" ]
null
null
null
Basics/main.py
miku/batchdata
25446f7d9c6baad24de7ee8b964c62726bf27ea5
[ "MIT" ]
null
null
null
""" Basic example, a minimal task and command line integration. Fill in the blanks. Run: $ python main.py Luigi requires a scheduler and comes with a local scheduler for development. $ python main.py --local-scheduler Specify the task name (name of the class) to execute: $ python main.py <taskname> --local-scheduler """ import luigi # 1. Write a class (e.g. `Hello`) that inherits from `luigi.Task` # 2. Add a `run` method that prints some string. if __name__ == '__main__': luigi.run()
18.464286
76
0.694391
1d26e026f89c7acf328898793ab8d737a2bf664b
2,874
py
Python
setup.py
adamcharnock/lightbus
5e7069da06cd37a8131e8c592ee957ccb73603d5
[ "Apache-2.0" ]
178
2017-07-22T12:35:00.000Z
2022-03-28T07:53:13.000Z
setup.py
adamcharnock/warren
5e7069da06cd37a8131e8c592ee957ccb73603d5
[ "Apache-2.0" ]
26
2017-08-03T12:09:29.000Z
2021-10-19T16:47:18.000Z
setup.py
adamcharnock/warren
5e7069da06cd37a8131e8c592ee957ccb73603d5
[ "Apache-2.0" ]
19
2017-09-15T17:51:24.000Z
2022-02-28T13:00:16.000Z
# -*- coding: utf-8 -*-

# DO NOT EDIT THIS FILE!
# This file has been autogenerated by dephell <3
# https://github.com/dephell/dephell

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import os.path

readme = ""
here = os.path.abspath(os.path.dirname(__file__))
readme_path = os.path.join(here, "README.rst")
if os.path.exists(readme_path):
    with open(readme_path, "rb") as stream:
        readme = stream.read().decode("utf8")

setup(
    long_description=readme,
    name="lightbus",
    version="1.2.0a4",
    description="RPC & event framework for Python 3",
    python_requires=">=3.7",
    project_urls={
        "documentation": "https://lightbus.org",
        "homepage": "https://lightbus.org",
        "repository": "https://github.com/adamcharnock/lightbus/",
    },
    author="Adam Charnock",
    author_email="[email protected]",
    keywords="python messaging redis bus queue",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Framework :: AsyncIO",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Natural Language :: English",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: POSIX",
        "Programming Language :: Python :: 3",
        "Topic :: System :: Networking",
        "Topic :: Communications",
    ],
    entry_points={
        "console_scripts": ["lightbus = lightbus.commands:lightbus_entry_point"],
        "lightbus_event_transports": [
            "debug = lightbus:DebugEventTransport",
            "redis = lightbus:RedisEventTransport",
        ],
        "lightbus_plugins": [
            "internal_metrics = lightbus.plugins.metrics:MetricsPlugin",
            "internal_state = lightbus.plugins.state:StatePlugin",
        ],
        "lightbus_result_transports": [
            "debug = lightbus:DebugResultTransport",
            "redis = lightbus:RedisResultTransport",
        ],
        "lightbus_rpc_transports": [
            "debug = lightbus:DebugRpcTransport",
            "redis = lightbus:RedisRpcTransport",
        ],
        "lightbus_schema_transports": [
            "debug = lightbus:DebugSchemaTransport",
            "redis = lightbus:RedisSchemaTransport",
        ],
    },
    packages=[
        "lightbus",
        "lightbus.client",
        "lightbus.client.docks",
        "lightbus.client.internal_messaging",
        "lightbus.client.subclients",
        "lightbus.commands",
        "lightbus.config",
        "lightbus.plugins",
        "lightbus.schema",
        "lightbus.serializers",
        "lightbus.transports",
        "lightbus.transports.redis",
        "lightbus.utilities",
    ],
    package_dir={"": "."},
    package_data={},
    install_requires=["aioredis>=1.2.0", "jsonschema>=3.2", "pyyaml>=3.12"],
)
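The entry_points declared above are how pluggable transports get registered. As a rough sketch (not part of setup.py), third-party code could enumerate one of those groups at runtime with the standard library; note that the `group=` keyword of `importlib.metadata.entry_points` requires Python 3.10+:

from importlib.metadata import entry_points


def discover_event_transports():
    # Maps entry-point name (e.g. 'redis') to the loaded transport class.
    return {ep.name: ep.load() for ep in entry_points(group="lightbus_event_transports")}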
31.582418
81
0.607864
2929b9aa761afd9f654fb1eaf0761a397c46d4e8
12,342
py
Python
server.py
PuffinDev/PyChat
47fef1fe33a4e0f9f622aa7f74fe5fd4fcaa45ab
[ "MIT" ]
6
2020-11-12T17:49:03.000Z
2021-04-29T16:45:34.000Z
server.py
harleytml/enhancedPyChat
47fef1fe33a4e0f9f622aa7f74fe5fd4fcaa45ab
[ "MIT" ]
null
null
null
server.py
harleytml/enhancedPyChat
47fef1fe33a4e0f9f622aa7f74fe5fd4fcaa45ab
[ "MIT" ]
2
2021-02-14T13:27:50.000Z
2021-03-16T06:35:46.000Z
import socket
import threading
import pickle
import time
import traceback
import json

HEADER = 64
PORT = input("Type a port >> ")
if PORT == "":
    PORT = 49001  # Default port
try:
    PORT = int(PORT)
except ValueError:
    print("Not a valid port. Running on the default port...")
    PORT = 49001  # fall back to the default so the bind below still works

SERVER = "0.0.0.0"
ADDR = (SERVER, PORT)
FORMAT = 'utf-8'
DISCONNECT_MESSAGE = "disconnect"

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(ADDR)

connections = []
usernames = {}
user_colours = {}
valid_colours = ["green", "orange", "blue", "purple", "red", "turquoise", "red4"]
conn_usernames = {}
online_users = []
message_history = []
admins = []
banned = []
accounts = {}


def load_accounts():
    global accounts
    with open('resources/server/accounts.json', 'r') as file:
        accounts = json.load(file)


def save_accounts():
    global accounts
    with open('resources/server/accounts.json', 'w') as file:
        json.dump(accounts, file)


load_accounts()


def addr_from_username(user):
    for key, value in usernames.items():
        if value == user:
            return key[0]


with open("resources/server/banned.txt", 'r') as file:
    for line in file:
        line = line.strip()
        banned.append(line)

with open("resources/server/admins.txt", 'r') as file:
    for line in file:
        line = line.strip()
        admins.append(line)


def write_config():
    with open("resources/server/banned.txt", 'w') as file:
        file.truncate(0)
        print(banned)
        for member in banned:
            file.write(member + "\n")


def send_to_all(msg, user, colour):
    msg = ('m', msg, user, colour)
    if not msg[1] == 'disconnect':  # Don't append disconnect messages
        message_history.append(msg)
    message = pickle.dumps(msg)
    msg_length = len(message)
    send_length = str(msg_length).encode(FORMAT)
    send_length += b' ' * (HEADER - len(send_length))
    for conn in connections:
        conn.send(send_length)
        conn.send(message)


def send_object_to_all(object):
    message = pickle.dumps(object)
    msg_length = len(message)
    send_length = str(msg_length).encode(FORMAT)
    send_length += b' ' * (HEADER - len(send_length))
    for conn in connections:
        conn.send(send_length)
        conn.send(message)


def send(user, msg):
    conn = conn_usernames[user]  # Get connection object from conn_usernames
    message = pickle.dumps(msg)
    msg_length = len(message)
    send_length = str(msg_length).encode(FORMAT)
    send_length += b' ' * (HEADER - len(send_length))
    conn.send(send_length)
    conn.send(message)


def connsend(conn, msg):
    message = pickle.dumps(msg)
    msg_length = len(message)
    send_length = str(msg_length).encode(FORMAT)
    send_length += b' ' * (HEADER - len(send_length))
    conn.send(send_length)
    conn.send(message)


def handle_client(conn, addr):
    server.settimeout(6)
    usernames[addr] = str(threading.activeCount() - 1)  # Temp username
    conn_usernames[usernames[addr]] = conn

    if addr[0] in banned:
        connsend(conn, ('x'))
        return 0
    else:
        print(f"[NEW CONNECTION] {addr} connected.")

    connected = True
    username_set = False
    join_message_sent = False

    connsend(conn, ('o', online_users))

    while connected:
        if username_set and not join_message_sent:
            time.sleep(0.5)
            send_object_to_all(('j', usernames[addr], user_colours[addr]))
            online_users.append(usernames[addr])
            print(online_users)
            join_message_sent = True
        try:
            msg_length = conn.recv(HEADER).decode(FORMAT)
            if msg_length:
                msg_length = int(msg_length)
                try:
                    msg = conn.recv(msg_length)
                except:
                    connected = False
                msg = pickle.loads(msg)
                prefix = msg[0]

                if prefix == 'm':
                    is_command = False
                else:
                    is_command = True

                try:
                    if msg[1] == DISCONNECT_MESSAGE:
                        connected = False
                except:
                    pass

                print("Prefix: " + prefix)

                if prefix == 'u':
                    username = msg[1].replace(' ', '')
                    password = msg[2]
                    print(accounts)
                    if username in accounts.keys():
                        if accounts[username] == password:
                            username_set = True
                            connsend(conn, ('r', 'Logged in as ' + username))
                            usernames[addr] = username  # set username
                            user_colours[addr] = msg[3]  # set colour
                            conn_usernames[username] = conn
                        else:
                            connsend(conn, ('r', 'Incorrect password.'))
                    else:
                        accounts[username] = password
                        username_set = True
                        connsend(conn, ('r', 'New account created: ' + username))
                        save_accounts()  # save the new account in accounts.json
                        usernames[addr] = username  # set username
                        user_colours[addr] = msg[3]  # set colour
                        conn_usernames[username] = conn

                    """if msg[1] in usernames.values():
                        time.sleep(0.5)
                        send(usernames[addr], ('r', "That username is taken. please choose another."))
                        print("username taken")
                    else:
                        usernames[addr] = username  # set username
                        user_colours[addr] = msg[2]  # set colour
                        print(usernames)
                        conn_usernames[username] = conn
                        #send(usernames[addr], ('r', "Username has been set to " + username))
                        username_set = True
                        print("username set to " + username)"""

                if username_set:  # Only able to send messages if logged in
                    if prefix == 'b':  # ban
                        if addr[0] in admins:
                            try:
                                banned.append(addr_from_username(msg[1]))
                                write_config()
                                send(msg[1], ('x'))  # x command: disconnects client
                                send(usernames[addr], ('r', 'User banned successfully!'))
                            except:
                                send(usernames[addr], ('r', 'User does not exist'))
                        else:
                            print(addr[0])
                            send(usernames[addr], ('r', 'You are not an admin!'))
                            print("Not admin")
                            print(admins)
                            print(addr[0])

                    if prefix == 'a':  # unban
                        if addr[0] in admins:
                            try:
                                banned.remove(addr_from_username(msg[1]))
                                write_config()
                                send(usernames[addr], ('r', 'User unbanned successfully!'))
                            except:
                                send(usernames[addr], ('r', 'User is not banned!'))
                            send(usernames[addr], ('r', addr_from_username(msg[1])))
                        else:
                            send(usernames[addr], ('r', 'You are not an admin!'))

                    if prefix == 'd':
                        try:
                            send(msg[1], ('d', msg[2], usernames[addr], user_colours[addr]))
                        except:
                            connsend(conn, ('r', "That user is not online, but your message has been sent to their inbox"))
                        # If the user is not online, the message still gets added
                        # to the message history in case they come online later
                        message_history.append(('d', msg[1], msg[2], usernames[addr], user_colours[addr]))

                    if prefix == 'c':
                        if msg[1] in valid_colours:
                            user_colours[addr] = msg[1]
                            send(usernames[addr], ('r', 'Changed username colour to ' + msg[1]))
                        else:
                            send(usernames[addr], ('r', 'That is not a valid colour. Type \'/colours\'.'))

                    if prefix == 'h':  # Send message history to client
                        print("Sending history to client...")
                        history_object = []
                        i = 0
                        for message in message_history[::-1]:
                            i += 1
                            if i == 16:
                                break
                            if message[0] == 'm':
                                history_object.append(message)
                            if message[0] == 'd' and message[1] == usernames[addr]:
                                history_object.append(message)
                            if message[0] == 'd' and message[3] == usernames[addr]:
                                history_object.append(message)

                        history_object = reversed(history_object)
                        history_object = tuple(history_object)
                        send(usernames[addr], ('h', history_object))

                    if prefix == 'i':  # Inbox
                        inbox_object = []
                        i = 0
                        for message in message_history[::-1]:
                            if i == 20:
                                break
                            if message[0] == 'm' and '@' + usernames[addr] in message[1]:  # If @username in message
                                inbox_object.append(message)
                                i += 1
                            if message[0] == 'd' and message[1] == usernames[addr]:
                                inbox_object.append(message)
                                i += 1

                        inbox_object = reversed(inbox_object)
                        inbox_object = tuple(inbox_object)
                        send(usernames[addr], ('i', inbox_object))

                    if is_command == False:
                        try:
                            send_to_all(msg[1], usernames[addr], user_colours[addr])
                        except KeyError:
                            usernames[addr] = threading.activeCount() - 1
                            send_to_all(msg[1], usernames[addr], "red")

                if not prefix == 'k':
                    print(f"[{str(addr).strip('(').strip(')')}] {msg}")

            else:  # Disconnect client if header is blank
                print("Blank header")
                connected = False

        except socket.timeout:  # timeout
            connected = False  # Disconnect if we failed to receive a header
            print("Timeout")

    try:
        online_users.remove(usernames[addr])
    except:
        pass
    try:
        connections.remove(conn)  # Remove from connections list
    except:
        pass
    try:
        del conn_usernames[usernames[addr]]
    except:
        pass

    send_object_to_all(('l', usernames[addr]))

    try:
        del usernames[addr]
    except:
        pass

    time.sleep(0.5)
    conn.close()
    print("Closed connection.")


def start():
    server.listen()
    print(f"[LISTENING] Server is listening on {SERVER}")
    while True:
        try:
            conn, addr = server.accept()
            thread = threading.Thread(target=handle_client, args=(conn, addr))  # Create a new thread for every client that joins
            thread.start()
            connections.append(conn)  # Append the connection object for send_to_all() to loop through
            print(f"[ACTIVE CONNECTIONS] {threading.activeCount() - 1}")
        except socket.timeout:
            pass


print("[STARTING] server is starting...")
try:
    start()
except KeyboardInterrupt:
    exit()
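Every message on this wire uses the same framing: a pickled payload preceded by its length, space-padded to the fixed 64-byte header. A minimal client-side sketch of that framing follows; the host, port, and credentials are placeholders and not part of the server:

import pickle
import socket


def framed_send(sock, obj, header=64, fmt='utf-8'):
    payload = pickle.dumps(obj)
    length = str(len(payload)).encode(fmt)
    length += b' ' * (header - len(length))  # pad the length field to 64 bytes
    sock.send(length)
    sock.send(payload)


# Hypothetical usage: log in with the 'u' prefix handled in handle_client().
# client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# client.connect(('127.0.0.1', 49001))
# framed_send(client, ('u', 'alice', 'hunter2', 'green'))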
34.188366
127
0.482742
4cece89e47ab50e50172525e4db5abc73c6fa8ba
16,560
py
Python
pymatgen/apps/battery/battery_abc.py
wangyusu/pymatgen
a90af2fe71eff15134ca33c6e58f07caba425ae9
[ "MIT" ]
2
2020-01-28T19:19:15.000Z
2020-03-30T18:10:32.000Z
pymatgen/apps/battery/battery_abc.py
wangyusu/pymatgen
a90af2fe71eff15134ca33c6e58f07caba425ae9
[ "MIT" ]
3
2021-08-03T17:59:02.000Z
2021-08-12T00:43:59.000Z
pymatgen/apps/battery/battery_abc.py
wangyusu/pymatgen
a90af2fe71eff15134ca33c6e58f07caba425ae9
[ "MIT" ]
13
2015-03-05T09:42:11.000Z
2018-08-28T15:22:53.000Z
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.

"""
This module defines the abstract base classes for battery-related classes.
Regardless of the kind of electrode, conversion or insertion, there are many
common definitions and properties, e.g., average voltage, capacity, etc. which
can be defined in a general way. The ABCs for battery classes implement some
of these common definitions to allow sharing of common logic between them.
"""

__author__ = "Anubhav Jain, Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Feb 1, 2012"
__status__ = "Beta"

from collections.abc import Sequence
from dataclasses import dataclass
from typing import Dict, Tuple

from monty.json import MSONable
from scipy.constants import N_A

from pymatgen.core import Composition, Element
from pymatgen.entries.computed_entries import ComputedEntry


@dataclass
class AbstractVoltagePair(MSONable):
    """
    An Abstract Base Class for a Voltage Pair.

    Attributes:
        voltage: Voltage of voltage pair.
        mAh: Capacity in mAh.
        mass_charge: Mass of charged pair.
        mass_discharge: Mass of discharged pair.
        vol_charge: Volume of charged pair.
        vol_discharge: Volume of discharged pair.
        frac_charge: Frac of working ion in charged pair.
        frac_discharge: Frac of working ion in discharged pair.
        working_ion_entry: Working ion as an entry.
        framework: The composition of one formula unit of the host material.
    """

    voltage: float
    mAh: float
    mass_charge: float
    mass_discharge: float
    vol_charge: float
    vol_discharge: float
    frac_charge: float
    frac_discharge: float
    working_ion_entry: ComputedEntry
    _framework_formula: str  # should be made into Composition whenever as_dict and from_dict are fixed

    def __post_init__(self):
        # ensure that the framework is a reduced composition
        self._framework_formula = self.framework.reduced_formula

    @property
    def working_ion(self) -> Element:
        """
        Working ion as a pymatgen Element object.
        """
        return self.working_ion_entry.composition.elements[0]

    @property
    def framework(self) -> Composition:
        """
        The composition object representing the framework.
        """
        return Composition(self._framework_formula)

    @property
    def x_charge(self) -> float:
        """
        The number of working ions per formula unit of host in the charged state.
        """
        return self.frac_charge * self.framework.num_atoms / (1 - self.frac_charge)

    @property
    def x_discharge(self) -> float:
        """
        The number of working ions per formula unit of host in the discharged state.
        """
        return self.frac_discharge * self.framework.num_atoms / (1 - self.frac_discharge)


@dataclass
class AbstractElectrode(Sequence, MSONable):
    """
    An Abstract Base Class representing an Electrode. It is essentially a
    sequence of VoltagePairs. Generally, subclasses only need to implement
    three abstract properties: voltage_pairs, working_ion and
    working_ion_entry.

    The general concept is that all other battery properties such as capacity,
    etc. are derived from voltage pairs.

    One of the major challenges with representing battery materials is keeping
    track of the normalization between different entries. For example, one
    entry might be TiO2 with one unit cell whereas another is LiTi2O4 with two
    unit cells. When computing battery properties, it is necessary to always
    use a universal reference state; otherwise you have normalization errors
    (e.g., the energy of LiTi2O4 must be divided by two to be compared with
    TiO2).

    For properties such as volume, mass, or mAh transferred within the voltage
    pair, a universal convention is necessary. AbstractElectrode can query for
    extrinsic properties of several different AbstractVoltagePairs belonging
    to a single charge/discharge path and be confident that the normalization
    is being carried out properly throughout, even if more
    AbstractVoltagePairs are added later.

    The universal normalization is defined by the reduced structural framework
    of the entries, which is common along the entire charge/discharge path.
    For example, LiTi2O4 has a reduced structural framework of TiO2. Another
    example is Li9V6P16O58 which would have a reduced structural framework of
    V3P8O29. Note that reduced structural frameworks need not be
    charge-balanced or physical, e.g. V3P8O29 is not charge-balanced; they are
    just a tool for normalization.

    Example: for a LiTi2O4 -> TiO2 AbstractVoltagePair, extrinsic quantities
    like mAh or cell volumes are given per TiO2 formula unit.

    Developers implementing a new battery (other than the two general ones
    already implemented) need to implement a VoltagePair and an Electrode.

    Attributes:
        voltage_pairs: Objects that represent each voltage step
        working_ion: Representation of the working ion that only contains element type
        working_ion_entry: Representation of the working_ion that contains the energy
        framework: The composition of one formula unit of the host material
    """

    voltage_pairs: Tuple[AbstractVoltagePair]
    working_ion_entry: ComputedEntry
    _framework_formula: str  # should be made into Composition whenever as_dict and from_dict are fixed

    def __post_init__(self):
        # ensure that the framework is a reduced composition
        self._framework_formula = self.framework.reduced_formula

    def __getitem__(self, index):
        return self.voltage_pairs[index]

    def __contains__(self, obj):
        return obj in self.voltage_pairs

    def __iter__(self):
        return self.voltage_pairs.__iter__()

    def __len__(self):
        return len(self.voltage_pairs)

    @property
    def working_ion(self):
        """
        Working ion as a pymatgen Element object.
        """
        return self.working_ion_entry.composition.elements[0]

    @property
    def framework(self):
        """
        The composition object representing the framework.
        """
        return Composition(self._framework_formula)

    @property
    def x_charge(self) -> float:
        """
        The number of working ions per formula unit of host in the charged state.
        """
        return self.voltage_pairs[0].x_charge

    @property
    def x_discharge(self) -> float:
        """
        The number of working ions per formula unit of host in the discharged state.
        """
        return self.voltage_pairs[-1].x_discharge

    @property
    def max_delta_volume(self):
        """
        Maximum volume change along insertion.
        """
        vols = [v.vol_charge for v in self.voltage_pairs]
        vols.extend([v.vol_discharge for v in self.voltage_pairs])
        return max(vols) / min(vols) - 1

    @property
    def num_steps(self):
        """
        The number of distinct voltage steps from fully charged to discharged,
        based on the stable intermediate states.
        """
        return len(self.voltage_pairs)

    @property
    def max_voltage(self):
        """
        Highest voltage along insertion.
        """
        return max([p.voltage for p in self.voltage_pairs])

    @property
    def min_voltage(self):
        """
        Lowest voltage along insertion.
        """
        return min([p.voltage for p in self.voltage_pairs])

    @property
    def max_voltage_step(self):
        """
        Maximum absolute difference in adjacent voltage steps.
        """
        steps = [
            self.voltage_pairs[i].voltage - self.voltage_pairs[i + 1].voltage
            for i in range(len(self.voltage_pairs) - 1)
        ]
        return max(steps) if len(steps) > 0 else 0

    @property
    def normalization_mass(self):
        """
        Returns:
            Mass used for normalization. This is the mass of the discharged
            electrode of the last voltage pair.
        """
        return self.voltage_pairs[-1].mass_discharge

    @property
    def normalization_volume(self):
        """
        Returns:
            Volume used for normalization. This is the volume of the
            discharged electrode of the last voltage pair.
        """
        return self.voltage_pairs[-1].vol_discharge

    def get_sub_electrodes(self, adjacent_only=True):
        """
        If this electrode contains multiple voltage steps, then it is possible
        to use only a subset of the voltage steps to define other electrodes.
        Must be implemented for each electrode object.

        Args:
            adjacent_only: Only return electrodes from compounds that are
                adjacent on the convex hull, i.e. no electrodes returned will
                have multiple voltage steps if this is set true.

        Returns:
            A list of Electrode objects
        """
        raise NotImplementedError(
            "The get_sub_electrodes function must be implemented for each concrete electrode "
            f"class {self.__class__.__name__}"
        )

    def get_average_voltage(self, min_voltage=None, max_voltage=None):
        """
        Average voltage for a path satisfying a min and max voltage.

        Args:
            min_voltage (float): The minimum allowable voltage for a given
                step.
            max_voltage (float): The maximum allowable voltage for a given
                step.

        Returns:
            Average voltage in V across the insertion path (a subset of the
            path can be chosen by the optional arguments)
        """
        pairs_in_range = self._select_in_voltage_range(min_voltage, max_voltage)
        if len(pairs_in_range) == 0:
            return 0
        total_cap_in_range = sum([p.mAh for p in pairs_in_range])
        total_edens_in_range = sum([p.mAh * p.voltage for p in pairs_in_range])
        return total_edens_in_range / total_cap_in_range

    def get_capacity_grav(self, min_voltage=None, max_voltage=None, use_overall_normalization=True):
        """
        Get the gravimetric capacity of the electrode.

        Args:
            min_voltage (float): The minimum allowable voltage for a given
                step.
            max_voltage (float): The maximum allowable voltage for a given
                step.
            use_overall_normalization (bool): If False, normalize by the
                discharged state of only the voltage pairs matching the
                voltage criteria. If True, use the default normalization of
                the full electrode path.

        Returns:
            Gravimetric capacity in mAh/g across the insertion path (a subset
            of the path can be chosen by the optional arguments).
        """
        pairs_in_range = self._select_in_voltage_range(min_voltage, max_voltage)
        normalization_mass = (
            self.normalization_mass
            if use_overall_normalization or len(pairs_in_range) == 0
            else pairs_in_range[-1].mass_discharge
        )
        return sum([pair.mAh for pair in pairs_in_range]) / normalization_mass

    def get_capacity_vol(self, min_voltage=None, max_voltage=None, use_overall_normalization=True):
        """
        Get the volumetric capacity of the electrode.

        Args:
            min_voltage (float): The minimum allowable voltage for a given
                step.
            max_voltage (float): The maximum allowable voltage for a given
                step.
            use_overall_normalization (bool): If False, normalize by the
                discharged state of only the voltage pairs matching the
                voltage criteria. If True, use the default normalization of
                the full electrode path.

        Returns:
            Volumetric capacity in mAh/cc across the insertion path (a subset
            of the path can be chosen by the optional arguments)
        """
        pairs_in_range = self._select_in_voltage_range(min_voltage, max_voltage)
        normalization_vol = (
            self.normalization_volume
            if use_overall_normalization or len(pairs_in_range) == 0
            else pairs_in_range[-1].vol_discharge
        )
        return sum([pair.mAh for pair in pairs_in_range]) / normalization_vol * 1e24 / N_A

    def get_specific_energy(self, min_voltage=None, max_voltage=None, use_overall_normalization=True):
        """
        Returns the specific energy of the battery in Wh/kg.

        Args:
            min_voltage (float): The minimum allowable voltage for a given
                step.
            max_voltage (float): The maximum allowable voltage for a given
                step.
            use_overall_normalization (bool): If False, normalize by the
                discharged state of only the voltage pairs matching the
                voltage criteria. If True, use the default normalization of
                the full electrode path.

        Returns:
            Specific energy in Wh/kg across the insertion path (a subset of
            the path can be chosen by the optional arguments)
        """
        return self.get_capacity_grav(min_voltage, max_voltage, use_overall_normalization) * self.get_average_voltage(
            min_voltage, max_voltage
        )

    def get_energy_density(self, min_voltage=None, max_voltage=None, use_overall_normalization=True):
        """
        Args:
            min_voltage (float): The minimum allowable voltage for a given
                step.
            max_voltage (float): The maximum allowable voltage for a given
                step.
            use_overall_normalization (bool): If False, normalize by the
                discharged state of only the voltage pairs matching the
                voltage criteria. If True, use the default normalization of
                the full electrode path.

        Returns:
            Energy density in Wh/L across the insertion path (a subset of the
            path can be chosen by the optional arguments).
        """
        return self.get_capacity_vol(min_voltage, max_voltage, use_overall_normalization) * self.get_average_voltage(
            min_voltage, max_voltage
        )

    def _select_in_voltage_range(self, min_voltage=None, max_voltage=None):
        """
        Selects VoltagePairs within a certain voltage range.

        Args:
            min_voltage (float): The minimum allowable voltage for a given
                step.
            max_voltage (float): The maximum allowable voltage for a given
                step.

        Returns:
            A list of VoltagePair objects
        """
        min_voltage = min_voltage if min_voltage is not None else self.min_voltage
        max_voltage = max_voltage if max_voltage is not None else self.max_voltage
        return list(filter(lambda p: min_voltage <= p.voltage <= max_voltage, self.voltage_pairs))

    def get_summary_dict(self, print_subelectrodes=True) -> Dict:
        """
        Generate a summary dict.

        Args:
            print_subelectrodes: Also print data on all the possible
                subelectrodes.

        Returns:
            A summary of this electrode's properties in dict format.
        """
        d = {
            "average_voltage": self.get_average_voltage(),
            "max_voltage": self.max_voltage,
            "min_voltage": self.min_voltage,
            "max_delta_volume": self.max_delta_volume,
            "max_voltage_step": self.max_voltage_step,
            "capacity_grav": self.get_capacity_grav(),
            "capacity_vol": self.get_capacity_vol(),
            "energy_grav": self.get_specific_energy(),
            "energy_vol": self.get_energy_density(),
            "working_ion": self.working_ion.symbol,
            "nsteps": self.num_steps,
            "fracA_charge": self.voltage_pairs[0].frac_charge,
            "fracA_discharge": self.voltage_pairs[-1].frac_discharge,
            "framework_formula": self._framework_formula,
        }

        if print_subelectrodes:

            def f_dict(c):
                return c.get_summary_dict(print_subelectrodes=False)

            d["adj_pairs"] = list(map(f_dict, self.get_sub_electrodes(adjacent_only=True)))
            d["all_pairs"] = list(map(f_dict, self.get_sub_electrodes(adjacent_only=False)))

        return d
38.244804
118
0.664493
5e4a78d6d580fbba05bec322797cdaeb1bcd7bd6
19,679
py
Python
MLPYthonEnv/ml-agents-release_17/ml-agents/mlagents/trainers/tests/test_settings.py
cihan-demir/NineMensMorris
05f4fe3c096fab1d31d110c4fd106410e248fc21
[ "MIT" ]
3
2021-04-16T06:17:07.000Z
2021-07-04T06:36:37.000Z
ml-agents/mlagents/trainers/tests/test_settings.py
neils94/ml-agents
d3f5ca73d2cc6cfae0d02021cd370bae2fe5b592
[ "Apache-2.0" ]
null
null
null
ml-agents/mlagents/trainers/tests/test_settings.py
neils94/ml-agents
d3f5ca73d2cc6cfae0d02021cd370bae2fe5b592
[ "Apache-2.0" ]
null
null
null
import attr
import cattr
import pickle
import pytest
import yaml

from typing import Dict, List, Optional

from mlagents.trainers.settings import (
    RunOptions,
    TrainerSettings,
    NetworkSettings,
    PPOSettings,
    SACSettings,
    RewardSignalType,
    RewardSignalSettings,
    CuriositySettings,
    EnvironmentSettings,
    EnvironmentParameterSettings,
    ConstantSettings,
    UniformSettings,
    GaussianSettings,
    MultiRangeUniformSettings,
    TrainerType,
    deep_update_dict,
    strict_to_cls,
)
from mlagents.trainers.exception import TrainerConfigError


def check_if_different(testobj1: object, testobj2: object) -> None:
    assert testobj1 is not testobj2
    if attr.has(testobj1.__class__) and attr.has(testobj2.__class__):
        for key, val in attr.asdict(testobj1, recurse=False).items():
            if isinstance(val, dict) or isinstance(val, list) or attr.has(val):
                # Note: this check doesn't check the contents of mutables.
                check_if_different(val, attr.asdict(testobj2, recurse=False)[key])


def check_dict_is_at_least(
    testdict1: Dict, testdict2: Dict, exceptions: Optional[List[str]] = None
) -> None:
    """
    Check if everything present in the 1st dict is the same in the second
    dict. Excludes things that the second dict has but that are not present in
    the hierarchy of the 1st dict. Used to compare an underspecified config
    dict structure (e.g. as would be provided by a user) with a complete one
    (e.g. as exported by RunOptions).
    """
    for key, val in testdict1.items():
        if exceptions is not None and key in exceptions:
            continue
        assert key in testdict2
        if isinstance(val, dict):
            check_dict_is_at_least(val, testdict2[key])
        elif isinstance(val, list):
            assert isinstance(testdict2[key], list)
            for _el0, _el1 in zip(val, testdict2[key]):
                if isinstance(_el0, dict):
                    check_dict_is_at_least(_el0, _el1)
                else:
                    assert val == testdict2[key]
        else:  # If not a dict, don't recurse into it
            assert val == testdict2[key]


def test_is_new_instance():
    """
    Verify that every instance of RunOptions() and its subclasses is a new
    instance (i.e. all factory methods are used properly.)
    """
    check_if_different(RunOptions(), RunOptions())
    check_if_different(TrainerSettings(), TrainerSettings())


def test_no_configuration():
    """
    Verify that a new config will have a PPO trainer with extrinsic rewards.
    """
    blank_runoptions = RunOptions()
    blank_runoptions.behaviors.set_config_specified(False)
    assert isinstance(blank_runoptions.behaviors["test"], TrainerSettings)
    assert isinstance(blank_runoptions.behaviors["test"].hyperparameters, PPOSettings)
    assert (
        RewardSignalType.EXTRINSIC in blank_runoptions.behaviors["test"].reward_signals
    )


def test_strict_to_cls():
    """
    Test strict structuring method.
""" @attr.s(auto_attribs=True) class TestAttrsClass: field1: int = 0 field2: str = "test" correct_dict = {"field1": 1, "field2": "test2"} assert strict_to_cls(correct_dict, TestAttrsClass) == TestAttrsClass(**correct_dict) incorrect_dict = {"field3": 1, "field2": "test2"} with pytest.raises(TrainerConfigError): strict_to_cls(incorrect_dict, TestAttrsClass) with pytest.raises(TrainerConfigError): strict_to_cls("non_dict_input", TestAttrsClass) def test_deep_update_dict(): dict1 = {"a": 1, "b": 2, "c": {"d": 3}} dict2 = {"a": 2, "c": {"d": 4, "e": 5}} deep_update_dict(dict1, dict2) assert dict1 == {"a": 2, "b": 2, "c": {"d": 4, "e": 5}} def test_trainersettings_structure(): """ Test structuring method for TrainerSettings """ trainersettings_dict = { "trainer_type": "sac", "hyperparameters": {"batch_size": 1024}, "max_steps": 1.0, "reward_signals": {"curiosity": {"encoding_size": 64}}, } trainer_settings = TrainerSettings.structure(trainersettings_dict, TrainerSettings) assert isinstance(trainer_settings.hyperparameters, SACSettings) assert trainer_settings.trainer_type == TrainerType.SAC assert isinstance(trainer_settings.max_steps, int) assert RewardSignalType.CURIOSITY in trainer_settings.reward_signals # Check invalid trainer type with pytest.raises(ValueError): trainersettings_dict = { "trainer_type": "puppo", "hyperparameters": {"batch_size": 1024}, "max_steps": 1.0, } TrainerSettings.structure(trainersettings_dict, TrainerSettings) # Check invalid hyperparameter with pytest.raises(TrainerConfigError): trainersettings_dict = { "trainer_type": "ppo", "hyperparameters": {"notahyperparam": 1024}, "max_steps": 1.0, } TrainerSettings.structure(trainersettings_dict, TrainerSettings) # Check non-dict with pytest.raises(TrainerConfigError): TrainerSettings.structure("notadict", TrainerSettings) # Check hyperparameters specified but trainer type left as default. # This shouldn't work as you could specify non-PPO hyperparameters. with pytest.raises(TrainerConfigError): trainersettings_dict = {"hyperparameters": {"batch_size": 1024}} TrainerSettings.structure(trainersettings_dict, TrainerSettings) def test_reward_signal_structure(): """ Tests the RewardSignalSettings structure method. This one is special b/c it takes in a Dict[RewardSignalType, RewardSignalSettings]. 
""" reward_signals_dict = { "extrinsic": {"strength": 1.0}, "curiosity": {"strength": 1.0}, } reward_signals = RewardSignalSettings.structure( reward_signals_dict, Dict[RewardSignalType, RewardSignalSettings] ) assert isinstance(reward_signals[RewardSignalType.EXTRINSIC], RewardSignalSettings) assert isinstance(reward_signals[RewardSignalType.CURIOSITY], CuriositySettings) # Check invalid reward signal type reward_signals_dict = {"puppo": {"strength": 1.0}} with pytest.raises(ValueError): RewardSignalSettings.structure( reward_signals_dict, Dict[RewardSignalType, RewardSignalSettings] ) # Check missing GAIL demo path reward_signals_dict = {"gail": {"strength": 1.0}} with pytest.raises(TypeError): RewardSignalSettings.structure( reward_signals_dict, Dict[RewardSignalType, RewardSignalSettings] ) # Check non-Dict input with pytest.raises(TrainerConfigError): RewardSignalSettings.structure( "notadict", Dict[RewardSignalType, RewardSignalSettings] ) def test_memory_settings_validation(): with pytest.raises(TrainerConfigError): NetworkSettings.MemorySettings(sequence_length=128, memory_size=63) with pytest.raises(TrainerConfigError): NetworkSettings.MemorySettings(sequence_length=128, memory_size=0) def test_env_parameter_structure(): """ Tests the EnvironmentParameterSettings structure method and all validators. """ env_params_dict = { "mass": { "sampler_type": "uniform", "sampler_parameters": {"min_value": 1.0, "max_value": 2.0}, }, "scale": { "sampler_type": "gaussian", "sampler_parameters": {"mean": 1.0, "st_dev": 2.0}, }, "length": { "sampler_type": "multirangeuniform", "sampler_parameters": {"intervals": [[1.0, 2.0], [3.0, 4.0]]}, }, "gravity": 1, "wall_height": { "curriculum": [ { "name": "Lesson1", "completion_criteria": { "measure": "reward", "behavior": "fake_behavior", "threshold": 10, }, "value": 1, }, {"value": 4, "name": "Lesson2"}, ] }, } env_param_settings = EnvironmentParameterSettings.structure( env_params_dict, Dict[str, EnvironmentParameterSettings] ) assert isinstance(env_param_settings["mass"].curriculum[0].value, UniformSettings) assert isinstance(env_param_settings["scale"].curriculum[0].value, GaussianSettings) assert isinstance( env_param_settings["length"].curriculum[0].value, MultiRangeUniformSettings ) # Check __str__ is correct assert ( str(env_param_settings["mass"].curriculum[0].value) == "Uniform sampler: min=1.0, max=2.0" ) assert ( str(env_param_settings["scale"].curriculum[0].value) == "Gaussian sampler: mean=1.0, stddev=2.0" ) assert ( str(env_param_settings["length"].curriculum[0].value) == "MultiRangeUniform sampler: intervals=[(1.0, 2.0), (3.0, 4.0)]" ) assert str(env_param_settings["gravity"].curriculum[0].value) == "Float: value=1" assert isinstance( env_param_settings["wall_height"].curriculum[0].value, ConstantSettings ) assert isinstance( env_param_settings["wall_height"].curriculum[1].value, ConstantSettings ) # Check invalid distribution type invalid_distribution_dict = { "mass": { "sampler_type": "beta", "sampler_parameters": {"alpha": 1.0, "beta": 2.0}, } } with pytest.raises(ValueError): EnvironmentParameterSettings.structure( invalid_distribution_dict, Dict[str, EnvironmentParameterSettings] ) # Check min less than max in uniform invalid_distribution_dict = { "mass": { "sampler_type": "uniform", "sampler_parameters": {"min_value": 2.0, "max_value": 1.0}, } } with pytest.raises(TrainerConfigError): EnvironmentParameterSettings.structure( invalid_distribution_dict, Dict[str, EnvironmentParameterSettings] ) # Check min less than max in multirange 
invalid_distribution_dict = { "mass": { "sampler_type": "multirangeuniform", "sampler_parameters": {"intervals": [[2.0, 1.0]]}, } } with pytest.raises(TrainerConfigError): EnvironmentParameterSettings.structure( invalid_distribution_dict, Dict[str, EnvironmentParameterSettings] ) # Check multirange has valid intervals invalid_distribution_dict = { "mass": { "sampler_type": "multirangeuniform", "sampler_parameters": {"intervals": [[1.0, 2.0], [3.0]]}, } } with pytest.raises(TrainerConfigError): EnvironmentParameterSettings.structure( invalid_distribution_dict, Dict[str, EnvironmentParameterSettings] ) # Check non-Dict input with pytest.raises(TrainerConfigError): EnvironmentParameterSettings.structure( "notadict", Dict[str, EnvironmentParameterSettings] ) invalid_curriculum_dict = { "wall_height": { "curriculum": [ { "name": "Lesson1", "completion_criteria": { "measure": "progress", "behavior": "fake_behavior", "threshold": 10, }, # > 1 is too large "value": 1, }, {"value": 4, "name": "Lesson2"}, ] } } with pytest.raises(TrainerConfigError): EnvironmentParameterSettings.structure( invalid_curriculum_dict, Dict[str, EnvironmentParameterSettings] ) @pytest.mark.parametrize("use_defaults", [True, False]) def test_exportable_settings(use_defaults): """ Test that structuring and unstructuring a RunOptions object results in the same configuration representation. """ # Try to enable as many features as possible in this test YAML to hit all the # edge cases. Set as much as possible as non-default values to ensure no flukes. test_yaml = """ behaviors: 3DBall: trainer_type: sac hyperparameters: learning_rate: 0.0004 learning_rate_schedule: constant batch_size: 64 buffer_size: 200000 buffer_init_steps: 100 tau: 0.006 steps_per_update: 10.0 save_replay_buffer: true init_entcoef: 0.5 reward_signal_steps_per_update: 10.0 network_settings: normalize: false hidden_units: 256 num_layers: 3 vis_encode_type: nature_cnn memory: memory_size: 1288 sequence_length: 12 reward_signals: extrinsic: gamma: 0.999 strength: 1.0 curiosity: gamma: 0.999 strength: 1.0 keep_checkpoints: 5 max_steps: 500000 time_horizon: 1000 summary_freq: 12000 checkpoint_interval: 1 threaded: true env_settings: env_path: test_env_path env_args: - test_env_args1 - test_env_args2 base_port: 12345 num_envs: 8 seed: 12345 engine_settings: width: 12345 height: 12345 quality_level: 12345 time_scale: 12345 target_frame_rate: 12345 capture_frame_rate: 12345 no_graphics: true checkpoint_settings: run_id: test_run_id initialize_from: test_directory load_model: false resume: true force: true train_model: false inference: false debug: true environment_parameters: big_wall_height: curriculum: - name: Lesson0 completion_criteria: measure: progress behavior: BigWallJump signal_smoothing: true min_lesson_length: 100 threshold: 0.1 value: sampler_type: uniform sampler_parameters: min_value: 0.0 max_value: 4.0 - name: Lesson1 completion_criteria: measure: reward behavior: BigWallJump signal_smoothing: true min_lesson_length: 100 threshold: 0.2 value: sampler_type: gaussian sampler_parameters: mean: 4.0 st_dev: 7.0 - name: Lesson2 completion_criteria: measure: progress behavior: BigWallJump signal_smoothing: true min_lesson_length: 20 threshold: 0.3 value: sampler_type: multirangeuniform sampler_parameters: intervals: [[1.0, 2.0],[4.0, 5.0]] - name: Lesson3 value: 8.0 small_wall_height: 42.0 other_wall_height: sampler_type: multirangeuniform sampler_parameters: intervals: [[1.0, 2.0],[4.0, 5.0]] """ if not use_defaults: loaded_yaml = 
yaml.safe_load(test_yaml)
        run_options = RunOptions.from_dict(yaml.safe_load(test_yaml))
    else:
        run_options = RunOptions()

    dict_export = run_options.as_dict()

    if not use_defaults:  # Don't need to check if no yaml
        check_dict_is_at_least(
            loaded_yaml, dict_export, exceptions=["environment_parameters"]
        )

    # Re-import and verify it has the same elements
    run_options2 = RunOptions.from_dict(dict_export)
    second_export = run_options2.as_dict()

    check_dict_is_at_least(dict_export, second_export)
    # Should be able to use equality instead of back-and-forth once
    # environment_parameters is working
    check_dict_is_at_least(second_export, dict_export)

    # Check that the two exports are the same
    assert dict_export == second_export


def test_environment_settings():
    # default args
    EnvironmentSettings()

    # 1 env is OK if no env_path
    EnvironmentSettings(num_envs=1)

    # multiple envs is OK if env_path is set
    EnvironmentSettings(num_envs=42, env_path="/foo/bar.exe")

    # Multiple environments with no env_path is an error
    with pytest.raises(ValueError):
        EnvironmentSettings(num_envs=2)


def test_default_settings():
    # Make default settings, one nested and one not.
    default_settings = {"max_steps": 1, "network_settings": {"num_layers": 1000}}
    behaviors = {"test1": {"max_steps": 2, "network_settings": {"hidden_units": 2000}}}
    run_options_dict = {"default_settings": default_settings, "behaviors": behaviors}
    run_options = RunOptions.from_dict(run_options_dict)

    # Check that a new behavior has the default settings
    default_settings_cls = cattr.structure(default_settings, TrainerSettings)
    check_if_different(default_settings_cls, run_options.behaviors["test2"])

    # Check that an existing behavior overrides the defaults in specified fields
    test1_settings = run_options.behaviors["test1"]
    assert test1_settings.max_steps == 2
    assert test1_settings.network_settings.hidden_units == 2000
    assert test1_settings.network_settings.num_layers == 1000

    # Change the overridden fields back, and check if the rest are equal.
    test1_settings.max_steps = 1
    test1_settings.network_settings.hidden_units = (
        default_settings_cls.network_settings.hidden_units
    )
    check_if_different(test1_settings, default_settings_cls)


def test_config_specified():
    # Test requiring all behavior names to be specified (or not)
    # Remove any pre-set defaults
    TrainerSettings.default_override = None
    behaviors = {"test1": {"max_steps": 2, "network_settings": {"hidden_units": 2000}}}
    run_options_dict = {"behaviors": behaviors}
    ro = RunOptions.from_dict(run_options_dict)
    # Don't require all behavior names
    ro.behaviors.set_config_specified(False)
    # Test that we can grab an entry that is not in the dict.
    assert isinstance(ro.behaviors["test2"], TrainerSettings)

    # Create strict RunOptions with no default_settings
    run_options_dict = {"behaviors": behaviors}
    ro = RunOptions.from_dict(run_options_dict)
    # Require all behavior names
    ro.behaviors.set_config_specified(True)
    with pytest.raises(TrainerConfigError):
        # Variable must be accessed otherwise Python won't query the dict
        print(ro.behaviors["test2"])

    # Create strict RunOptions with default settings
    default_settings = {"max_steps": 1, "network_settings": {"num_layers": 1000}}
    run_options_dict = {"default_settings": default_settings, "behaviors": behaviors}
    ro = RunOptions.from_dict(run_options_dict)
    # Require all behavior names
    ro.behaviors.set_config_specified(True)
    # Test that we can grab an entry that is not in the dict.
    assert isinstance(ro.behaviors["test2"], TrainerSettings)


def test_pickle():
    # Make sure RunOptions is pickle-able.
    run_options = RunOptions()
    p = pickle.dumps(run_options)
    pickle.loads(p)
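The tests above lean on attrs classes being round-trippable through cattr. In miniature, with a made-up class that is not part of the test module, the structure/unstructure pair works like this:

import attr
import cattr


@attr.s(auto_attribs=True)
class TinySettings:
    max_steps: int = 10
    run_id: str = "default"


raw = {"max_steps": 5, "run_id": "demo"}
settings = cattr.structure(raw, TinySettings)  # dict -> attrs instance
assert cattr.unstructure(settings) == raw      # attrs instance -> dict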
35.267025
102
0.626607
c9af70361d30b992ebd48cb68770479dca4710c9
61,524
py
Python
moto/awslambda/models.py
malekhnovich/moto
0fcf6529ab549faf7a2555d209ce2418391b7f9f
[ "Apache-2.0" ]
null
null
null
moto/awslambda/models.py
malekhnovich/moto
0fcf6529ab549faf7a2555d209ce2418391b7f9f
[ "Apache-2.0" ]
null
null
null
moto/awslambda/models.py
malekhnovich/moto
0fcf6529ab549faf7a2555d209ce2418391b7f9f
[ "Apache-2.0" ]
null
null
null
import base64 import time from collections import defaultdict import copy import datetime from gzip import GzipFile from sys import platform import docker import docker.errors import hashlib import io import logging import os import json import re import zipfile import uuid import tarfile import calendar import threading import weakref import requests.exceptions from moto.awslambda.policy import Policy from moto.core import BaseBackend, BaseModel, CloudFormationModel from moto.core.exceptions import RESTError from moto.iam.models import iam_backend from moto.iam.exceptions import IAMNotFoundException from moto.core.utils import unix_time_millis, BackendDict from moto.s3.models import s3_backend from moto.logs.models import logs_backends from moto.s3.exceptions import MissingBucket, MissingKey from moto import settings from .exceptions import ( CrossAccountNotAllowed, InvalidRoleFormat, InvalidParameterValueException, UnknownLayerException, UnknownFunctionException, UnknownAliasException, ) from .utils import ( make_function_arn, make_function_ver_arn, make_layer_arn, make_layer_ver_arn, split_layer_arn, ) from moto.sqs import sqs_backends from moto.dynamodb import dynamodb_backends from moto.dynamodbstreams import dynamodbstreams_backends from moto.core import ACCOUNT_ID from moto.utilities.docker_utilities import DockerModel, parse_image_ref from tempfile import TemporaryDirectory from uuid import uuid4 logger = logging.getLogger(__name__) docker_3 = docker.__version__[0] >= "3" def zip2tar(zip_bytes): with TemporaryDirectory() as td: tarname = os.path.join(td, "data.tar") timeshift = int( (datetime.datetime.now() - datetime.datetime.utcnow()).total_seconds() ) with zipfile.ZipFile(io.BytesIO(zip_bytes), "r") as zipf, tarfile.TarFile( tarname, "w" ) as tarf: for zipinfo in zipf.infolist(): if zipinfo.filename[-1] == "/": # is_dir() is py3.6+ continue tarinfo = tarfile.TarInfo(name=zipinfo.filename) tarinfo.size = zipinfo.file_size tarinfo.mtime = calendar.timegm(zipinfo.date_time) - timeshift infile = zipf.open(zipinfo.filename) tarf.addfile(tarinfo, infile) with open(tarname, "rb") as f: tar_data = f.read() return tar_data class _VolumeRefCount: __slots__ = "refcount", "volume" def __init__(self, refcount, volume): self.refcount = refcount self.volume = volume class _DockerDataVolumeContext: _data_vol_map = defaultdict( lambda: _VolumeRefCount(0, None) ) # {sha256: _VolumeRefCount} _lock = threading.Lock() def __init__(self, lambda_func): self._lambda_func = lambda_func self._vol_ref = None @property def name(self): return self._vol_ref.volume.name def __enter__(self): # See if volume is already known with self.__class__._lock: self._vol_ref = self.__class__._data_vol_map[self._lambda_func.code_digest] self._vol_ref.refcount += 1 if self._vol_ref.refcount > 1: return self # See if the volume already exists for vol in self._lambda_func.docker_client.volumes.list(): if vol.name == self._lambda_func.code_digest: self._vol_ref.volume = vol return self # It doesn't exist so we need to create it self._vol_ref.volume = self._lambda_func.docker_client.volumes.create( self._lambda_func.code_digest ) if docker_3: volumes = {self.name: {"bind": "/tmp/data", "mode": "rw"}} else: volumes = {self.name: "/tmp/data"} self._lambda_func.docker_client.images.pull( ":".join(parse_image_ref("alpine")) ) container = self._lambda_func.docker_client.containers.run( "alpine", "sleep 100", volumes=volumes, detach=True ) try: tar_bytes = zip2tar(self._lambda_func.code_bytes) container.put_archive("/tmp/data", 
tar_bytes) finally: container.remove(force=True) return self def __exit__(self, exc_type, exc_val, exc_tb): with self.__class__._lock: self._vol_ref.refcount -= 1 if self._vol_ref.refcount == 0: try: self._vol_ref.volume.remove() except docker.errors.APIError as e: if e.status_code != 409: raise raise # multiple processes trying to use same volume? def _zipfile_content(zipfile): # more hackery to handle unicode/bytes/str in python3 and python2 - # argh! try: to_unzip_code = base64.b64decode(bytes(zipfile, "utf-8")) except Exception: to_unzip_code = base64.b64decode(zipfile) sha_code = hashlib.sha256(to_unzip_code) base64ed_sha = base64.b64encode(sha_code.digest()).decode("utf-8") sha_hex_digest = sha_code.hexdigest() return to_unzip_code, len(to_unzip_code), base64ed_sha, sha_hex_digest def _s3_content(key): sha_code = hashlib.sha256(key.value) base64ed_sha = base64.b64encode(sha_code.digest()).decode("utf-8") sha_hex_digest = sha_code.hexdigest() return key.value, key.size, base64ed_sha, sha_hex_digest def _validate_s3_bucket_and_key(data): key = None try: # FIXME: does not validate bucket region key = s3_backend.get_object(data["S3Bucket"], data["S3Key"]) except MissingBucket: if do_validate_s3(): raise InvalidParameterValueException( "Error occurred while GetObject. S3 Error Code: NoSuchBucket. S3 Error Message: The specified bucket does not exist" ) except MissingKey: if do_validate_s3(): raise ValueError( "InvalidParameterValueException", "Error occurred while GetObject. S3 Error Code: NoSuchKey. S3 Error Message: The specified key does not exist.", ) return key class Permission(CloudFormationModel): def __init__(self, region): self.region = region @staticmethod def cloudformation_name_type(): return "Permission" @staticmethod def cloudformation_type(): return "AWS::Lambda::Permission" @classmethod def create_from_cloudformation_json( cls, resource_name, cloudformation_json, region_name, **kwargs ): properties = cloudformation_json["Properties"] backend = lambda_backends[region_name] fn = backend.get_function(properties["FunctionName"]) fn.policy.add_statement(raw=json.dumps(properties)) return Permission(region=region_name) class LayerVersion(CloudFormationModel): def __init__(self, spec, region): # required self.region = region self.name = spec["LayerName"] self.content = spec["Content"] # optional self.description = spec.get("Description", "") self.compatible_runtimes = spec.get("CompatibleRuntimes", []) self.license_info = spec.get("LicenseInfo", "") # auto-generated self.created_date = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") self.version = None self._attached = False self._layer = None if "ZipFile" in self.content: ( self.code_bytes, self.code_size, self.code_sha_256, self.code_digest, ) = _zipfile_content(self.content["ZipFile"]) else: key = _validate_s3_bucket_and_key(self.content) if key: ( self.code_bytes, self.code_size, self.code_sha_256, self.code_digest, ) = _s3_content(key) @property def arn(self): if self.version: return make_layer_ver_arn(self.region, ACCOUNT_ID, self.name, self.version) raise ValueError("Layer version is not set") def attach(self, layer, version): self._attached = True self._layer = layer self.version = version def get_layer_version(self): return { "Content": { "Location": "s3://", "CodeSha256": self.code_sha_256, "CodeSize": self.code_size, }, "Version": self.version, "LayerArn": self._layer.layer_arn, "LayerVersionArn": self.arn, "CreatedDate": self.created_date, "CompatibleRuntimes": self.compatible_runtimes, "Description": 
self.description, "LicenseInfo": self.license_info, } @staticmethod def cloudformation_name_type(): return "LayerVersion" @staticmethod def cloudformation_type(): return "AWS::Lambda::LayerVersion" @classmethod def create_from_cloudformation_json( cls, resource_name, cloudformation_json, region_name, **kwargs ): properties = cloudformation_json["Properties"] optional_properties = ("Description", "CompatibleRuntimes", "LicenseInfo") # required spec = { "Content": properties["Content"], "LayerName": resource_name, } for prop in optional_properties: if prop in properties: spec[prop] = properties[prop] backend = lambda_backends[region_name] layer_version = backend.publish_layer_version(spec) return layer_version class LambdaAlias(BaseModel): def __init__( self, region, name, function_name, function_version, description, routing_config ): self.arn = ( f"arn:aws:lambda:{region}:{ACCOUNT_ID}:function:{function_name}:{name}" ) self.name = name self.function_version = function_version self.description = description self.routing_config = routing_config self.revision_id = str(uuid4()) def update(self, description, function_version, routing_config): if description is not None: self.description = description if function_version is not None: self.function_version = function_version if routing_config is not None: self.routing_config = routing_config def to_json(self): return { "AliasArn": self.arn, "Description": self.description, "FunctionVersion": self.function_version, "Name": self.name, "RevisionId": self.revision_id, "RoutingConfig": self.routing_config or None, } class Layer(object): def __init__(self, name, region): self.region = region self.name = name self.layer_arn = make_layer_arn(region, ACCOUNT_ID, self.name) self._latest_version = 0 self.layer_versions = {} def attach_version(self, layer_version): self._latest_version += 1 layer_version.attach(self, self._latest_version) self.layer_versions[str(self._latest_version)] = layer_version def delete_version(self, layer_version): self.layer_versions.pop(str(layer_version), None) def to_dict(self): return { "LayerName": self.name, "LayerArn": self.layer_arn, "LatestMatchingVersion": self.layer_versions[ str(self._latest_version) ].get_layer_version(), } class LambdaFunction(CloudFormationModel, DockerModel): def __init__(self, spec, region, version=1): DockerModel.__init__(self) # required self.region = region self.code = spec["Code"] self.function_name = spec["FunctionName"] self.handler = spec["Handler"] self.role = spec["Role"] self.run_time = spec["Runtime"] self.logs_backend = logs_backends[self.region] self.environment_vars = spec.get("Environment", {}).get("Variables", {}) self.policy = None self.state = "Active" self.reserved_concurrency = spec.get("ReservedConcurrentExecutions", None) # optional self.description = spec.get("Description", "") self.memory_size = spec.get("MemorySize", 128) self.package_type = spec.get("PackageType", None) self.publish = spec.get("Publish", False) # this is ignored currently self.timeout = spec.get("Timeout", 3) self.layers = self._get_layers_data(spec.get("Layers", [])) self.signing_profile_version_arn = spec.get("SigningProfileVersionArn") self.signing_job_arn = spec.get("SigningJobArn") self.code_signing_config_arn = spec.get("CodeSigningConfigArn") self.tracing_config = spec.get("TracingConfig") or {"Mode": "PassThrough"} self.logs_group_name = "/aws/lambda/{}".format(self.function_name) # this isn't finished yet. 
it needs to find out the VpcId value self._vpc_config = spec.get( "VpcConfig", {"SubnetIds": [], "SecurityGroupIds": []} ) # auto-generated self.version = version self.last_modified = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") if "ZipFile" in self.code: ( self.code_bytes, self.code_size, self.code_sha_256, self.code_digest, ) = _zipfile_content(self.code["ZipFile"]) # TODO: we should be putting this in a lambda bucket self.code["UUID"] = str(uuid.uuid4()) self.code["S3Key"] = "{}-{}".format(self.function_name, self.code["UUID"]) else: key = _validate_s3_bucket_and_key(self.code) if key: ( self.code_bytes, self.code_size, self.code_sha_256, self.code_digest, ) = _s3_content(key) else: self.code_bytes = "" self.code_size = 0 self.code_sha_256 = "" self.function_arn = make_function_arn( self.region, ACCOUNT_ID, self.function_name ) if spec.get("Tags"): self.tags = spec.get("Tags") else: self.tags = dict() self._aliases = dict() def set_version(self, version): self.function_arn = make_function_ver_arn( self.region, ACCOUNT_ID, self.function_name, version ) self.version = version self.last_modified = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") @property def vpc_config(self): config = self._vpc_config.copy() if config["SecurityGroupIds"]: config.update({"VpcId": "vpc-123abc"}) return config @property def physical_resource_id(self): return self.function_name def __repr__(self): return json.dumps(self.get_configuration()) def _get_layers_data(self, layers_versions_arns): backend = lambda_backends[self.region] layer_versions = [ backend.layers_versions_by_arn(layer_version) for layer_version in layers_versions_arns ] if not all(layer_versions): raise ValueError( "InvalidParameterValueException", "One or more LayerVersion does not exist {0}".format( layers_versions_arns ), ) return [{"Arn": lv.arn, "CodeSize": lv.code_size} for lv in layer_versions] def get_code_signing_config(self): return { "CodeSigningConfigArn": self.code_signing_config_arn, "FunctionName": self.function_name, } def get_configuration(self, on_create=False): config = { "CodeSha256": self.code_sha_256, "CodeSize": self.code_size, "Description": self.description, "FunctionArn": self.function_arn, "FunctionName": self.function_name, "Handler": self.handler, "LastModified": self.last_modified, "MemorySize": self.memory_size, "Role": self.role, "Runtime": self.run_time, "State": self.state, "PackageType": self.package_type, "Timeout": self.timeout, "Version": str(self.version), "VpcConfig": self.vpc_config, "Layers": self.layers, "SigningProfileVersionArn": self.signing_profile_version_arn, "SigningJobArn": self.signing_job_arn, "TracingConfig": self.tracing_config, } if not on_create: # Only return this variable after the first creation config["LastUpdateStatus"] = "Successful" if self.environment_vars: config["Environment"] = {"Variables": self.environment_vars} return config def get_code(self): code = { "Code": { "Location": "s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com/{1}".format( self.region, self.code["S3Key"] ), "RepositoryType": "S3", }, "Configuration": self.get_configuration(), } if self.tags: code["Tags"] = self.tags if self.reserved_concurrency: code.update( { "Concurrency": { "ReservedConcurrentExecutions": self.reserved_concurrency } } ) return code def update_configuration(self, config_updates): for key, value in config_updates.items(): if key == "Description": self.description = value elif key == "Handler": self.handler = value elif key == "MemorySize": self.memory_size = value elif key == 
"Role": self.role = value elif key == "Runtime": self.run_time = value elif key == "Timeout": self.timeout = value elif key == "VpcConfig": self._vpc_config = value elif key == "Environment": self.environment_vars = value["Variables"] elif key == "Layers": self.layers = self._get_layers_data(value) return self.get_configuration() def update_function_code(self, updated_spec): if "DryRun" in updated_spec and updated_spec["DryRun"]: return self.get_configuration() if "ZipFile" in updated_spec: self.code["ZipFile"] = updated_spec["ZipFile"] ( self.code_bytes, self.code_size, self.code_sha_256, self.code_digest, ) = _zipfile_content(updated_spec["ZipFile"]) # TODO: we should be putting this in a lambda bucket self.code["UUID"] = str(uuid.uuid4()) self.code["S3Key"] = "{}-{}".format(self.function_name, self.code["UUID"]) elif "S3Bucket" in updated_spec and "S3Key" in updated_spec: key = None try: # FIXME: does not validate bucket region key = s3_backend.get_object( updated_spec["S3Bucket"], updated_spec["S3Key"] ) except MissingBucket: if do_validate_s3(): raise ValueError( "InvalidParameterValueException", "Error occurred while GetObject. S3 Error Code: NoSuchBucket. S3 Error Message: The specified bucket does not exist", ) except MissingKey: if do_validate_s3(): raise ValueError( "InvalidParameterValueException", "Error occurred while GetObject. S3 Error Code: NoSuchKey. S3 Error Message: The specified key does not exist.", ) if key: ( self.code_bytes, self.code_size, self.code_sha_256, self.code_digest, ) = _s3_content(key) self.code["S3Bucket"] = updated_spec["S3Bucket"] self.code["S3Key"] = updated_spec["S3Key"] return self.get_configuration() @staticmethod def convert(s): try: return str(s, encoding="utf-8") except Exception: return s def _invoke_lambda(self, event=None): # Create the LogGroup if necessary, to write the result to self.logs_backend.ensure_log_group(self.logs_group_name, []) # TODO: context not yet implemented if event is None: event = dict() output = None try: # TODO: I believe we can keep the container running and feed events as needed # also need to hook it up to the other services so it can make kws/s3 etc calls # Should get invoke_id /RequestId from invocation env_vars = { "_HANDLER": self.handler, "AWS_EXECUTION_ENV": "AWS_Lambda_{}".format(self.run_time), "AWS_LAMBDA_FUNCTION_TIMEOUT": self.timeout, "AWS_LAMBDA_FUNCTION_NAME": self.function_name, "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": self.memory_size, "AWS_LAMBDA_FUNCTION_VERSION": self.version, "AWS_REGION": self.region, "AWS_ACCESS_KEY_ID": "role-account-id", "AWS_SECRET_ACCESS_KEY": "role-secret-key", "AWS_SESSION_TOKEN": "session-token", } env_vars.update(self.environment_vars) env_vars["MOTO_HOST"] = settings.moto_server_host() env_vars["MOTO_PORT"] = settings.moto_server_port() env_vars[ "MOTO_HTTP_ENDPOINT" ] = f'{env_vars["MOTO_HOST"]}:{env_vars["MOTO_PORT"]}' container = exit_code = None log_config = docker.types.LogConfig(type=docker.types.LogConfig.types.JSON) with _DockerDataVolumeContext(self) as data_vol: try: run_kwargs = dict() network_name = settings.moto_network_name() network_mode = settings.moto_network_mode() if network_name: run_kwargs["network"] = network_name elif network_mode: run_kwargs["network_mode"] = network_mode elif settings.TEST_SERVER_MODE: # AWSLambda can make HTTP requests to a Docker container called 'motoserver' # Only works if our Docker-container is named 'motoserver' # TODO: should remove this and rely on 'network_mode' instead, as this is too tightly coupled with our own test 
setup run_kwargs["links"] = {"motoserver": "motoserver"} # add host.docker.internal host on linux to emulate Mac + Windows behavior # for communication with other mock AWS services running on localhost if platform == "linux" or platform == "linux2": run_kwargs["extra_hosts"] = { "host.docker.internal": "host-gateway" } image_repo = settings.moto_lambda_image() image_ref = f"{image_repo}:{self.run_time}" self.docker_client.images.pull(":".join(parse_image_ref(image_ref))) container = self.docker_client.containers.run( image_ref, [self.handler, json.dumps(event)], remove=False, mem_limit="{}m".format(self.memory_size), volumes=["{}:/var/task".format(data_vol.name)], environment=env_vars, detach=True, log_config=log_config, **run_kwargs, ) finally: if container: try: exit_code = container.wait(timeout=300) except requests.exceptions.ReadTimeout: exit_code = -1 container.stop() container.kill() else: if docker_3: exit_code = exit_code["StatusCode"] output = container.logs(stdout=False, stderr=True) output += container.logs(stdout=True, stderr=False) container.remove() output = output.decode("utf-8") self.save_logs(output) # We only care about the response from the lambda # Which is the last line of the output, according to https://github.com/lambci/docker-lambda/issues/25 resp = output.splitlines()[-1] logs = os.linesep.join( [line for line in self.convert(output).splitlines()[:-1]] ) invocation_error = exit_code != 0 return resp, invocation_error, logs except docker.errors.DockerException as e: # Docker itself is probably not running - there will be no Lambda-logs to handle msg = "error running docker: {}".format(e) self.save_logs(msg) return msg, True, "" def save_logs(self, output): # Send output to "logs" backend invoke_id = uuid.uuid4().hex log_stream_name = ( "{date.year}/{date.month:02d}/{date.day:02d}/[{version}]{invoke_id}".format( date=datetime.datetime.utcnow(), version=self.version, invoke_id=invoke_id, ) ) self.logs_backend.create_log_stream(self.logs_group_name, log_stream_name) log_events = [ {"timestamp": unix_time_millis(), "message": line} for line in output.splitlines() ] self.logs_backend.put_log_events( self.logs_group_name, log_stream_name, log_events ) def invoke(self, body, request_headers, response_headers): if body: body = json.loads(body) else: body = "{}" # Get the invocation type: res, errored, logs = self._invoke_lambda(event=body) inv_type = request_headers.get("x-amz-invocation-type", "RequestResponse") if inv_type == "RequestResponse": encoded = base64.b64encode(logs.encode("utf-8")) response_headers["x-amz-log-result"] = encoded.decode("utf-8") result = res.encode("utf-8") else: result = res if errored: response_headers["x-amz-function-error"] = "Handled" return result @staticmethod def cloudformation_name_type(): return "FunctionName" @staticmethod def cloudformation_type(): # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-function.html return "AWS::Lambda::Function" @classmethod def create_from_cloudformation_json( cls, resource_name, cloudformation_json, region_name, **kwargs ): properties = cloudformation_json["Properties"] optional_properties = ( "Description", "MemorySize", "Publish", "Timeout", "VpcConfig", "Environment", "ReservedConcurrentExecutions", ) # required spec = { "Code": properties["Code"], "FunctionName": resource_name, "Handler": properties["Handler"], "Role": properties["Role"], "Runtime": properties["Runtime"], } # NOTE: Not doing `properties.get(k, DEFAULT)` to avoid duplicating the # default logic 
for prop in optional_properties: if prop in properties: spec[prop] = properties[prop] # when ZipFile is present in CloudFormation, per the official docs, # the code it's a plaintext code snippet up to 4096 bytes. # this snippet converts this plaintext code to a proper base64-encoded ZIP file. if "ZipFile" in properties["Code"]: spec["Code"]["ZipFile"] = base64.b64encode( cls._create_zipfile_from_plaintext_code(spec["Code"]["ZipFile"]) ) backend = lambda_backends[region_name] fn = backend.create_function(spec) return fn @classmethod def has_cfn_attr(cls, attr): return attr in ["Arn"] def get_cfn_attribute(self, attribute_name): from moto.cloudformation.exceptions import UnformattedGetAttTemplateException if attribute_name == "Arn": return make_function_arn(self.region, ACCOUNT_ID, self.function_name) raise UnformattedGetAttTemplateException() @classmethod def update_from_cloudformation_json( cls, original_resource, new_resource_name, cloudformation_json, region_name ): updated_props = cloudformation_json["Properties"] original_resource.update_configuration(updated_props) original_resource.update_function_code(updated_props["Code"]) return original_resource @staticmethod def _create_zipfile_from_plaintext_code(code): zip_output = io.BytesIO() zip_file = zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED) zip_file.writestr("index.py", code) # This should really be part of the 'lambci' docker image from moto.packages.cfnresponse import cfnresponse with open(cfnresponse.__file__) as cfn: zip_file.writestr("cfnresponse.py", cfn.read()) zip_file.close() zip_output.seek(0) return zip_output.read() def delete(self, region): lambda_backends[region].delete_function(self.function_name) def delete_alias(self, name): self._aliases.pop(name, None) def get_alias(self, name): if name in self._aliases: return self._aliases[name] arn = f"arn:aws:lambda:{self.region}:{ACCOUNT_ID}:function:{self.function_name}:{name}" raise UnknownAliasException(arn) def put_alias(self, name, description, function_version, routing_config): alias = LambdaAlias( region=self.region, name=name, function_name=self.function_name, function_version=function_version, description=description, routing_config=routing_config, ) self._aliases[name] = alias return alias def update_alias(self, name, description, function_version, routing_config): alias = self.get_alias(name) alias.update(description, function_version, routing_config) return alias class EventSourceMapping(CloudFormationModel): def __init__(self, spec): # required self.function_name = spec["FunctionName"] self.event_source_arn = spec["EventSourceArn"] # optional self.batch_size = spec.get("BatchSize") self.starting_position = spec.get("StartingPosition", "TRIM_HORIZON") self.enabled = spec.get("Enabled", True) self.starting_position_timestamp = spec.get("StartingPositionTimestamp", None) self.function_arn = spec["FunctionArn"] self.uuid = str(uuid.uuid4()) self.last_modified = time.mktime(datetime.datetime.utcnow().timetuple()) def _get_service_source_from_arn(self, event_source_arn): return event_source_arn.split(":")[2].lower() def _validate_event_source(self, event_source_arn): valid_services = ("dynamodb", "kinesis", "sqs") service = self._get_service_source_from_arn(event_source_arn) return True if service in valid_services else False @property def event_source_arn(self): return self._event_source_arn @event_source_arn.setter def event_source_arn(self, event_source_arn): if not self._validate_event_source(event_source_arn): raise ValueError( 
"InvalidParameterValueException", "Unsupported event source type" ) self._event_source_arn = event_source_arn @property def batch_size(self): return self._batch_size @batch_size.setter def batch_size(self, batch_size): batch_size_service_map = { "kinesis": (100, 10000), "dynamodb": (100, 1000), "sqs": (10, 10), } source_type = self._get_service_source_from_arn(self.event_source_arn) batch_size_for_source = batch_size_service_map[source_type] if batch_size is None: self._batch_size = batch_size_for_source[0] elif batch_size > batch_size_for_source[1]: error_message = "BatchSize {} exceeds the max of {}".format( batch_size, batch_size_for_source[1] ) raise ValueError("InvalidParameterValueException", error_message) else: self._batch_size = int(batch_size) def get_configuration(self): return { "UUID": self.uuid, "BatchSize": self.batch_size, "EventSourceArn": self.event_source_arn, "FunctionArn": self.function_arn, "LastModified": self.last_modified, "LastProcessingResult": "", "State": "Enabled" if self.enabled else "Disabled", "StateTransitionReason": "User initiated", } def delete(self, region_name): lambda_backend = lambda_backends[region_name] lambda_backend.delete_event_source_mapping(self.uuid) @staticmethod def cloudformation_name_type(): return None @staticmethod def cloudformation_type(): # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-eventsourcemapping.html return "AWS::Lambda::EventSourceMapping" @classmethod def create_from_cloudformation_json( cls, resource_name, cloudformation_json, region_name, **kwargs ): properties = cloudformation_json["Properties"] lambda_backend = lambda_backends[region_name] return lambda_backend.create_event_source_mapping(properties) @classmethod def update_from_cloudformation_json( cls, original_resource, new_resource_name, cloudformation_json, region_name ): properties = cloudformation_json["Properties"] event_source_uuid = original_resource.uuid lambda_backend = lambda_backends[region_name] return lambda_backend.update_event_source_mapping(event_source_uuid, properties) @classmethod def delete_from_cloudformation_json( cls, resource_name, cloudformation_json, region_name ): properties = cloudformation_json["Properties"] lambda_backend = lambda_backends[region_name] esms = lambda_backend.list_event_source_mappings( event_source_arn=properties["EventSourceArn"], function_name=properties["FunctionName"], ) for esm in esms: if esm.uuid == resource_name: esm.delete(region_name) @property def physical_resource_id(self): return self.uuid class LambdaVersion(CloudFormationModel): def __init__(self, spec): self.version = spec["Version"] def __repr__(self): return str(self.logical_resource_id) @staticmethod def cloudformation_name_type(): return None @staticmethod def cloudformation_type(): # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-version.html return "AWS::Lambda::Version" @classmethod def create_from_cloudformation_json( cls, resource_name, cloudformation_json, region_name, **kwargs ): properties = cloudformation_json["Properties"] function_name = properties["FunctionName"] func = lambda_backends[region_name].publish_function(function_name) spec = {"Version": func.version} return LambdaVersion(spec) class LambdaStorage(object): def __init__(self, region_name): # Format 'func_name' {'versions': []} self._functions = {} self._aliases = dict() self._arns = weakref.WeakValueDictionary() self.region_name = region_name def _get_latest(self, name): return 
self._functions[name]["latest"] def _get_version(self, name, version): index = version - 1 try: return self._functions[name]["versions"][index] except IndexError: return None def delete_alias(self, name, function_name): fn = self.get_function_by_name_or_arn(function_name) return fn.delete_alias(name) def get_alias(self, name, function_name): fn = self.get_function_by_name_or_arn(function_name) return fn.get_alias(name) def put_alias( self, name, function_name, function_version, description, routing_config ): fn = self.get_function_by_name_or_arn(function_name) return fn.put_alias(name, description, function_version, routing_config) def update_alias( self, name, function_name, function_version, description, routing_config ): fn = self.get_function_by_name_or_arn(function_name) return fn.update_alias(name, description, function_version, routing_config) def get_function_by_name(self, name, qualifier=None): if name not in self._functions: return None if qualifier is None: return self._get_latest(name) try: return self._get_version(name, int(qualifier)) except ValueError: return self._functions[name]["latest"] def list_versions_by_function(self, name): if name not in self._functions: return None latest = copy.copy(self._functions[name]["latest"]) latest.function_arn += ":$LATEST" return [latest] + self._functions[name]["versions"] def get_arn(self, arn): # Function ARN may contain an alias # arn:aws:lambda:region:account_id:function:<fn_name>:<alias_name> if ":" in arn.split(":function:")[-1]: # arn = arn:aws:lambda:region:account_id:function:<fn_name> arn = ":".join(arn.split(":")[0:-1]) return self._arns.get(arn, None) def get_function_by_name_or_arn(self, name_or_arn, qualifier=None): return self.get_function_by_name(name_or_arn, qualifier) or self.get_arn( name_or_arn ) def put_function(self, fn): """ :param fn: Function :type fn: LambdaFunction """ valid_role = re.match(InvalidRoleFormat.pattern, fn.role) if valid_role: account = valid_role.group(2) if account != ACCOUNT_ID: raise CrossAccountNotAllowed() try: iam_backend.get_role_by_arn(fn.role) except IAMNotFoundException: raise InvalidParameterValueException( "The role defined for the function cannot be assumed by Lambda." 
) else: raise InvalidRoleFormat(fn.role) if fn.function_name in self._functions: self._functions[fn.function_name]["latest"] = fn else: self._functions[fn.function_name] = {"latest": fn, "versions": []} # instantiate a new policy for this version of the lambda fn.policy = Policy(fn) self._arns[fn.function_arn] = fn def publish_function(self, name_or_arn, description=""): function = self.get_function_by_name_or_arn(name_or_arn) if not function: if name_or_arn.startswith("arn:aws"): arn = name_or_arn else: arn = make_function_arn(self.region_name, ACCOUNT_ID, name_or_arn) raise UnknownFunctionException(arn) name = function.function_name if name not in self._functions: return None if not self._functions[name]["latest"]: return None new_version = len(self._functions[name]["versions"]) + 1 fn = copy.copy(self._functions[name]["latest"]) fn.set_version(new_version) if description: fn.description = description self._functions[name]["versions"].append(fn) self._arns[fn.function_arn] = fn return fn def del_function(self, name_or_arn, qualifier=None): function = self.get_function_by_name_or_arn(name_or_arn) if function: name = function.function_name if not qualifier: # Something is still reffing this so delete all arns latest = self._functions[name]["latest"].function_arn del self._arns[latest] for fn in self._functions[name]["versions"]: del self._arns[fn.function_arn] del self._functions[name] return True elif qualifier == "$LATEST": self._functions[name]["latest"] = None # If theres no functions left if ( not self._functions[name]["versions"] and not self._functions[name]["latest"] ): del self._functions[name] return True else: fn = self.get_function_by_name(name, qualifier) if fn: self._functions[name]["versions"].remove(fn) # If theres no functions left if ( not self._functions[name]["versions"] and not self._functions[name]["latest"] ): del self._functions[name] return True return False def all(self): result = [] for function_group in self._functions.values(): latest = copy.deepcopy(function_group["latest"]) latest.function_arn = "{}:$LATEST".format(latest.function_arn) result.append(latest) result.extend(function_group["versions"]) return result def latest(self): """ Return the list of functions with version @LATEST :return: """ result = [] for function_group in self._functions.values(): if function_group["latest"] is not None: result.append(function_group["latest"]) return result class LayerStorage(object): def __init__(self): self._layers = {} self._arns = weakref.WeakValueDictionary() def put_layer_version(self, layer_version): """ :param layer_version: LayerVersion """ if layer_version.name not in self._layers: self._layers[layer_version.name] = Layer( layer_version.name, layer_version.region ) self._layers[layer_version.name].attach_version(layer_version) def list_layers(self): return [layer.to_dict() for layer in self._layers.values()] def delete_layer_version(self, layer_name, layer_version): self._layers[layer_name].delete_version(layer_version) def get_layer_version(self, layer_name, layer_version): if layer_name not in self._layers: raise UnknownLayerException() for lv in self._layers[layer_name].layer_versions.values(): if lv.version == int(layer_version): return lv raise UnknownLayerException() def get_layer_versions(self, layer_name): if layer_name in self._layers: return list(iter(self._layers[layer_name].layer_versions.values())) return [] def get_layer_version_by_arn(self, layer_version_arn): split_arn = split_layer_arn(layer_version_arn) if split_arn.layer_name in 
self._layers: return self._layers[split_arn.layer_name].layer_versions.get( split_arn.version, None ) return None class LambdaBackend(BaseBackend): """ Implementation of the AWS Lambda endpoint. Invoking functions is supported - they will run inside a Docker container, emulating the real AWS behaviour as closely as possible. It is possible to connect from AWS Lambdas to other services, as long as you are running Moto in ServerMode. The Lambda has access to environment variables `MOTO_HOST` and `MOTO_PORT`, which can be used to build the url that MotoServer runs on: .. sourcecode:: python def lambda_handler(event, context): host = os.environ.get("MOTO_HOST") port = os.environ.get("MOTO_PORT") url = host + ":" + port ec2 = boto3.client('ec2', region_name='us-west-2', endpoint_url=url) # Or even simpler: full_url = os.environ.get("MOTO_HTTP_ENDPOINT") ec2 = boto3.client("ec2", region_name="eu-west-1", endpoint_url=full_url) ec2.do_whatever_inside_the_existing_moto_server() Moto will run on port 5000 by default. This can be overwritten by setting an environment variable when starting Moto: .. sourcecode:: bash # This env var will be propagated to the Docker container running the Lambda functions MOTO_PORT=5000 moto_server The Docker container uses the default network mode, `bridge`. The following environment variables are available for fine-grained control over the Docker connection options: .. sourcecode:: bash # Provide the name of a custom network to connect to MOTO_DOCKER_NETWORK_NAME=mycustomnetwork moto_server # Override the network mode # For example, network_mode=host would use the network of the host machine # Note that this option will be ignored if MOTO_DOCKER_NETWORK_NAME is also set MOTO_DOCKER_NETWORK_MODE=host moto_server The Docker images used by Moto are taken from the `lambci/lambda`-repo by default. Use the following environment variable to configure a different repo: .. sourcecode:: bash MOTO_DOCKER_LAMBDA_IMAGE=mLupin/docker-lambda .. note:: When using the decorators, a Docker container cannot reach Moto, as it does not run as a server. Any boto3-invocations used within your Lambda will try to connect to AWS. 
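
    As a rough illustration only (the endpoint URL, function name, role name
    and runtime below are placeholder values, not anything moto requires), a
    ServerMode client could exercise this backend like so:

    .. sourcecode:: python

        import io, zipfile
        import boto3

        endpoint = "http://localhost:5000"

        # The backend validates the execution role, so create one first
        # through the mocked IAM endpoint.
        iam = boto3.client("iam", region_name="us-east-1", endpoint_url=endpoint)
        role_arn = iam.create_role(
            RoleName="demo-role", AssumeRolePolicyDocument="{}"
        )["Role"]["Arn"]

        # Build a minimal deployment package in memory.
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, "w") as zf:
            zf.writestr("index.py", "def handler(event, ctx): return event")

        awslambda = boto3.client(
            "lambda", region_name="us-east-1", endpoint_url=endpoint
        )
        awslambda.create_function(
            FunctionName="demo",
            Runtime="python3.7",
            Role=role_arn,
            Handler="index.handler",
            Code={"ZipFile": buf.getvalue()},
        )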
""" def __init__(self, region_name): self._lambdas = LambdaStorage(region_name=region_name) self._event_source_mappings = {} self._layers = LayerStorage() self.region_name = region_name def reset(self): region_name = self.region_name self.__dict__ = {} self.__init__(region_name) @staticmethod def default_vpc_endpoint_service(service_region, zones): """Default VPC endpoint service.""" return BaseBackend.default_vpc_endpoint_service_factory( service_region, zones, "lambda" ) def create_alias( self, name, function_name, function_version, description, routing_config ): return self._lambdas.put_alias( name, function_name, function_version, description, routing_config ) def delete_alias(self, name, function_name): return self._lambdas.delete_alias(name, function_name) def get_alias(self, name, function_name): return self._lambdas.get_alias(name, function_name) def update_alias( self, name, function_name, function_version, description, routing_config ): """ The RevisionId parameter is not yet implemented """ return self._lambdas.update_alias( name, function_name, function_version, description, routing_config ) def create_function(self, spec): function_name = spec.get("FunctionName", None) if function_name is None: raise RESTError("InvalidParameterValueException", "Missing FunctionName") fn = LambdaFunction(spec, self.region_name, version="$LATEST") self._lambdas.put_function(fn) if spec.get("Publish"): ver = self.publish_function(function_name) fn = copy.deepcopy( fn ) # We don't want to change the actual version - just the return value fn.version = ver.version return fn def create_event_source_mapping(self, spec): required = ["EventSourceArn", "FunctionName"] for param in required: if not spec.get(param): raise RESTError( "InvalidParameterValueException", "Missing {}".format(param) ) # Validate function name func = self._lambdas.get_function_by_name_or_arn(spec.get("FunctionName", "")) if not func: raise RESTError("ResourceNotFoundException", "Invalid FunctionName") # Validate queue for queue in sqs_backends[self.region_name].queues.values(): if queue.queue_arn == spec["EventSourceArn"]: if queue.lambda_event_source_mappings.get("func.function_arn"): # TODO: Correct exception? raise RESTError( "ResourceConflictException", "The resource already exists." 
) if queue.fifo_queue: raise RESTError( "InvalidParameterValueException", "{} is FIFO".format(queue.queue_arn), ) else: spec.update({"FunctionArn": func.function_arn}) esm = EventSourceMapping(spec) self._event_source_mappings[esm.uuid] = esm # Set backend function on queue queue.lambda_event_source_mappings[esm.function_arn] = esm return esm for stream in json.loads( dynamodbstreams_backends[self.region_name].list_streams() )["Streams"]: if stream["StreamArn"] == spec["EventSourceArn"]: spec.update({"FunctionArn": func.function_arn}) esm = EventSourceMapping(spec) self._event_source_mappings[esm.uuid] = esm table_name = stream["TableName"] table = dynamodb_backends[self.region_name].get_table(table_name) table.lambda_event_source_mappings[esm.function_arn] = esm return esm raise RESTError("ResourceNotFoundException", "Invalid EventSourceArn") def publish_layer_version(self, spec): required = ["LayerName", "Content"] for param in required: if not spec.get(param): raise InvalidParameterValueException("Missing {}".format(param)) layer_version = LayerVersion(spec, self.region_name) self._layers.put_layer_version(layer_version) return layer_version def list_layers(self): return self._layers.list_layers() def delete_layer_version(self, layer_name, layer_version): return self._layers.delete_layer_version(layer_name, layer_version) def get_layer_version(self, layer_name, layer_version): return self._layers.get_layer_version(layer_name, layer_version) def get_layer_versions(self, layer_name): return self._layers.get_layer_versions(layer_name) def layers_versions_by_arn(self, layer_version_arn): return self._layers.get_layer_version_by_arn(layer_version_arn) def publish_function(self, function_name, description=""): return self._lambdas.publish_function(function_name, description) def get_function(self, function_name_or_arn, qualifier=None): return self._lambdas.get_function_by_name_or_arn( function_name_or_arn, qualifier ) def list_versions_by_function(self, function_name): return self._lambdas.list_versions_by_function(function_name) def get_event_source_mapping(self, uuid): return self._event_source_mappings.get(uuid) def delete_event_source_mapping(self, uuid): return self._event_source_mappings.pop(uuid) def update_event_source_mapping(self, uuid, spec): esm = self.get_event_source_mapping(uuid) if not esm: return False for key in spec.keys(): if key == "FunctionName": func = self._lambdas.get_function_by_name_or_arn(spec[key]) esm.function_arn = func.function_arn elif key == "BatchSize": esm.batch_size = spec[key] elif key == "Enabled": esm.enabled = spec[key] esm.last_modified = time.mktime(datetime.datetime.utcnow().timetuple()) return esm def list_event_source_mappings(self, event_source_arn, function_name): esms = list(self._event_source_mappings.values()) if event_source_arn: esms = list(filter(lambda x: x.event_source_arn == event_source_arn, esms)) if function_name: esms = list(filter(lambda x: x.function_name == function_name, esms)) return esms def get_function_by_arn(self, function_arn): return self._lambdas.get_arn(function_arn) def delete_function(self, function_name, qualifier=None): return self._lambdas.del_function(function_name, qualifier) def list_functions(self, func_version=None): if func_version == "ALL": return self._lambdas.all() return self._lambdas.latest() def send_sqs_batch(self, function_arn, messages, queue_arn): success = True for message in messages: func = self.get_function_by_arn(function_arn) result = self._send_sqs_message(func, message, queue_arn) if not 
result: success = False return success def _send_sqs_message(self, func, message, queue_arn): event = { "Records": [ { "messageId": message.id, "receiptHandle": message.receipt_handle, "body": message.body, "attributes": { "ApproximateReceiveCount": "1", "SentTimestamp": "1545082649183", "SenderId": "AIDAIENQZJOLO23YVJ4VO", "ApproximateFirstReceiveTimestamp": "1545082649185", }, "messageAttributes": {}, "md5OfBody": "098f6bcd4621d373cade4e832627b4f6", "eventSource": "aws:sqs", "eventSourceARN": queue_arn, "awsRegion": self.region_name, } ] } request_headers = {} response_headers = {} func.invoke(json.dumps(event), request_headers, response_headers) return "x-amz-function-error" not in response_headers def send_sns_message(self, function_name, message, subject=None, qualifier=None): event = { "Records": [ { "EventVersion": "1.0", "EventSubscriptionArn": "arn:aws:sns:EXAMPLE", "EventSource": "aws:sns", "Sns": { "SignatureVersion": "1", "Timestamp": "1970-01-01T00:00:00.000Z", "Signature": "EXAMPLE", "SigningCertUrl": "EXAMPLE", "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", "Message": message, "MessageAttributes": { "Test": {"Type": "String", "Value": "TestString"}, "TestBinary": {"Type": "Binary", "Value": "TestBinary"}, }, "Type": "Notification", "UnsubscribeUrl": "EXAMPLE", "TopicArn": "arn:aws:sns:EXAMPLE", "Subject": subject or "TestInvoke", }, } ] } func = self._lambdas.get_function_by_name_or_arn(function_name, qualifier) func.invoke(json.dumps(event), {}, {}) def send_dynamodb_items(self, function_arn, items, source): event = { "Records": [ { "eventID": item.to_json()["eventID"], "eventName": "INSERT", "eventVersion": item.to_json()["eventVersion"], "eventSource": item.to_json()["eventSource"], "awsRegion": self.region_name, "dynamodb": item.to_json()["dynamodb"], "eventSourceARN": source, } for item in items ] } func = self._lambdas.get_arn(function_arn) return func.invoke(json.dumps(event), {}, {}) def send_log_event( self, function_arn, filter_name, log_group_name, log_stream_name, log_events ): data = { "messageType": "DATA_MESSAGE", "owner": ACCOUNT_ID, "logGroup": log_group_name, "logStream": log_stream_name, "subscriptionFilters": [filter_name], "logEvents": log_events, } output = io.BytesIO() with GzipFile(fileobj=output, mode="w") as f: f.write(json.dumps(data, separators=(",", ":")).encode("utf-8")) payload_gz_encoded = base64.b64encode(output.getvalue()).decode("utf-8") event = {"awslogs": {"data": payload_gz_encoded}} func = self._lambdas.get_arn(function_arn) return func.invoke(json.dumps(event), {}, {}) def list_tags(self, resource): return self.get_function_by_arn(resource).tags def tag_resource(self, resource, tags): fn = self.get_function_by_arn(resource) if not fn: return False fn.tags.update(tags) return True def untag_resource(self, resource, tagKeys): fn = self.get_function_by_arn(resource) if fn: for key in tagKeys: try: del fn.tags[key] except KeyError: pass # Don't care return True return False def add_permission(self, function_name, raw): fn = self.get_function(function_name) fn.policy.add_statement(raw) def remove_permission(self, function_name, sid, revision=""): fn = self.get_function(function_name) fn.policy.del_statement(sid, revision) def get_code_signing_config(self, function_name): fn = self.get_function(function_name) return fn.get_code_signing_config() def get_policy(self, function_name): fn = self.get_function(function_name) if not fn: raise UnknownFunctionException(function_name) return fn.policy.wire_format() def update_function_code(self, 
function_name, qualifier, body): fn = self.get_function(function_name, qualifier) if fn: if body.get("Publish", False): fn = self.publish_function(function_name) config = fn.update_function_code(body) return config else: return None def update_function_configuration(self, function_name, qualifier, body): fn = self.get_function(function_name, qualifier) return fn.update_configuration(body) if fn else None def invoke(self, function_name, qualifier, body, headers, response_headers): fn = self.get_function(function_name, qualifier) if fn: payload = fn.invoke(body, headers, response_headers) response_headers["Content-Length"] = str(len(payload)) return payload else: return None def put_function_concurrency(self, function_name, reserved_concurrency): fn = self.get_function(function_name) fn.reserved_concurrency = reserved_concurrency return fn.reserved_concurrency def delete_function_concurrency(self, function_name): fn = self.get_function(function_name) fn.reserved_concurrency = None return fn.reserved_concurrency def get_function_concurrency(self, function_name): fn = self.get_function(function_name) return fn.reserved_concurrency def do_validate_s3(): return os.environ.get("VALIDATE_LAMBDA_S3", "") in ["", "1", "true"] lambda_backends = BackendDict(LambdaBackend, "lambda")
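

# Illustrative sketch only, not part of moto's API surface: it shows how the
# backend above is typically reached through the public decorators. The
# function name, role name and runtime are arbitrary placeholders.
if __name__ == "__main__":  # pragma: no cover
    import boto3
    from moto import mock_iam, mock_lambda

    @mock_iam
    @mock_lambda
    def _demo():
        # put_function() validates the execution role, so create one first
        # through the mocked IAM backend.
        iam = boto3.client("iam", region_name="us-east-1")
        role_arn = iam.create_role(
            RoleName="demo-role", AssumeRolePolicyDocument="{}"
        )["Role"]["Arn"]

        # Build a minimal deployment package in memory.
        buf = io.BytesIO()
        with zipfile.ZipFile(buf, "w") as zf:
            zf.writestr("index.py", "def handler(event, ctx): return event")

        client = boto3.client("lambda", region_name="us-east-1")
        client.create_function(
            FunctionName="demo",
            Runtime="python3.7",
            Role=role_arn,
            Handler="index.handler",
            Code={"ZipFile": buf.getvalue()},
        )
        # Invoking would need Docker; reading the function back does not.
        print(client.list_functions()["Functions"][0]["FunctionArn"])

    _demo()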
36.404734
184
0.601115
a46ae01ec069a948aeea795e886d39818f50aaa8
191858
py
Python
nova/tests/unit/api/openstack/compute/test_serversV21.py
sarafraj-singh/nova
677594480ecb9c093a3d81ae49dead120798a5c4
[ "Apache-2.0" ]
null
null
null
nova/tests/unit/api/openstack/compute/test_serversV21.py
sarafraj-singh/nova
677594480ecb9c093a3d81ae49dead120798a5c4
[ "Apache-2.0" ]
null
null
null
nova/tests/unit/api/openstack/compute/test_serversV21.py
sarafraj-singh/nova
677594480ecb9c093a3d81ae49dead120798a5c4
[ "Apache-2.0" ]
1
2020-07-24T01:18:44.000Z
2020-07-24T01:18:44.000Z
# Copyright 2010-2011 OpenStack Foundation # Copyright 2011 Piston Cloud Computing, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import base64 import collections import copy import datetime import uuid import fixtures import iso8601 import mock from mox3 import mox from oslo_policy import policy as oslo_policy from oslo_serialization import jsonutils from oslo_utils import timeutils from six.moves import range import six.moves.urllib.parse as urlparse import testtools import webob from nova.api.openstack import api_version_request from nova.api.openstack import common from nova.api.openstack import compute from nova.api.openstack.compute import disk_config from nova.api.openstack.compute import extension_info from nova.api.openstack.compute import ips from nova.api.openstack.compute import keypairs from nova.api.openstack.compute.schemas import servers as servers_schema from nova.api.openstack.compute import servers from nova.api.openstack.compute import views from nova.api.openstack import extensions from nova.api.openstack import wsgi as os_wsgi from nova import availability_zones from nova.compute import api as compute_api from nova.compute import flavors from nova.compute import task_states from nova.compute import vm_states import nova.conf from nova import context from nova import db from nova.db.sqlalchemy import models from nova import exception from nova.image import glance from nova.network import manager from nova import objects from nova.objects import instance as instance_obj from nova import policy from nova import test from nova.tests.unit.api.openstack import fakes from nova.tests.unit import fake_instance from nova.tests.unit import fake_network from nova.tests.unit.image import fake from nova.tests.unit import matchers from nova.tests import uuidsentinel as uuids from nova import utils as nova_utils CONF = nova.conf.CONF FAKE_UUID = fakes.FAKE_UUID INSTANCE_IDS = {FAKE_UUID: 1} FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS def fake_gen_uuid(): return FAKE_UUID def return_servers_empty(context, *args, **kwargs): return objects.InstanceList(objects=[]) def instance_update_and_get_original(context, instance_uuid, values, columns_to_join=None, ): inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid), name=values.get('display_name')) inst = dict(inst, **values) return (inst, inst) def instance_update(context, instance_uuid, values): inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid), name=values.get('display_name')) inst = dict(inst, **values) return inst def fake_compute_api(cls, req, id): return True def fake_start_stop_not_ready(self, context, instance): raise exception.InstanceNotReady(instance_id=instance["uuid"]) def fake_start_stop_invalid_state(self, context, instance): raise exception.InstanceInvalidState( instance_uuid=instance['uuid'], attr='fake_attr', method='fake_method', state='fake_state') def fake_instance_get_by_uuid_not_found(context, uuid, columns_to_join, use_slave=False): raise exception.InstanceNotFound(instance_id=uuid) def 
fake_instance_get_all_with_locked(context, list_locked, **kwargs): obj_list = [] s_id = 0 for locked in list_locked: uuid = fakes.get_fake_uuid(locked) s_id = s_id + 1 kwargs['locked_by'] = None if locked == 'not_locked' else locked server = fakes.stub_instance_obj(context, id=s_id, uuid=uuid, **kwargs) obj_list.append(server) return objects.InstanceList(objects=obj_list) def fake_instance_get_all_with_description(context, list_desc, **kwargs): obj_list = [] s_id = 0 for desc in list_desc: uuid = fakes.get_fake_uuid(desc) s_id = s_id + 1 kwargs['display_description'] = desc server = fakes.stub_instance_obj(context, id=s_id, uuid=uuid, **kwargs) obj_list.append(server) return objects.InstanceList(objects=obj_list) class MockSetAdminPassword(object): def __init__(self): self.instance_id = None self.password = None def __call__(self, context, instance_id, password): self.instance_id = instance_id self.password = password class Base64ValidationTest(test.TestCase): def setUp(self): super(Base64ValidationTest, self).setUp() ext_info = extension_info.LoadedExtensionInfo() self.controller = servers.ServersController(extension_info=ext_info) def test_decode_base64(self): value = "A random string" result = self.controller._decode_base64(base64.b64encode(value)) self.assertEqual(result, value) def test_decode_base64_binary(self): value = "\x00\x12\x75\x99" result = self.controller._decode_base64(base64.b64encode(value)) self.assertEqual(result, value) def test_decode_base64_whitespace(self): value = "A random string" encoded = base64.b64encode(value) white = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:]) result = self.controller._decode_base64(white) self.assertEqual(result, value) def test_decode_base64_invalid(self): invalid = "A random string" result = self.controller._decode_base64(invalid) self.assertIsNone(result) def test_decode_base64_illegal_bytes(self): value = "A random string" encoded = base64.b64encode(value) white = ">\x01%s*%s()" % (encoded[:2], encoded[2:]) result = self.controller._decode_base64(white) self.assertIsNone(result) class ControllerTest(test.TestCase): def setUp(self): super(ControllerTest, self).setUp() self.flags(verbose=True, use_ipv6=False) fakes.stub_out_rate_limiting(self.stubs) fakes.stub_out_key_pair_funcs(self.stubs) fake.stub_out_image_service(self) return_server = fakes.fake_compute_get() return_servers = fakes.fake_compute_get_all() # Server sort keys extension is enabled in v21 so sort data is passed # to the instance API and the sorted DB API is invoked self.stubs.Set(compute_api.API, 'get_all', lambda api, *a, **k: return_servers(*a, **k)) self.stubs.Set(compute_api.API, 'get', lambda api, *a, **k: return_server(*a, **k)) self.stub_out('nova.db.instance_update_and_get_original', instance_update_and_get_original) self.flags(group='glance', api_servers=['http://localhost:9292']) ext_info = extension_info.LoadedExtensionInfo() self.controller = servers.ServersController(extension_info=ext_info) self.ips_controller = ips.IPsController() policy.reset() policy.init() fake_network.stub_out_nw_api_get_instance_nw_info(self) class ServersControllerTest(ControllerTest): wsgi_api_version = os_wsgi.DEFAULT_API_VERSION def req(self, url, use_admin_context=False): return fakes.HTTPRequest.blank(url, use_admin_context=use_admin_context, version=self.wsgi_api_version) def test_requested_networks_prefix(self): uuid = 'br-00000000-0000-0000-0000-000000000000' requested_networks = [{'uuid': uuid}] res = self.controller._get_requested_networks(requested_networks) 
self.assertIn((uuid, None), res.as_tuples()) def test_requested_networks_neutronv2_enabled_with_port(self): self.flags(use_neutron=True) port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'port': port}] res = self.controller._get_requested_networks(requested_networks) self.assertEqual([(None, None, port, None)], res.as_tuples()) def test_requested_networks_neutronv2_enabled_with_network(self): self.flags(use_neutron=True) network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' requested_networks = [{'uuid': network}] res = self.controller._get_requested_networks(requested_networks) self.assertEqual([(network, None, None, None)], res.as_tuples()) def test_requested_networks_neutronv2_enabled_with_network_and_port(self): self.flags(use_neutron=True) network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network, 'port': port}] res = self.controller._get_requested_networks(requested_networks) self.assertEqual([(None, None, port, None)], res.as_tuples()) def test_requested_networks_with_duplicate_networks(self): # duplicate networks are allowed only for nova neutron v2.0 network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' requested_networks = [{'uuid': network}, {'uuid': network}] self.assertRaises( webob.exc.HTTPBadRequest, self.controller._get_requested_networks, requested_networks) def test_requested_networks_with_neutronv2_and_duplicate_networks(self): # duplicate networks are allowed only for nova neutron v2.0 self.flags(use_neutron=True) network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' requested_networks = [{'uuid': network}, {'uuid': network}] res = self.controller._get_requested_networks(requested_networks) self.assertEqual([(network, None, None, None), (network, None, None, None)], res.as_tuples()) def test_requested_networks_neutronv2_enabled_conflict_on_fixed_ip(self): self.flags(use_neutron=True) network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' addr = '10.0.0.1' requested_networks = [{'uuid': network, 'fixed_ip': addr, 'port': port}] self.assertRaises( webob.exc.HTTPBadRequest, self.controller._get_requested_networks, requested_networks) def test_requested_networks_neutronv2_disabled_with_port(self): port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'port': port}] self.assertRaises( webob.exc.HTTPBadRequest, self.controller._get_requested_networks, requested_networks) def test_requested_networks_api_enabled_with_v2_subclass(self): self.flags(use_neutron=True) network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network, 'port': port}] res = self.controller._get_requested_networks(requested_networks) self.assertEqual([(None, None, port, None)], res.as_tuples()) def test_get_server_by_uuid(self): req = self.req('/fake/servers/%s' % FAKE_UUID) res_dict = self.controller.show(req, FAKE_UUID) self.assertEqual(res_dict['server']['id'], FAKE_UUID) def test_get_server_joins_pci_devices(self): def fake_get(_self, *args, **kwargs): expected_attrs = kwargs['expected_attrs'] self.assertEqual(['flavor', 'info_cache', 'metadata', 'numa_topology', 'pci_devices'], expected_attrs) ctxt = context.RequestContext('fake', 'fake') return fake_instance.fake_instance_obj( ctxt, expected_attrs=expected_attrs) self.stubs.Set(compute_api.API, 'get', fake_get) req = self.req('/fake/servers/%s' % FAKE_UUID) self.controller.show(req, FAKE_UUID) def test_unique_host_id(self): """Create two servers 
with the same host and different project_ids and check that the host_id's are unique. """ def return_instance_with_host(context, *args, **kwargs): project_id = str(uuid.uuid4()) return fakes.stub_instance_obj(context, id=1, uuid=FAKE_UUID, project_id=project_id, host='fake_host') self.stubs.Set(compute_api.API, 'get', return_instance_with_host) req = self.req('/fake/servers/%s' % FAKE_UUID) with mock.patch.object(compute_api.API, 'get') as mock_get: mock_get.side_effect = return_instance_with_host server1 = self.controller.show(req, FAKE_UUID) server2 = self.controller.show(req, FAKE_UUID) self.assertNotEqual(server1['server']['hostId'], server2['server']['hostId']) def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark, status="ACTIVE", progress=100): return { "server": { "id": uuid, "user_id": "fake_user", "tenant_id": "fake_project", "updated": "2010-11-11T11:00:00Z", "created": "2010-10-10T12:00:00Z", "progress": progress, "name": "server2", "status": status, "hostId": '', "image": { "id": "10", "links": [ { "rel": "bookmark", "href": image_bookmark, }, ], }, "flavor": { "id": "2", "links": [ { "rel": "bookmark", "href": flavor_bookmark, }, ], }, "addresses": { 'test1': [ {'version': 4, 'addr': '192.168.1.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 6, 'addr': '2001:db8:0:1::1', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'} ] }, "metadata": { "seq": "2", }, "links": [ { "rel": "self", "href": "http://localhost/v2/fake/servers/%s" % uuid, }, { "rel": "bookmark", "href": "http://localhost/fake/servers/%s" % uuid, }, ], } } def test_get_server_by_id(self): self.flags(use_ipv6=True) image_bookmark = "http://localhost/fake/images/10" flavor_bookmark = "http://localhost/fake/flavors/2" uuid = FAKE_UUID req = self.req('/v2/fake/servers/%s' % uuid) res_dict = self.controller.show(req, uuid) expected_server = self._get_server_data_dict(uuid, image_bookmark, flavor_bookmark, status="BUILD", progress=0) expected_server['server']['name'] = 'server1' expected_server['server']['metadata']['seq'] = '1' self.assertThat(res_dict, matchers.DictMatches(expected_server)) def test_get_server_with_active_status_by_id(self): image_bookmark = "http://localhost/fake/images/10" flavor_bookmark = "http://localhost/fake/flavors/2" new_return_server = fakes.fake_compute_get( id=2, vm_state=vm_states.ACTIVE, progress=100) self.stubs.Set(compute_api.API, 'get', lambda api, *a, **k: new_return_server(*a, **k)) uuid = FAKE_UUID req = self.req('/fake/servers/%s' % uuid) res_dict = self.controller.show(req, uuid) expected_server = self._get_server_data_dict(uuid, image_bookmark, flavor_bookmark) self.assertThat(res_dict, matchers.DictMatches(expected_server)) def test_get_server_with_id_image_ref_by_id(self): image_ref = "10" image_bookmark = "http://localhost/fake/images/10" flavor_id = "1" flavor_bookmark = "http://localhost/fake/flavors/2" new_return_server = fakes.fake_compute_get( id=2, vm_state=vm_states.ACTIVE, image_ref=image_ref, flavor_id=flavor_id, progress=100) self.stubs.Set(compute_api.API, 'get', lambda api, *a, **k: new_return_server(*a, **k)) uuid = FAKE_UUID req = self.req('/fake/servers/%s' % uuid) res_dict = self.controller.show(req, uuid) expected_server = self._get_server_data_dict(uuid, image_bookmark, flavor_bookmark) self.assertThat(res_dict, matchers.DictMatches(expected_server)) def test_get_server_addresses_from_cache(self): pub0 = ('172.19.0.1', '172.19.0.2',) pub1 = ('1.2.3.4',) pub2 = 
('b33f::fdee:ddff:fecc:bbaa',) priv0 = ('192.168.0.3', '192.168.0.4',) def _ip(ip): return {'address': ip, 'type': 'fixed'} nw_cache = [ {'address': 'aa:aa:aa:aa:aa:aa', 'id': 1, 'network': {'bridge': 'br0', 'id': 1, 'label': 'public', 'subnets': [{'cidr': '172.19.0.0/24', 'ips': [_ip(ip) for ip in pub0]}, {'cidr': '1.2.3.0/16', 'ips': [_ip(ip) for ip in pub1]}, {'cidr': 'b33f::/64', 'ips': [_ip(ip) for ip in pub2]}]}}, {'address': 'bb:bb:bb:bb:bb:bb', 'id': 2, 'network': {'bridge': 'br1', 'id': 2, 'label': 'private', 'subnets': [{'cidr': '192.168.0.0/24', 'ips': [_ip(ip) for ip in priv0]}]}}] return_server = fakes.fake_compute_get(nw_cache=nw_cache) self.stubs.Set(compute_api.API, 'get', lambda api, *a, **k: return_server(*a, **k)) req = self.req('/fake/servers/%s/ips' % FAKE_UUID) res_dict = self.ips_controller.index(req, FAKE_UUID) expected = { 'addresses': { 'private': [ {'version': 4, 'addr': '192.168.0.3'}, {'version': 4, 'addr': '192.168.0.4'}, ], 'public': [ {'version': 4, 'addr': '172.19.0.1'}, {'version': 4, 'addr': '172.19.0.2'}, {'version': 4, 'addr': '1.2.3.4'}, {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa'}, ], }, } self.assertThat(res_dict, matchers.DictMatches(expected)) # Make sure we kept the addresses in order self.assertIsInstance(res_dict['addresses'], collections.OrderedDict) labels = [vif['network']['label'] for vif in nw_cache] for index, label in enumerate(res_dict['addresses'].keys()): self.assertEqual(label, labels[index]) def test_get_server_addresses_nonexistent_network(self): url = '/v2/fake/servers/%s/ips/network_0' % FAKE_UUID req = self.req(url) self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show, req, FAKE_UUID, 'network_0') def test_get_server_addresses_nonexistent_server(self): def fake_instance_get(*args, **kwargs): raise exception.InstanceNotFound(instance_id='fake') self.stubs.Set(compute_api.API, 'get', fake_instance_get) server_id = str(uuid.uuid4()) req = self.req('/fake/servers/%s/ips' % server_id) self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.index, req, server_id) def test_get_server_list_empty(self): self.stubs.Set(compute_api.API, 'get_all', return_servers_empty) req = self.req('/fake/servers') res_dict = self.controller.index(req) num_servers = len(res_dict['servers']) self.assertEqual(0, num_servers) def test_get_server_list_with_reservation_id(self): req = self.req('/fake/servers?reservation_id=foo') res_dict = self.controller.index(req) i = 0 for s in res_dict['servers']: self.assertEqual(s.get('name'), 'server%d' % (i + 1)) i += 1 def test_get_server_list_with_reservation_id_empty(self): req = self.req('/fake/servers/detail?' 'reservation_id=foo') res_dict = self.controller.detail(req) i = 0 for s in res_dict['servers']: self.assertEqual(s.get('name'), 'server%d' % (i + 1)) i += 1 def test_get_server_list_with_reservation_id_details(self): req = self.req('/fake/servers/detail?' 
'reservation_id=foo') res_dict = self.controller.detail(req) i = 0 for s in res_dict['servers']: self.assertEqual(s.get('name'), 'server%d' % (i + 1)) i += 1 def test_get_server_list(self): req = self.req('/fake/servers') res_dict = self.controller.index(req) self.assertEqual(len(res_dict['servers']), 5) for i, s in enumerate(res_dict['servers']): self.assertEqual(s['id'], fakes.get_fake_uuid(i)) self.assertEqual(s['name'], 'server%d' % (i + 1)) self.assertIsNone(s.get('image', None)) expected_links = [ { "rel": "self", "href": "http://localhost/v2/fake/servers/%s" % s['id'], }, { "rel": "bookmark", "href": "http://localhost/fake/servers/%s" % s['id'], }, ] self.assertEqual(s['links'], expected_links) def test_get_servers_with_limit(self): req = self.req('/fake/servers?limit=3') res_dict = self.controller.index(req) servers = res_dict['servers'] self.assertEqual([s['id'] for s in servers], [fakes.get_fake_uuid(i) for i in range(len(servers))]) servers_links = res_dict['servers_links'] self.assertEqual(servers_links[0]['rel'], 'next') href_parts = urlparse.urlparse(servers_links[0]['href']) self.assertEqual('/v2/fake/servers', href_parts.path) params = urlparse.parse_qs(href_parts.query) expected_params = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]} self.assertThat(params, matchers.DictMatches(expected_params)) def test_get_servers_with_limit_bad_value(self): req = self.req('/fake/servers?limit=aaa') self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req) def test_get_server_details_empty(self): self.stubs.Set(compute_api.API, 'get_all', return_servers_empty) req = self.req('/fake/servers/detail') res_dict = self.controller.detail(req) num_servers = len(res_dict['servers']) self.assertEqual(0, num_servers) def test_get_server_details_with_limit(self): req = self.req('/fake/servers/detail?limit=3') res = self.controller.detail(req) servers = res['servers'] self.assertEqual([s['id'] for s in servers], [fakes.get_fake_uuid(i) for i in range(len(servers))]) servers_links = res['servers_links'] self.assertEqual(servers_links[0]['rel'], 'next') href_parts = urlparse.urlparse(servers_links[0]['href']) self.assertEqual('/v2/fake/servers/detail', href_parts.path) params = urlparse.parse_qs(href_parts.query) expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]} self.assertThat(params, matchers.DictMatches(expected)) def test_get_server_details_with_limit_bad_value(self): req = self.req('/fake/servers/detail?limit=aaa') self.assertRaises(webob.exc.HTTPBadRequest, self.controller.detail, req) def test_get_server_details_with_limit_and_other_params(self): req = self.req('/fake/servers/detail' '?limit=3&blah=2:t' '&sort_key=id1&sort_dir=asc') res = self.controller.detail(req) servers = res['servers'] self.assertEqual([s['id'] for s in servers], [fakes.get_fake_uuid(i) for i in range(len(servers))]) servers_links = res['servers_links'] self.assertEqual(servers_links[0]['rel'], 'next') href_parts = urlparse.urlparse(servers_links[0]['href']) self.assertEqual('/v2/fake/servers/detail', href_parts.path) params = urlparse.parse_qs(href_parts.query) expected = {'limit': ['3'], 'blah': ['2:t'], 'sort_key': ['id1'], 'sort_dir': ['asc'], 'marker': [fakes.get_fake_uuid(2)]} self.assertThat(params, matchers.DictMatches(expected)) def test_get_servers_with_too_big_limit(self): req = self.req('/fake/servers?limit=30') res_dict = self.controller.index(req) self.assertNotIn('servers_links', res_dict) def test_get_servers_with_bad_limit(self): req = self.req('/fake/servers?limit=asdf') 
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req) def test_get_servers_with_marker(self): url = '/v2/fake/servers?marker=%s' % fakes.get_fake_uuid(2) req = self.req(url) servers = self.controller.index(req)['servers'] self.assertEqual([s['name'] for s in servers], ["server4", "server5"]) def test_get_servers_with_limit_and_marker(self): url = ('/v2/fake/servers?limit=2&marker=%s' % fakes.get_fake_uuid(1)) req = self.req(url) servers = self.controller.index(req)['servers'] self.assertEqual([s['name'] for s in servers], ['server3', 'server4']) def test_get_servers_with_bad_marker(self): req = self.req('/fake/servers?limit=2&marker=asdf') self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req) def test_get_servers_with_bad_option(self): server_uuid = str(uuid.uuid4()) def fake_get_all(compute_self, context, search_opts=None, limit=None, marker=None, want_objects=False, expected_attrs=None, sort_keys=None, sort_dirs=None): db_list = [fakes.stub_instance(100, uuid=server_uuid)] return instance_obj._make_instance_list( context, objects.InstanceList(), db_list, FIELDS) self.stubs.Set(compute_api.API, 'get_all', fake_get_all) req = self.req('/fake/servers?unknownoption=whee') servers = self.controller.index(req)['servers'] self.assertEqual(len(servers), 1) self.assertEqual(servers[0]['id'], server_uuid) def test_get_servers_allows_image(self): server_uuid = str(uuid.uuid4()) def fake_get_all(compute_self, context, search_opts=None, limit=None, marker=None, want_objects=False, expected_attrs=None, sort_keys=None, sort_dirs=None): self.assertIsNotNone(search_opts) self.assertIn('image', search_opts) self.assertEqual(search_opts['image'], '12345') db_list = [fakes.stub_instance(100, uuid=server_uuid)] return instance_obj._make_instance_list( context, objects.InstanceList(), db_list, FIELDS) self.stubs.Set(compute_api.API, 'get_all', fake_get_all) req = self.req('/fake/servers?image=12345') servers = self.controller.index(req)['servers'] self.assertEqual(len(servers), 1) self.assertEqual(servers[0]['id'], server_uuid) def test_tenant_id_filter_no_admin_context(self): def fake_get_all(context, search_opts=None, **kwargs): self.assertNotEqual(search_opts, None) self.assertEqual(search_opts['project_id'], 'fake') return [fakes.stub_instance_obj(100)] req = self.req('/fake/servers?tenant_id=newfake') with mock.patch.object(compute_api.API, 'get_all') as mock_get: mock_get.side_effect = fake_get_all servers = self.controller.index(req)['servers'] self.assertEqual(len(servers), 1) def test_all_tenants_param_normal(self): def fake_get_all(context, search_opts=None, **kwargs): self.assertNotIn('project_id', search_opts) return [fakes.stub_instance_obj(100)] req = self.req('/fake/servers?all_tenants', use_admin_context=True) with mock.patch.object(compute_api.API, 'get_all') as mock_get: mock_get.side_effect = fake_get_all servers = self.controller.index(req)['servers'] self.assertEqual(len(servers), 1) def test_all_tenants_param_one(self): def fake_get_all(api, context, search_opts=None, **kwargs): self.assertNotIn('project_id', search_opts) return [fakes.stub_instance_obj(100)] self.stubs.Set(compute_api.API, 'get_all', fake_get_all) req = self.req('/fake/servers?all_tenants=1', use_admin_context=True) servers = self.controller.index(req)['servers'] self.assertEqual(len(servers), 1) def test_all_tenants_param_zero(self): def fake_get_all(api, context, search_opts=None, **kwargs): self.assertNotIn('all_tenants', search_opts) return [fakes.stub_instance_obj(100)] 
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?all_tenants=0',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)

    def test_all_tenants_param_false(self):
        def fake_get_all(api, context, search_opts=None, **kwargs):
            self.assertNotIn('all_tenants', search_opts)
            return [fakes.stub_instance_obj(100)]

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?all_tenants=false',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)

    def test_all_tenants_param_invalid(self):
        def fake_get_all(api, context, search_opts=None, **kwargs):
            self.assertNotIn('all_tenants', search_opts)
            return [fakes.stub_instance_obj(100)]

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?all_tenants=xxx',
                       use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)

    def test_admin_restricted_tenant(self):
        def fake_get_all(api, context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertEqual(search_opts['project_id'], 'fake')
            return [fakes.stub_instance_obj(100)]

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers', use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)

    def test_all_tenants_pass_policy(self):
        def fake_get_all(api, context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertNotIn('project_id', search_opts)
            self.assertTrue(context.is_admin)
            return [fakes.stub_instance_obj(100)]

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        rules = {
            "os_compute_api:servers:index": "project_id:fake",
            "os_compute_api:servers:index:get_all_tenants":
                "project_id:fake"
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))

        req = self.req('/fake/servers?all_tenants=1')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)

    def test_all_tenants_fail_policy(self):
        def fake_get_all(api, context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            return [fakes.stub_instance_obj(100)]

        rules = {
            "os_compute_api:servers:index:get_all_tenants":
                "project_id:non_fake",
            "os_compute_api:servers:get_all": "project_id:fake",
        }

        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?all_tenants=1')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.index, req)

    def test_get_servers_allows_flavor(self):
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('flavor', search_opts)
            # flavor is an integer ID
            self.assertEqual(search_opts['flavor'], '12345')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?flavor=12345')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_with_bad_flavor(self):
        req = self.req('/fake/servers?flavor=abcde')
        with mock.patch.object(compute_api.API, 'get_all') as mock_get:
            mock_get.return_value = objects.InstanceList(objects=[])
            servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 0)
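
    # NOTE: the two bad-flavor tests (index above, detail below) expect an
    # empty result set rather than an HTTP error when the flavor filter
    # matches nothing; compute_api.API.get_all is mocked to return an empty
    # InstanceList to exercise that path.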

    def test_get_server_details_with_bad_flavor(self):
        req = self.req('/fake/servers?flavor=abcde')
        with mock.patch.object(compute_api.API, 'get_all') as mock_get:
            mock_get.return_value = objects.InstanceList(objects=[])
            servers = self.controller.detail(req)['servers']

        self.assertThat(servers, testtools.matchers.HasLength(0))

    def test_get_servers_allows_status(self):
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?status=active')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_allows_task_status(self):
        server_uuid = str(uuid.uuid4())
        task_state = task_states.REBOOTING

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('task_state', search_opts)
            self.assertEqual([task_states.REBOOT_PENDING,
                              task_states.REBOOT_STARTED,
                              task_states.REBOOTING],
                             search_opts['task_state'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(
                    100, uuid=server_uuid, task_state=task_state)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?status=reboot')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_resize_status(self):
        # Test when resize status, it maps list of vm states.
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'],
                             [vm_states.ACTIVE, vm_states.STOPPED])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?status=resize')
        servers = self.controller.detail(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_invalid_status(self):
        # Test getting servers by invalid status.
        req = self.req('/fake/servers?status=baloney',
                       use_admin_context=False)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 0)

    def test_get_servers_deleted_status_as_user(self):
        req = self.req('/fake/servers?status=deleted',
                       use_admin_context=False)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.detail, req)

    def test_get_servers_deleted_status_as_admin(self):
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], ['deleted'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?status=deleted',
                       use_admin_context=True)

        servers = self.controller.detail(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_deleted_filter_str_to_bool(self, mock_get_all):
        server_uuid = str(uuid.uuid4())

        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=server_uuid,
                                             vm_state='deleted')])
        mock_get_all.return_value = db_list

        req = self.req('/fake/servers?deleted=true',
                       use_admin_context=True)

        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(server_uuid, servers[0]['id'])

        # Assert that 'deleted' filter value is converted to boolean
        # while calling get_all() method.
        expected_search_opts = {'deleted': True, 'project_id': 'fake'}
        mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=expected_search_opts, limit=mock.ANY,
            expected_attrs=['flavor', 'info_cache', 'metadata',
                            'pci_devices'],
            marker=mock.ANY, want_objects=mock.ANY,
            sort_keys=mock.ANY, sort_dirs=mock.ANY)

    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_deleted_filter_invalid_str(self, mock_get_all):
        server_uuid = str(uuid.uuid4())

        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        mock_get_all.return_value = db_list

        req = fakes.HTTPRequest.blank('/fake/servers?deleted=abc',
                                      use_admin_context=True)

        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(server_uuid, servers[0]['id'])

        # Assert that invalid 'deleted' filter value is converted to boolean
        # False while calling get_all() method.
        expected_search_opts = {'deleted': False, 'project_id': 'fake'}
        mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=expected_search_opts, limit=mock.ANY,
            expected_attrs=['flavor', 'info_cache', 'metadata',
                            'pci_devices'],
            marker=mock.ANY, want_objects=mock.ANY,
            sort_keys=mock.ANY, sort_dirs=mock.ANY)

    def test_get_servers_allows_name(self):
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertEqual(search_opts['name'], 'whee.*')
            self.assertEqual(['pci_devices'], expected_attrs)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?name=whee.*')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_flavor_not_found(self, get_all_mock):
        get_all_mock.side_effect = exception.FlavorNotFound(flavor_id=1)

        req = fakes.HTTPRequest.blank(
            '/fake/servers?status=active&flavor=abc')
        servers = self.controller.index(req)['servers']
        self.assertEqual(0, len(servers))

    def test_get_servers_allows_changes_since(self):
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        params = 'changes-since=2011-01-24T17:08:01Z'
        req = self.req('/fake/servers?%s' % params)
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_allows_changes_since_bad_value(self):
        params = 'changes-since=asdf'
        req = self.req('/fake/servers?%s' % params)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)

    def test_get_servers_admin_filters_as_user(self):
        """Test getting servers by admin-only or unknown options when
        context is not admin.

        Make sure the admin and unknown options are stripped before
        they get to compute_api.get_all()
        """
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('ip', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertNotIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        req = fakes.HTTPRequest.blank('/fake/servers?%s' % query_str)
        res = self.controller.index(req)

        servers = res['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_admin_options_as_admin(self):
        """Test getting servers by admin-only or unknown options when
        context is admin. All options should be passed
        """
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            self.assertIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        req = self.req('/fake/servers?%s' % query_str,
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_allows_ip(self):
        """Test getting servers by ip."""
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip', search_opts)
            self.assertEqual(search_opts['ip'], '10\..*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?ip=10\..*')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_admin_allows_ip6(self):
        """Test getting servers by ip6 with admin_api enabled and
        admin context
        """
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?ip6=ffff.*',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_allows_ip6_with_new_version(self):
        """Test getting servers by ip6 with new version requested
        and no admin context
        """
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers?ip6=ffff.*')
        req.api_version_request = api_version_request.APIVersionRequest('2.5')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_all_server_details(self):
        expected_flavor = {
            "id": "2",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/fake/flavors/2',
                },
            ],
        }
        expected_image = {
            "id": "10",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/fake/images/10',
                },
            ],
        }
        req = self.req('/fake/servers/detail')
        res_dict = self.controller.detail(req)

        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], '')
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertEqual(s['image'], expected_image)
            self.assertEqual(s['flavor'], expected_flavor)
            self.assertEqual(s['status'], 'BUILD')
            self.assertEqual(s['metadata']['seq'], str(i + 1))

    def test_get_all_server_details_with_host(self):
        """We want to make sure that if two instances are on the same host,
        then they return the same hostId. If two instances are on different
        hosts, they should return different hostIds. In this test,
        there are 5 instances - 2 on one host and 3 on another.
        """

        def return_servers_with_host(*args, **kwargs):
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(None,
                                                 id=i + 1,
                                                 user_id='fake',
                                                 project_id='fake',
                                                 host=i % 2,
                                                 uuid=fakes.get_fake_uuid(i))
                         for i in range(5)])

        self.stubs.Set(compute_api.API, 'get_all', return_servers_with_host)

        req = self.req('/fake/servers/detail')
        res_dict = self.controller.detail(req)

        server_list = res_dict['servers']
        host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
        self.assertTrue(host_ids[0] and host_ids[1])
        self.assertNotEqual(host_ids[0], host_ids[1])

        for i, s in enumerate(server_list):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], host_ids[i % 2])
            self.assertEqual(s['name'], 'server%d' % (i + 1))

    def test_get_servers_joins_pci_devices(self):
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            self.assertEqual(['pci_devices'], expected_attrs)
            return []

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = self.req('/fake/servers', use_admin_context=True)
        self.assertIn('servers', self.controller.index(req))


class ServersControllerTestV29(ServersControllerTest):
    wsgi_api_version = '2.9'

    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        server_dict = super(ServersControllerTestV29,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']['locked'] = False
        return server_dict

    @mock.patch.object(compute_api.API, 'get')
    def _test_get_server_with_lock(self, locked_by, get_mock):
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        uuid = FAKE_UUID
        get_mock.side_effect = fakes.fake_compute_get(
            id=2, locked_by=locked_by, uuid=uuid)

        req = self.req('/fake/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)

        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     status="BUILD",
                                                     progress=0)
        expected_server['server']['locked'] = True if locked_by else False
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
        return res_dict

    def test_get_server_with_locked_by_admin(self):
        res_dict = self._test_get_server_with_lock('admin')
        self.assertTrue(res_dict['server']['locked'])

    def test_get_server_with_locked_by_owner(self):
        res_dict = self._test_get_server_with_lock('owner')
        self.assertTrue(res_dict['server']['locked'])

    def test_get_server_not_locked(self):
        res_dict = self._test_get_server_with_lock(None)
        self.assertFalse(res_dict['server']['locked'])

    @mock.patch.object(compute_api.API, 'get_all')
    def _test_list_server_detail_with_lock(self, s1_locked, s2_locked,
                                           get_all_mock):
        get_all_mock.return_value = fake_instance_get_all_with_locked(
            context, [s1_locked, s2_locked])

        req = self.req('/fake/servers/detail')
        servers_list = self.controller.detail(req)
        # Check that each returned server has the same 'locked' value
        # and 'id' as they were created.
        for locked in [s1_locked, s2_locked]:
            server = next(server for server in servers_list['servers']
                          if (server['id'] == fakes.get_fake_uuid(locked)))
            expected = False if locked == 'not_locked' else True
            self.assertEqual(expected, server['locked'])

    def test_list_server_detail_with_locked_s1_admin_s2_owner(self):
        self._test_list_server_detail_with_lock('admin', 'owner')

    def test_list_server_detail_with_locked_s1_owner_s2_admin(self):
        self._test_list_server_detail_with_lock('owner', 'admin')

    def test_list_server_detail_with_locked_s1_admin_s2_admin(self):
        self._test_list_server_detail_with_lock('admin', 'admin')

    def test_list_server_detail_with_locked_s1_admin_s2_not_locked(self):
        self._test_list_server_detail_with_lock('admin', 'not_locked')

    def test_list_server_detail_with_locked_s1_s2_not_locked(self):
        self._test_list_server_detail_with_lock('not_locked', 'not_locked')

    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_remove_non_search_options(self, get_all_mock):
        req = fakes.HTTPRequestV21.blank('/servers'
                                         '?sort_key=id1&sort_dir=asc'
                                         '&sort_key=id2&sort_dir=desc'
                                         '&limit=1&marker=123',
                                         use_admin_context=True)
        self.controller.index(req)
        kwargs = get_all_mock.call_args[1]
        search_opts = kwargs['search_opts']
        for key in ('sort_key', 'sort_dir', 'limit', 'marker'):
            self.assertNotIn(key, search_opts)


class ServersControllerTestV219(ServersControllerTest):
    wsgi_api_version = '2.19'

    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100,
                              description=None):
        server_dict = super(ServersControllerTestV219,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']['locked'] = False
        server_dict['server']['description'] = description
        return server_dict

    @mock.patch.object(compute_api.API, 'get')
    def _test_get_server_with_description(self, description, get_mock):
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        uuid = FAKE_UUID
        get_mock.side_effect = fakes.fake_compute_get(
            id=2, display_description=description, uuid=uuid)

        req = self.req('/fake/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)

        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     status="BUILD",
                                                     progress=0,
                                                     description=description)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
        return res_dict

    @mock.patch.object(compute_api.API, 'get_all')
    def _test_list_server_detail_with_descriptions(self, s1_desc, s2_desc,
                                                   get_all_mock):
        get_all_mock.return_value = fake_instance_get_all_with_description(
            context, [s1_desc, s2_desc])

        req = self.req('/fake/servers/detail')
        servers_list = self.controller.detail(req)
        # Check that each returned server has the same 'description' value
        # and 'id' as they were created.
        for desc in [s1_desc, s2_desc]:
            server = next(server for server in servers_list['servers']
                          if (server['id'] == fakes.get_fake_uuid(desc)))
            expected = desc
            self.assertEqual(expected, server['description'])

    def test_get_server_with_description(self):
        self._test_get_server_with_description('test desc')

    def test_list_server_detail_with_descriptions(self):
        self._test_list_server_detail_with_descriptions('desc1', 'desc2')


class ServersControllerTestV226(ControllerTest):
    wsgi_api_version = '2.26'

    @mock.patch.object(compute_api.API, 'get')
    def test_get_server_with_tags_by_id(self, mock_get):
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID,
                                      version=self.wsgi_api_version)
        ctxt = req.environ['nova.context']
        fake_server = fakes.stub_instance_obj(
            ctxt, id=2, vm_state=vm_states.ACTIVE, progress=100)

        tags = ['tag1', 'tag2']
        tag_list = objects.TagList(objects=[
            objects.Tag(resource_id=FAKE_UUID, tag=tag)
            for tag in tags])

        fake_server.tags = tag_list
        mock_get.return_value = fake_server

        res_dict = self.controller.show(req, FAKE_UUID)

        self.assertIn('tags', res_dict['server'])
        self.assertEqual(res_dict['server']['tags'], tags)

    @mock.patch.object(compute_api.API, 'get_all')
    def _test_get_servers_allows_tag_filters(self, filter_name,
                                             mock_get_all):
        server_uuid = str(uuid.uuid4())
        req = fakes.HTTPRequest.blank('/fake/servers?%s=t1,t2' % filter_name,
                                      version=self.wsgi_api_version)
        ctxt = req.environ['nova.context']

        def fake_get_all(*a, **kw):
            self.assertIsNotNone(kw['search_opts'])
            self.assertIn(filter_name, kw['search_opts'])
            self.assertEqual(kw['search_opts'][filter_name], ['t1', 't2'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(ctxt, uuid=server_uuid)])

        mock_get_all.side_effect = fake_get_all

        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_allows_tags_filter(self):
        self._test_get_servers_allows_tag_filters('tags')

    def test_get_servers_allows_tags_any_filter(self):
        self._test_get_servers_allows_tag_filters('tags-any')

    def test_get_servers_allows_not_tags_filter(self):
        self._test_get_servers_allows_tag_filters('not-tags')

    def test_get_servers_allows_not_tags_any_filter(self):
        self._test_get_servers_allows_tag_filters('not-tags-any')


class ServersControllerDeleteTest(ControllerTest):

    def setUp(self):
        super(ServersControllerDeleteTest, self).setUp()
        self.server_delete_called = False

        def fake_delete(api, context, instance):
            if instance.uuid == uuids.non_existent_uuid:
                raise exception.InstanceNotFound(instance_id=instance.uuid)
            self.server_delete_called = True

        self.stubs.Set(compute_api.API, 'delete', fake_delete)

    def _create_delete_request(self, uuid):
        fakes.stub_out_instance_quota(self, 0, 10)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % uuid)
        req.method = 'DELETE'
        return req

    def _delete_server_instance(self, uuid=FAKE_UUID):
        req = self._create_delete_request(uuid)
        fake_get = fakes.fake_compute_get(uuid=uuid,
                                          vm_state=vm_states.ACTIVE)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: fake_get(*a, **k))
        self.controller.delete(req, uuid)

    def test_delete_server_instance(self):
        self._delete_server_instance()
        self.assertTrue(self.server_delete_called)

    def test_delete_server_instance_not_found(self):
        self.assertRaises(webob.exc.HTTPNotFound,
                          self._delete_server_instance,
                          uuid=uuids.non_existent_uuid)

    def test_delete_server_instance_while_building(self):
        req = self._create_delete_request(FAKE_UUID)
        self.controller.delete(req, FAKE_UUID)

        self.assertTrue(self.server_delete_called)

    def test_delete_locked_server(self):
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(compute_api.API, 'soft_delete',
                       fakes.fake_actions_to_locked_server)
        self.stubs.Set(compute_api.API, 'delete',
                       fakes.fake_actions_to_locked_server)

        self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
                          req, FAKE_UUID)

    def test_delete_server_instance_while_resize(self):
        req = self._create_delete_request(FAKE_UUID)
        fake_get = fakes.fake_compute_get(
            vm_state=vm_states.ACTIVE,
            task_state=task_states.RESIZE_PREP)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: fake_get(*a, **k))

        self.controller.delete(req, FAKE_UUID)

    def test_delete_server_instance_if_not_launched(self):
        self.flags(reclaim_instance_interval=3600)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'DELETE'

        self.server_delete_called = False

        fake_get = fakes.fake_compute_get(launched_at=None)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: fake_get(*a, **k))

        def instance_destroy_mock(*args, **kwargs):
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stub_out('nova.db.instance_destroy', instance_destroy_mock)

        self.controller.delete(req, FAKE_UUID)
        # delete() should be called for instance which has never been active,
        # even if reclaim_instance_interval has been set.
        self.assertTrue(self.server_delete_called)


class ServersControllerRebuildInstanceTest(ControllerTest):

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    image_href = 'http://localhost/v2/fake/images/%s' % image_uuid

    def setUp(self):
        super(ServersControllerRebuildInstanceTest, self).setUp()

        def fake_get(ctrl, ctxt, uuid):
            if uuid == 'test_inst':
                raise webob.exc.HTTPNotFound(explanation='fakeout')
            return fakes.stub_instance_obj(None,
                                           vm_state=vm_states.ACTIVE)

        self.useFixture(
            fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
                                 'ServersController._get_instance',
                                 fake_get))

        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: fake_get(*a, **k))

        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_href,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def test_rebuild_instance_name_with_spaces_in_the_middle(self):
        self.body['rebuild']['name'] = 'abc def'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller._action_rebuild(self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_name_with_leading_trailing_spaces(self):
        self.body['rebuild']['name'] = ' abc def '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_name_with_leading_trailing_spaces_compat_mode(
            self):
        self.body['rebuild']['name'] = ' abc def '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.set_legacy_v2()

        def fake_rebuild(*args, **kwargs):
            self.assertEqual('abc def', kwargs['display_name'])

        with mock.patch.object(compute_api.API, 'rebuild') as mock_rebuild:
            mock_rebuild.side_effect = fake_rebuild
            self.controller._action_rebuild(self.req, FAKE_UUID,
                                            body=self.body)

    def test_rebuild_instance_with_blank_metadata_key(self):
        self.body['rebuild']['metadata'][''] = 'world'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_metadata_key_too_long(self):
        self.body['rebuild']['metadata'][('a' * 260)] = 'world'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_metadata_value_too_long(self):
        self.body['rebuild']['metadata']['key1'] = ('a' * 260)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_metadata_value_not_string(self):
        self.body['rebuild']['metadata']['key1'] = 1
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_fails_when_min_ram_too_small(self):
        # make min_ram larger than our instance ram size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active',
                        properties={'key1': 'value1'},
                        min_ram="4096", min_disk="10")

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_fails_when_min_disk_too_small(self):
        # make min_disk larger than our instance disk size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active',
                        properties={'key1': 'value1'},
                        min_ram="128", min_disk="100000")

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_image_too_large(self):
        # make image size larger than our instance disk size
        size = str(1000 * (1024 ** 3))

        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', size=size)

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_name_all_blank(self):
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active')

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.body['rebuild']['name'] = ' '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_deleted_image(self):
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='DELETED')

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)

        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_onset_file_limit_over_quota(self):
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active')

        with test.nested(
            mock.patch.object(fake._FakeImageService, 'show',
                              side_effect=fake_get_image),
            mock.patch.object(self.controller.compute_api, 'rebuild',
                              side_effect=exception.OnsetFileLimitExceeded)
        ) as (
            show_mock, rebuild_mock
        ):
            self.req.body = jsonutils.dump_as_bytes(self.body)
            self.assertRaises(webob.exc.HTTPForbidden,
                              self.controller._action_rebuild,
                              self.req, FAKE_UUID, body=self.body)

    def test_rebuild_bad_personality(self):
        body = {
            "rebuild": {
                "imageRef": self.image_href,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": "INVALID b64",
                }]
            },
        }

        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)

    def test_rebuild_personality(self):
        body = {
            "rebuild": {
                "imageRef": self.image_href,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": base64.b64encode("Test String"),
                }]
            },
        }

        body = self.controller._action_rebuild(self.req, FAKE_UUID,
                                               body=body).obj

        self.assertNotIn('personality', body['server'])

    def test_start(self):
        self.mox.StubOutWithMock(compute_api.API, 'start')
        compute_api.API.start(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(start="")
        self.controller._start_server(req, FAKE_UUID, body)

    def test_start_policy_failed(self):
        rules = {
            "os_compute_api:servers:start": "project_id:non_fake"
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(start="")
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._start_server,
                                req, FAKE_UUID, body)
        self.assertIn("os_compute_api:servers:start", exc.format_message())

    def test_start_not_ready(self):
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)

    def test_start_locked_server(self):
        self.stubs.Set(compute_api.API, 'start',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)

    def test_start_invalid(self):
        self.stubs.Set(compute_api.API, 'start',
                       fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)

    def test_stop(self):
        self.mox.StubOutWithMock(compute_api.API, 'stop')
        compute_api.API.stop(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(stop="")
        self.controller._stop_server(req, FAKE_UUID, body)

    def test_stop_policy_failed(self):
        rules = {
            "os_compute_api:servers:stop": "project_id:non_fake"
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(stop='')
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._stop_server,
                                req, FAKE_UUID, body)
        self.assertIn("os_compute_api:servers:stop", exc.format_message())

    def test_stop_not_ready(self):
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)

    def test_stop_locked_server(self):
        self.stubs.Set(compute_api.API, 'stop',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)

    def test_stop_invalid_state(self):
        self.stubs.Set(compute_api.API, 'stop',
                       fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' %
                                         FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)

    def test_start_with_bogus_id(self):
        self.stub_out('nova.db.instance_get_by_uuid',
                      fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV21.blank('/fake/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._start_server,
                          req, 'test_inst', body)

    def test_stop_with_bogus_id(self):
        self.stub_out('nova.db.instance_get_by_uuid',
                      fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV21.blank('/fake/servers/test_inst/action')
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._stop_server,
                          req, 'test_inst', body)


class ServersControllerRebuildTestV219(ServersControllerRebuildInstanceTest):

    def setUp(self):
        super(ServersControllerRebuildTestV219, self).setUp()
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.19')

    def _rebuild_server(self, set_desc, desc):
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          display_description=desc)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: fake_get(*a, **k))

        if set_desc:
            self.body['rebuild']['description'] = desc
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID, body=self.body).obj['server']
        self.assertEqual(server['id'], FAKE_UUID)
        self.assertEqual(server['description'], desc)

    def test_rebuild_server_with_description(self):
        self._rebuild_server(True, 'server desc')

    def test_rebuild_server_empty_description(self):
        self._rebuild_server(True, '')

    def test_rebuild_server_without_description(self):
        self._rebuild_server(False, '')

    def test_rebuild_server_remove_description(self):
        self._rebuild_server(True, None)

    def test_rebuild_server_description_too_long(self):
        self.body['rebuild']['description'] = 'x' * 256
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_server_description_invalid(self):
        # Invalid non-printable control char in the desc.
        self.body['rebuild']['description'] = "123\0d456"
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)


class ServersControllerUpdateTest(ControllerTest):

    def _get_request(self, body=None, options=None):
        if options:
            fake_get = fakes.fake_compute_get(**options)
            self.stubs.Set(compute_api.API, 'get',
                           lambda api, *a, **k: fake_get(*a, **k))
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        req.body = jsonutils.dump_as_bytes(body)
        return req

    def test_update_server_all_attributes(self):
        body = {'server': {
                  'name': 'server_test',
               }}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')

    def test_update_server_name(self):
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')

    def test_update_server_name_too_long(self):
        body = {'server': {'name': 'x' * 256}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_name_all_blank_spaces(self):
        self.stub_out('nova.db.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' ' * 64}}
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_name_with_spaces_in_the_middle(self):
        self.stub_out('nova.db.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': 'abc def'}}
        req.body = jsonutils.dump_as_bytes(body)
        self.controller.update(req, FAKE_UUID, body=body)

    def test_update_server_name_with_leading_trailing_spaces(self):
        self.stub_out('nova.db.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' abc def '}}
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_name_with_leading_trailing_spaces_compat_mode(
            self):
        self.stub_out('nova.db.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' abc def '}}
        req.body = jsonutils.dump_as_bytes(body)
        req.set_legacy_v2()
        self.controller.update(req, FAKE_UUID, body=body)

    def test_update_server_admin_password_extra_arg(self):
        inst_dict = dict(name='server_test', admin_password='bacon')
        body = dict(server=inst_dict)

        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_host_id(self):
        inst_dict = dict(host_id='123')
        body = dict(server=inst_dict)

        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_not_found(self):
        def fake_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stubs.Set(compute_api.API, 'get', fake_get)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_not_found_on_update(self):
        def fake_update(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stub_out('nova.db.instance_update_and_get_original', fake_update)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_policy_fail(self):
        rule = {'compute:update': 'role:admin'}
        policy.set_rules(oslo_policy.Rules.from_dict(rule))
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.update, req, FAKE_UUID, body=body)


class ServersControllerTriggerCrashDumpTest(ControllerTest):

    def setUp(self):
        super(ServersControllerTriggerCrashDumpTest, self).setUp()

        self.instance = fakes.stub_instance_obj(None,
                                                vm_state=vm_states.ACTIVE)

        def fake_get(ctrl, ctxt, uuid):
            if uuid != FAKE_UUID:
                raise webob.exc.HTTPNotFound(explanation='fakeout')
            return self.instance

        self.useFixture(
            fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
                                 'ServersController._get_instance',
                                 fake_get))

        self.req = fakes.HTTPRequest.blank('/servers/%s/action' % FAKE_UUID)
        self.req.api_version_request =\
            api_version_request.APIVersionRequest('2.17')
        self.body = dict(trigger_crash_dump=None)

    @mock.patch.object(compute_api.API, 'trigger_crash_dump')
    def test_trigger_crash_dump(self, mock_trigger_crash_dump):
        ctxt = self.req.environ['nova.context']
        self.controller._action_trigger_crash_dump(self.req, FAKE_UUID,
                                                   body=self.body)
        mock_trigger_crash_dump.assert_called_with(ctxt, self.instance)

    def test_trigger_crash_dump_policy_failed(self):
        rule_name = "os_compute_api:servers:trigger_crash_dump"
        self.policy.set_rules({rule_name: "project_id:non_fake"})
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._action_trigger_crash_dump,
                                self.req, FAKE_UUID, body=self.body)
        self.assertIn("os_compute_api:servers:trigger_crash_dump",
                      exc.format_message())

    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fake_start_stop_not_ready)
    def test_trigger_crash_dump_not_ready(self):
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fakes.fake_actions_to_locked_server)
    def test_trigger_crash_dump_locked_server(self):
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fake_start_stop_invalid_state)
    def test_trigger_crash_dump_invalid_state(self):
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    def test_trigger_crash_dump_with_bogus_id(self):
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_trigger_crash_dump,
                          self.req, 'test_inst', body=self.body)

    def test_trigger_crash_dump_schema_invalid_type(self):
        self.body['trigger_crash_dump'] = 'not null'
        self.assertRaises(exception.ValidationError,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    def test_trigger_crash_dump_schema_extra_property(self):
        self.body['extra_property'] = 'extra'
        self.assertRaises(exception.ValidationError,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       side_effect=exception.TriggerCrashDumpNotSupported)
    def test_trigger_crash_dump_not_supported(self,
                                              mock_trigger_crash_dump):
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)


class ServersControllerUpdateTestV219(ServersControllerUpdateTest):
    def _get_request(self, body=None, options=None):
        req = super(ServersControllerUpdateTestV219, self)._get_request(
            body=body, options=options)
        req.api_version_request = api_version_request.APIVersionRequest(
            '2.19')
        return req

    def _update_server_desc(self, set_desc, desc=None):
        body = {'server': {}}

        if set_desc:
            body['server']['description'] = desc

        req = self._get_request()
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        return res_dict

    def test_update_server_description(self):
        res_dict = self._update_server_desc(True, 'server_desc')
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['description'], 'server_desc')

    def test_update_server_empty_description(self):
        res_dict = self._update_server_desc(True, '')
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['description'], '')

    def test_update_server_without_description(self):
        res_dict = self._update_server_desc(False)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertIsNone(res_dict['server']['description'])

    def test_update_server_remove_description(self):
        res_dict = self._update_server_desc(True)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertIsNone(res_dict['server']['description'])

    def test_update_server_all_attributes(self):
        body = {'server': {
                  'name': 'server_test',
                  'description': 'server_desc'
               }}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
        self.assertEqual(res_dict['server']['description'], 'server_desc')

    def test_update_server_description_too_long(self):
        body = {'server': {'description': 'x' * 256}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_description_invalid(self):
        # Invalid non-printable control char in the desc.
        body = {'server': {'description': "123\0d456"}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)


class ServerStatusTest(test.TestCase):

    def setUp(self):
        super(ServerStatusTest, self).setUp()
        fakes.stub_out_nw_api(self)

        ext_info = extension_info.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

    def _get_with_state(self, vm_state, task_state=None):
        self.stub_out('nova.db.instance_get_by_uuid',
                      fakes.fake_instance_get(vm_state=vm_state,
                                              task_state=task_state))

        request = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID)
        return self.controller.show(request, FAKE_UUID)

    def test_active(self):
        response = self._get_with_state(vm_states.ACTIVE)
        self.assertEqual(response['server']['status'], 'ACTIVE')

    def test_reboot(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING)
        self.assertEqual(response['server']['status'], 'REBOOT')

    def test_reboot_hard(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING_HARD)
        self.assertEqual(response['server']['status'], 'HARD_REBOOT')

    def test_reboot_resize_policy_fail(self):
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)

        self.stubs.Set(self.controller, '_get_server', fake_get_server)

        rule = {'compute:reboot': 'role:admin'}
        policy.set_rules(oslo_policy.Rules.from_dict(rule))
        req = fakes.HTTPRequestV21.blank('/fake/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_reboot, req, '1234',
                          body={'reboot': {'type': 'HARD'}})

    def test_rebuild(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBUILDING)
        self.assertEqual(response['server']['status'], 'REBUILD')

    def test_rebuild_error(self):
        response = self._get_with_state(vm_states.ERROR)
        self.assertEqual(response['server']['status'], 'ERROR')

    def test_resize(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.RESIZE_PREP)
        self.assertEqual(response['server']['status'], 'RESIZE')

    def test_confirm_resize_policy_fail(self):
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)

        self.stubs.Set(self.controller, '_get_server', fake_get_server)

        rule = {'compute:confirm_resize': 'role:admin'}
        policy.set_rules(oslo_policy.Rules.from_dict(rule))
        req = fakes.HTTPRequestV21.blank('/fake/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_confirm_resize,
                          req, '1234', {})

    def test_verify_resize(self):
        response = self._get_with_state(vm_states.RESIZED, None)
        self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')

    def test_revert_resize(self):
        response = self._get_with_state(vm_states.RESIZED,
                                        task_states.RESIZE_REVERTING)
        self.assertEqual(response['server']['status'], 'REVERT_RESIZE')

    def test_revert_resize_policy_fail(self):
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)

        self.stubs.Set(self.controller, '_get_server', fake_get_server)

        rule = {'compute:revert_resize': 'role:admin'}
        policy.set_rules(oslo_policy.Rules.from_dict(rule))
        req = fakes.HTTPRequestV21.blank('/fake/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_revert_resize,
                          req, '1234', {})

    def test_password_update(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.UPDATING_PASSWORD)
        self.assertEqual(response['server']['status'], 'PASSWORD')

    def test_stopped(self):
        response = self._get_with_state(vm_states.STOPPED)
        self.assertEqual(response['server']['status'], 'SHUTOFF')


class ServersControllerCreateTest(test.TestCase):
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'

    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTest, self).setUp()

        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}

        fakes.stub_out_nw_api(self)

        ext_info = extension_info.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

        def instance_create(context, inst):
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            def_image_ref = 'http://localhost/fake/images/%s' % image_uuid
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(**{
                'id': self.instance_cache_num,
                'display_name': inst['display_name'] or 'test',
                'display_description': inst['display_description'] or '',
                'uuid': FAKE_UUID,
                'instance_type': inst_type,
                'image_ref': inst.get('image_ref', def_image_ref),
                'user_id': 'fake',
                'project_id': 'fake',
                'reservation_id': inst['reservation_id'],
                "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                "config_drive": None,
                "progress": 0,
                "fixed_ips": [],
                "task_state": "",
                "vm_state": "",
                "root_device_name": inst.get('root_device_name', 'vda'),
            })

            self.instance_cache_by_id[instance['id']] = instance
            self.instance_cache_by_uuid[instance['uuid']] = instance
            return instance

        def instance_get(context, instance_id):
            """Stub for compute/api create() pulling in instance after
            scheduling
            """
            return self.instance_cache_by_id[instance_id]

        def instance_update(context, uuid, values):
            instance = self.instance_cache_by_uuid[uuid]
            instance.update(values)
            return instance

        def server_update_and_get_original(
                context, instance_uuid, params, columns_to_join=None):
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return (inst, inst)

        def fake_method(*args, **kwargs):
            pass

        def project_get_networks(context, user_id):
            return dict(id='1', host='localhost')

        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self)
        self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
        self.stub_out('nova.db.project_get_networks', project_get_networks)
        self.stub_out('nova.db.instance_create', instance_create)
        self.stub_out('nova.db.instance_system_metadata_update', fake_method)
        self.stub_out('nova.db.instance_get', instance_get)
        self.stub_out('nova.db.instance_update', instance_update)
        self.stub_out('nova.db.instance_update_and_get_original',
                      server_update_and_get_original)
        self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip', fake_method)
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
                'personality': [
                    {
                        "path": "/etc/banner.txt",
                        "contents": "MQ==",
                    },
                ],
            },
        }
        self.bdm = [{'delete_on_termination': 1,
                     'device_name': 123,
                     'volume_size': 1,
                     'volume_id': '11111111-1111-1111-1111-111111111111'}]
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def _check_admin_password_len(self, server_dict):
        """utility function - check server_dict for admin_password length."""
        self.assertEqual(CONF.password_length,
                         len(server_dict["adminPass"]))

    def _check_admin_password_missing(self, server_dict):
        """utility function - check server_dict for admin_password absence."""
        self.assertNotIn("adminPass", server_dict)

    def _test_create_instance(self, flavor=2):
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        self.body['server']['imageRef'] = image_uuid
        self.body['server']['flavorRef'] = flavor
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller.create(
            self.req, body=self.body).obj['server']
        self._check_admin_password_len(server)
        self.assertEqual(FAKE_UUID, server['id'])

    def test_create_instance_with_none_value_port(self):
        self.body['server'] = {'networks': [{'port': None,
                                             'uuid': FAKE_UUID}]}
        self.body['server']['name'] = 'test'
        self._test_create_instance()

    def test_create_instance_private_flavor(self):
        values = {
            'name': 'fake_name',
            'memory_mb': 512,
            'vcpus': 1,
            'root_gb': 10,
            'ephemeral_gb': 10,
            'flavorid': '1324',
            'swap': 0,
            'rxtx_factor': 0.5,
            'vcpu_weight': 1,
            'disabled': False,
            'is_public': False,
        }
        db.flavor_create(context.get_admin_context(), values)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_instance,
                          flavor=1324)

    def test_create_server_bad_image_href(self):
        image_href = 1
        self.body['server']['min_count'] = 1
        self.body['server']['imageRef'] = image_href,
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create,
                          self.req, body=self.body)

    # TODO(cyeoh): bp-v3-api-unittests
    # This needs to be ported to the os-networks extension tests
    # def test_create_server_with_invalid_networks_parameter(self):
    #     self.ext_mgr.extensions = {'os-networks': 'fake'}
    #     image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    #     flavor_ref = 'http://localhost/123/flavors/3'
    #     body = {
    #         'server': {
    #             'name': 'server_test',
    #             'imageRef': image_href,
    #             'flavorRef': flavor_ref,
    #             'networks': {'uuid': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'},
    #         }
    #     }
    #     req = fakes.HTTPRequest.blank('/fake/servers')
    #     req.method = 'POST'
    #     req.body = jsonutils.dump_as_bytes(body)
    #     req.headers["content-type"] = "application/json"
    #     self.assertRaises(webob.exc.HTTPBadRequest,
    #                       self.controller.create,
    #                       req,
    #                       body)

    def test_create_server_with_deleted_image(self):
        # Get the fake image service so we can set the status to deleted
        (image_service, image_id) = glance.get_remote_image_service(
            context, '')
'') image_service.update(context, self.image_uuid, {'status': 'DELETED'}) self.addCleanup(image_service.update, context, self.image_uuid, {'status': 'active'}) self.body['server']['flavorRef'] = 2 self.req.body = jsonutils.dump_as_bytes(self.body) with testtools.ExpectedException( webob.exc.HTTPBadRequest, 'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'): self.controller.create(self.req, body=self.body) def test_create_server_image_too_large(self): # Get the fake image service so we can update the size of the image (image_service, image_id) = glance.get_remote_image_service( context, self.image_uuid) image = image_service.show(context, image_id) orig_size = image['size'] new_size = str(1000 * (1024 ** 3)) image_service.update(context, self.image_uuid, {'size': new_size}) self.addCleanup(image_service.update, context, self.image_uuid, {'size': orig_size}) self.body['server']['flavorRef'] = 2 self.req.body = jsonutils.dump_as_bytes(self.body) with testtools.ExpectedException( webob.exc.HTTPBadRequest, "Flavor's disk is too small for requested image."): self.controller.create(self.req, body=self.body) def test_create_instance_image_ref_is_bookmark(self): image_href = 'http://localhost/fake/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj server = res['server'] self.assertEqual(FAKE_UUID, server['id']) def test_create_instance_image_ref_is_invalid(self): image_uuid = 'this_is_not_a_valid_uuid' image_href = 'http://localhost/fake/images/%s' % image_uuid flavor_ref = 'http://localhost/fake/flavors/3' self.body['server']['imageRef'] = image_href self.body['server']['flavorRef'] = flavor_ref self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_no_key_pair(self): fakes.stub_out_key_pair_funcs(self.stubs, have_key_pair=False) self._test_create_instance() def _test_create_extra(self, params, no_image=False): self.body['server']['flavorRef'] = 2 if no_image: self.body['server'].pop('imageRef', None) self.body['server'].update(params) self.req.body = jsonutils.dump_as_bytes(self.body) self.req.headers["content-type"] = "application/json" self.controller.create(self.req, body=self.body).obj['server'] # TODO(cyeoh): bp-v3-api-unittests # This needs to be ported to the os-keypairs extension tests # def test_create_instance_with_keypairs_enabled(self): # self.ext_mgr.extensions = {'os-keypairs': 'fake'} # key_name = 'green' # # params = {'key_name': key_name} # old_create = compute_api.API.create # # # NOTE(sdague): key pair goes back to the database, # # so we need to stub it out for tests # def key_pair_get(context, user_id, name): # return {'public_key': 'FAKE_KEY', # 'fingerprint': 'FAKE_FINGERPRINT', # 'name': name} # # def create(*args, **kwargs): # self.assertEqual(kwargs['key_name'], key_name) # return old_create(*args, **kwargs) # # self.stub_out('nova.db.key_pair_get', key_pair_get) # self.stubs.Set(compute_api.API, 'create', create) # self._test_create_extra(params) # # TODO(cyeoh): bp-v3-api-unittests # This needs to be ported to the os-networks extension tests # def test_create_instance_with_networks_enabled(self): # self.ext_mgr.extensions = {'os-networks': 'fake'} # net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' # requested_networks = [{'uuid': net_uuid}] # params = {'networks': requested_networks} # old_create = 
compute_api.API.create # def create(*args, **kwargs): # result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None)] # self.assertEqual(kwargs['requested_networks'], result) # return old_create(*args, **kwargs) # self.stubs.Set(compute_api.API, 'create', create) # self._test_create_extra(params) def test_create_instance_with_port_with_no_fixed_ips(self): port_id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'port': port_id}] params = {'networks': requested_networks} def fake_create(*args, **kwargs): raise exception.PortRequiresFixedIP(port_id=port_id) self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) @mock.patch.object(compute_api.API, 'create') def test_create_instance_raise_user_data_too_large(self, mock_create): mock_create.side_effect = exception.InstanceUserDataTooLarge( maxsize=1, length=2) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_with_network_with_no_subnet(self): network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network}] params = {'networks': requested_networks} def fake_create(*args, **kwargs): raise exception.NetworkRequiresSubnet(network_uuid=network) self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) def test_create_instance_with_non_unique_secgroup_name(self): network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network}] params = {'networks': requested_networks, 'security_groups': [{'name': 'dup'}, {'name': 'dup'}]} def fake_create(*args, **kwargs): raise exception.NoUniqueMatch("No Unique match found for ...") self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPConflict, self._test_create_extra, params) def test_create_instance_secgroup_leading_trailing_spaces(self): network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network}] params = {'networks': requested_networks, 'security_groups': [{'name': ' sg '}]} self.assertRaises(exception.ValidationError, self._test_create_extra, params) def test_create_instance_secgroup_leading_trailing_spaces_compat_mode( self): network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network}] params = {'networks': requested_networks, 'security_groups': [{'name': ' sg '}]} def fake_create(*args, **kwargs): self.assertEqual([' sg '], kwargs['security_group']) return (objects.InstanceList(objects=[fakes.stub_instance_obj( self.req.environ['nova.context'])]), None) self.stubs.Set(compute_api.API, 'create', fake_create) self.req.set_legacy_v2() self._test_create_extra(params) def test_create_instance_with_networks_disabled_neutronv2(self): self.flags(use_neutron=True) net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' requested_networks = [{'uuid': net_uuid}] params = {'networks': requested_networks} old_create = compute_api.API.create def create(*args, **kwargs): result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None, None, None)] self.assertEqual(result, kwargs['requested_networks'].as_tuples()) return old_create(*args, **kwargs) self.stubs.Set(compute_api.API, 'create', create) self._test_create_extra(params) def test_create_instance_with_networks_disabled(self): net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' requested_networks = [{'uuid': net_uuid}] params = {'networks': requested_networks} old_create = compute_api.API.create def create(*args, 
**kwargs): self.assertIsNone(kwargs['requested_networks']) return old_create(*args, **kwargs) self.stubs.Set(compute_api.API, 'create', create) self._test_create_extra(params) def test_create_instance_with_pass_disabled(self): # test with admin passwords disabled. See lp bug 921814 self.flags(enable_instance_password=False) # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/fake/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj server = res['server'] self._check_admin_password_missing(server) self.assertEqual(FAKE_UUID, server['id']) def test_create_instance_name_too_long(self): # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['name'] = 'X' * 256 self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_name_with_spaces_in_the_middle(self): # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['name'] = 'abc def' self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) self.controller.create(self.req, body=self.body) def test_create_instance_name_with_leading_trailing_spaces(self): # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['name'] = ' abc def ' self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_name_with_leading_trailing_spaces_in_compat_mode( self): # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['name'] = ' abc def ' self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) self.req.set_legacy_v2() self.controller.create(self.req, body=self.body) def test_create_instance_name_all_blank_spaces(self): # proper local hrefs must start with 'http://localhost/v2/' image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' image_href = 'http://localhost/v2/images/%s' % image_uuid flavor_ref = 'http://localhost/fake/flavors/3' body = { 'server': { 'name': ' ' * 64, 'imageRef': image_href, 'flavorRef': flavor_ref, 'metadata': { 'hello': 'world', 'open': 'stack', }, }, } req = fakes.HTTPRequest.blank('/fake/servers') req.method = 'POST' req.body = jsonutils.dump_as_bytes(body) req.headers["content-type"] = "application/json" self.assertRaises(exception.ValidationError, self.controller.create, req, body=body) def test_create_az_with_leading_trailing_spaces(self): # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['availability_zone'] = ' zone1 ' self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_az_with_leading_trailing_spaces_in_compat_mode( self): # proper local hrefs must start
with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['name'] = ' abc def ' self.body['server']['imageRef'] = image_href self.body['server']['availability_zones'] = ' zone1 ' self.req.body = jsonutils.dump_as_bytes(self.body) self.req.set_legacy_v2() with mock.patch.object(availability_zones, 'get_availability_zones', return_value=[' zone1 ']): self.controller.create(self.req, body=self.body) def test_create_instance(self): # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj server = res['server'] self._check_admin_password_len(server) self.assertEqual(FAKE_UUID, server['id']) def test_create_instance_extension_create_exception(self): def fake_keypair_server_create(self, server_dict, create_kwargs): raise KeyError self.stubs.Set(keypairs.Keypairs, 'server_create', fake_keypair_server_create) # proper local hrefs must start with 'http://localhost/v2/' image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' image_href = 'http://localhost/v2/images/%s' % image_uuid flavor_ref = 'http://localhost/123/flavors/3' body = { 'server': { 'name': 'server_test', 'imageRef': image_href, 'flavorRef': flavor_ref, 'metadata': { 'hello': 'world', 'open': 'stack', }, }, } req = fakes.HTTPRequestV21.blank('/fake/servers') req.method = 'POST' req.body = jsonutils.dump_as_bytes(body) req.headers["content-type"] = "application/json" self.assertRaises(webob.exc.HTTPInternalServerError, self.controller.create, req, body=body) def test_create_instance_pass_disabled(self): self.flags(enable_instance_password=False) # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj server = res['server'] self._check_admin_password_missing(server) self.assertEqual(FAKE_UUID, server['id']) @mock.patch('nova.virt.hardware.numa_get_constraints') def _test_create_instance_numa_topology_wrong(self, exc, numa_constraints_mock): numa_constraints_mock.side_effect = exc(**{'name': None, 'cpunum': 0, 'cpumax': 0, 'cpuset': None, 'memsize': 0, 'memtotal': 0}) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_numa_topology_wrong(self): for exc in [exception.ImageNUMATopologyIncomplete, exception.ImageNUMATopologyForbidden, exception.ImageNUMATopologyAsymmetric, exception.ImageNUMATopologyCPUOutOfRange, exception.ImageNUMATopologyCPUDuplicates, exception.ImageNUMATopologyCPUsUnassigned, exception.ImageNUMATopologyMemoryOutOfRange]: self._test_create_instance_numa_topology_wrong(exc) def test_create_instance_too_much_metadata(self): self.flags(quota_metadata_items=1) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['metadata']['vote'] = 'fiddletown' self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPForbidden, self.controller.create, self.req, body=self.body) def test_create_instance_metadata_key_too_long(self): 
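# A 260-character metadata key exceeds the request schema's maximum key length, so schema validation must reject it before any quota check runs.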
self.flags(quota_metadata_items=1) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['metadata'] = {('a' * 260): '12345'} self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_metadata_value_too_long(self): self.flags(quota_metadata_items=1) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['metadata'] = {'key1': ('a' * 260)} self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_metadata_key_blank(self): self.flags(quota_metadata_items=1) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['metadata'] = {'': 'abcd'} self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_metadata_not_dict(self): self.flags(quota_metadata_items=1) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['metadata'] = 'string' self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_metadata_key_not_string(self): self.flags(quota_metadata_items=1) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['metadata'] = {1: 'test'} self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_metadata_value_not_string(self): self.flags(quota_metadata_items=1) image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href self.body['server']['metadata'] = {'test': ['a', 'list']} self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_user_data_malformed_bad_request(self): params = {'user_data': 'u1234'} self.assertRaises(exception.ValidationError, self._test_create_extra, params) def test_create_instance_invalid_key_name(self): image_href = 'http://localhost/v2/images/2' self.body['server']['imageRef'] = image_href self.body['server']['key_name'] = 'nonexistentkey' self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_valid_key_name(self): self.body['server']['key_name'] = 'key' self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj self.assertEqual(FAKE_UUID, res["server"]["id"]) self._check_admin_password_len(res["server"]) def test_create_instance_invalid_flavor_href(self): image_href = 'http://localhost/v2/images/2' flavor_ref = 'http://localhost/v2/flavors/asdf' self.body['server']['imageRef'] = image_href self.body['server']['flavorRef'] = flavor_ref self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_invalid_flavor_id_int(self): image_href = 
'http://localhost/v2/images/2' flavor_ref = -1 self.body['server']['imageRef'] = image_href self.body['server']['flavorRef'] = flavor_ref self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_invalid_flavor_id_empty(self): flavor_ref = "" self.body['server']['flavorRef'] = flavor_ref self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_bad_flavor_href(self): image_href = 'http://localhost/v2/images/2' flavor_ref = 'http://localhost/v2/flavors/17' self.body['server']['imageRef'] = image_href self.body['server']['flavorRef'] = flavor_ref self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_bad_href(self): image_href = 'asdf' self.body['server']['imageRef'] = image_href self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_local_href(self): self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj server = res['server'] self.assertEqual(FAKE_UUID, server['id']) def test_create_instance_admin_password(self): self.body['server']['flavorRef'] = 3 self.body['server']['adminPass'] = 'testpass' self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj server = res['server'] self.assertEqual(server['adminPass'], self.body['server']['adminPass']) def test_create_instance_admin_password_pass_disabled(self): self.flags(enable_instance_password=False) self.body['server']['flavorRef'] = 3 self.body['server']['adminPass'] = 'testpass' self.req.body = jsonutils.dump_as_bytes(self.body) res = self.controller.create(self.req, body=self.body).obj self.assertIn('server', res) self.assertIn('adminPass', self.body['server']) def test_create_instance_admin_password_empty(self): self.body['server']['flavorRef'] = 3 self.body['server']['adminPass'] = '' self.req.body = jsonutils.dump_as_bytes(self.body) # The fact that the action doesn't raise is enough validation self.controller.create(self.req, body=self.body) def test_create_location(self): selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID self.req.body = jsonutils.dump_as_bytes(self.body) robj = self.controller.create(self.req, body=self.body) self.assertEqual(robj['Location'], selfhref) def _do_test_create_instance_above_quota(self, resource, allowed, quota, expected_msg): fakes.stub_out_instance_quota(self, allowed, quota, resource) self.body['server']['flavorRef'] = 3 self.req.body = jsonutils.dump_as_bytes(self.body) try: self.controller.create(self.req, body=self.body).obj['server'] self.fail('expected quota to be exceeded') except webob.exc.HTTPForbidden as e: self.assertEqual(e.explanation, expected_msg) def test_create_instance_above_quota_instances(self): msg = ('Quota exceeded for instances: Requested 1, but' ' already used 10 of 10 instances') self._do_test_create_instance_above_quota('instances', 0, 10, msg) def test_create_instance_above_quota_ram(self): msg = ('Quota exceeded for ram: Requested 4096, but' ' already used 8192 of 10240 ram') self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg) def test_create_instance_above_quota_cores(self): msg = ('Quota 
exceeded for cores: Requested 2, but' ' already used 9 of 10 cores') self._do_test_create_instance_above_quota('cores', 1, 10, msg) def test_create_instance_above_quota_server_group_members(self): ctxt = self.req.environ['nova.context'] fake_group = objects.InstanceGroup(ctxt) fake_group.project_id = ctxt.project_id fake_group.user_id = ctxt.user_id fake_group.create() def fake_count(context, name, group, user_id): self.assertEqual(name, "server_group_members") self.assertEqual(group.uuid, fake_group.uuid) self.assertEqual(user_id, self.req.environ['nova.context'].user_id) return 10 def fake_limit_check(context, **kwargs): if 'server_group_members' in kwargs: raise exception.OverQuota(overs={}) def fake_instance_destroy(context, uuid, constraint): return fakes.stub_instance(1) self.stubs.Set(fakes.QUOTAS, 'count', fake_count) self.stubs.Set(fakes.QUOTAS, 'limit_check', fake_limit_check) self.stub_out('nova.db.instance_destroy', fake_instance_destroy) self.body['os:scheduler_hints'] = {'group': fake_group.uuid} self.req.body = jsonutils.dump_as_bytes(self.body) expected_msg = "Quota exceeded, too many servers in group" try: self.controller.create(self.req, body=self.body).obj self.fail('expected quota to be exceeded') except webob.exc.HTTPForbidden as e: self.assertEqual(e.explanation, expected_msg) def test_create_instance_with_group_hint(self): ctxt = self.req.environ['nova.context'] test_group = objects.InstanceGroup(ctxt) test_group.project_id = ctxt.project_id test_group.user_id = ctxt.user_id test_group.create() def fake_instance_destroy(context, uuid, constraint): return fakes.stub_instance(1) self.stub_out('nova.db.instance_destroy', fake_instance_destroy) self.body['os:scheduler_hints'] = {'group': test_group.uuid} self.req.body = jsonutils.dump_as_bytes(self.body) server = self.controller.create(self.req, body=self.body).obj['server'] test_group = objects.InstanceGroup.get_by_uuid(ctxt, test_group.uuid) self.assertIn(server['id'], test_group.members) def test_create_instance_with_group_hint_group_not_found(self): def fake_instance_destroy(context, uuid, constraint): return fakes.stub_instance(1) self.stub_out('nova.db.instance_destroy', fake_instance_destroy) self.body['os:scheduler_hints'] = { 'group': '5b674f73-c8cf-40ef-9965-3b6fe4b304b1'} self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_with_group_hint_wrong_uuid_format(self): self.body['os:scheduler_hints'] = { 'group': 'non-uuid'} self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_with_neutronv2_port_in_use(self): network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network, 'port': port}] params = {'networks': requested_networks} def fake_create(*args, **kwargs): raise exception.PortInUse(port_id=port) self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPConflict, self._test_create_extra, params) @mock.patch.object(compute_api.API, 'create') def test_create_instance_public_network_non_admin(self, mock_create): public_network_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' params = {'networks': [{'uuid': public_network_uuid}]} self.req.body = jsonutils.dump_as_bytes(self.body) mock_create.side_effect = exception.ExternalNetworkAttachForbidden( network_uuid=public_network_uuid) 
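# The compute API mock raises ExternalNetworkAttachForbidden; the controller is expected to translate it into a 403 Forbidden response.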
self.assertRaises(webob.exc.HTTPForbidden, self._test_create_extra, params) @mock.patch.object(compute_api.API, 'create') def test_create_multiple_instance_with_specified_ip_neutronv2(self, _api_mock): _api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest( reason="") network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' address = '10.0.0.1' requested_networks = [{'uuid': network, 'fixed_ip': address, 'port': port}] params = {'networks': requested_networks} self.body['server']['max_count'] = 2 self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) def test_create_multiple_instance_with_neutronv2_port(self): network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network, 'port': port}] params = {'networks': requested_networks} self.body['server']['max_count'] = 2 def fake_create(*args, **kwargs): msg = ("Unable to launch multiple instances with" " a single configured port ID. Please launch your" " instance one by one with different ports.") raise exception.MultiplePortsNotApplicable(reason=msg) self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) def test_create_instance_with_neutronv2_not_found_network(self): network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' requested_networks = [{'uuid': network}] params = {'networks': requested_networks} def fake_create(*args, **kwargs): raise exception.NetworkNotFound(network_id=network) self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) def test_create_instance_with_neutronv2_network_duplicated(self): network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' requested_networks = [{'uuid': network}, {'uuid': network}] params = {'networks': requested_networks} def fake_create(*args, **kwargs): raise exception.NetworkDuplicated(network_id=network) self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) def test_create_instance_with_neutronv2_port_not_found(self): network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee' requested_networks = [{'uuid': network, 'port': port}] params = {'networks': requested_networks} def fake_create(*args, **kwargs): raise exception.PortNotFound(port_id=port) self.stubs.Set(compute_api.API, 'create', fake_create) self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) @mock.patch.object(compute_api.API, 'create') def test_create_instance_with_network_ambiguous(self, mock_create): mock_create.side_effect = exception.NetworkAmbiguous() self.assertRaises(webob.exc.HTTPConflict, self._test_create_extra, {}) @mock.patch.object(compute_api.API, 'create', side_effect=exception.InstanceExists( name='instance-name')) def test_create_instance_raise_instance_exists(self, mock_create): self.assertRaises(webob.exc.HTTPConflict, self.controller.create, self.req, body=self.body) @mock.patch.object(compute_api.API, 'create', side_effect=exception.InvalidBDMEphemeralSize) def test_create_instance_raise_invalid_bdm_ephsize(self, mock_create): self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) @mock.patch.object(compute_api.API, 'create', side_effect=exception.InvalidBDMFormat(details='')) def test_create_instance_raise_invalid_bdm_format(self, mock_create):
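# An InvalidBDMFormat error raised by the compute layer should surface to the API caller as HTTP 400.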
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) @mock.patch.object(compute_api.API, 'create', side_effect=exception.InvalidBDMSwapSize) def test_create_instance_raise_invalid_bdm_swapsize(self, mock_create): self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) @mock.patch.object(compute_api.API, 'create', side_effect=exception.InvalidBDM) def test_create_instance_raise_invalid_bdm(self, mock_create): self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) @mock.patch.object(compute_api.API, 'create', side_effect=exception.ImageBadRequest( image_id='dummy', response='dummy')) def test_create_instance_raise_image_bad_request(self, mock_create): self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) @mock.patch.object(compute_api.API, 'create', side_effect=exception.FixedIpNotFoundForAddress( address='dummy')) def test_create_instance_raise_fixed_ip_not_found_bad_request(self, mock_create): self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) @mock.patch.object(compute_api.API, 'create') def test_create_instance_invalid_personality(self, mock_create): codec = 'utf8' content = 'b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==' start_position = 19 end_position = 20 msg = 'invalid start byte' mock_create.side_effect = UnicodeDecodeError(codec, content, start_position, end_position, msg) self.body['server']['personality'] = [ { "path": "/etc/banner.txt", "contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==", }, ] self.req.body = jsonutils.dump_as_bytes(self.body) self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, self.req, body=self.body) def test_create_instance_without_personality_should_get_empty_list(self): old_create = compute_api.API.create del self.body['server']['personality'] def create(*args, **kwargs): self.assertEqual([], kwargs['injected_files']) return old_create(*args, **kwargs) self.stub_out('nova.compute.api.API.create', create) self._test_create_instance() def test_create_instance_with_extra_personality_arg(self): self.body['server']['personality'] = [ { "path": "/etc/banner.txt", "contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==", "extra_arg": "extra value" }, ] self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) class ServersControllerCreateTestV219(ServersControllerCreateTest): def _create_instance_req(self, set_desc, desc=None): # proper local hrefs must start with 'http://localhost/v2/' image_href = 'http://localhost/v2/images/%s' % self.image_uuid self.body['server']['imageRef'] = image_href if set_desc: self.body['server']['description'] = desc self.req.body = jsonutils.dump_as_bytes(self.body) self.req.api_version_request = \ api_version_request.APIVersionRequest('2.19') def test_create_instance_with_description(self): self._create_instance_req(True, 'server_desc') # The fact that the action doesn't raise is enough validation self.controller.create(self.req, body=self.body).obj def test_create_instance_with_none_description(self): self._create_instance_req(True) # The fact that the action doesn't raise is enough validation self.controller.create(self.req, body=self.body).obj def test_create_instance_with_empty_description(self): self._create_instance_req(True, '') # The fact that the action doesn't raise is enough validation self.controller.create(self.req, body=self.body).obj def 
test_create_instance_without_description(self): self._create_instance_req(False) # The fact that the action doesn't raise is enough validation self.controller.create(self.req, body=self.body).obj def test_create_instance_description_too_long(self): self._create_instance_req(True, 'X' * 256) self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) def test_create_instance_description_invalid(self): self._create_instance_req(True, "abc\0ddef") self.assertRaises(exception.ValidationError, self.controller.create, self.req, body=self.body) class ServersControllerCreateTestWithMock(test.TestCase): image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' flavor_ref = 'http://localhost/123/flavors/3' def setUp(self): """Shared implementation for tests below that create instance.""" super(ServersControllerCreateTestWithMock, self).setUp() self.flags(verbose=True, enable_instance_password=True) self.instance_cache_num = 0 self.instance_cache_by_id = {} self.instance_cache_by_uuid = {} ext_info = extension_info.LoadedExtensionInfo() self.controller = servers.ServersController(extension_info=ext_info) self.body = { 'server': { 'name': 'server_test', 'imageRef': self.image_uuid, 'flavorRef': self.flavor_ref, 'metadata': { 'hello': 'world', 'open': 'stack', }, }, } self.req = fakes.HTTPRequest.blank('/fake/servers') self.req.method = 'POST' self.req.headers["content-type"] = "application/json" def _test_create_extra(self, params, no_image=False): self.body['server']['flavorRef'] = 2 if no_image: self.body['server'].pop('imageRef', None) self.body['server'].update(params) self.req.body = jsonutils.dump_as_bytes(self.body) self.req.headers["content-type"] = "application/json" self.controller.create(self.req, body=self.body).obj['server'] @mock.patch.object(compute_api.API, 'create') def test_create_instance_with_neutronv2_fixed_ip_already_in_use(self, create_mock): network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' address = '10.0.2.3' requested_networks = [{'uuid': network, 'fixed_ip': address}] params = {'networks': requested_networks} create_mock.side_effect = exception.FixedIpAlreadyInUse( address=address, instance_uuid=network) self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, params) self.assertEqual(1, len(create_mock.call_args_list)) @mock.patch.object(compute_api.API, 'create') def test_create_instance_with_neutronv2_invalid_fixed_ip(self, create_mock): self.flags(use_neutron=True) network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' address = '999.0.2.3' requested_networks = [{'uuid': network, 'fixed_ip': address}] params = {'networks': requested_networks} self.assertRaises(exception.ValidationError, self._test_create_extra, params) self.assertFalse(create_mock.called) @mock.patch.object(compute_api.API, 'create', side_effect=exception.InvalidVolume(reason='error')) def test_create_instance_with_invalid_volume_error(self, create_mock): # Tests that InvalidVolume is translated to a 400 error. 
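# The mocked create call raises InvalidVolume before any instance is built, so the controller must map it straight to HTTPBadRequest.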
self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_extra, {}) class ServersViewBuilderTest(test.TestCase): def setUp(self): super(ServersViewBuilderTest, self).setUp() self.flags(use_ipv6=True) self.flags(group='glance', api_servers=['http://localhost:9292']) nw_cache_info = self._generate_nw_cache_info() db_inst = fakes.stub_instance( id=1, image_ref="5", uuid="deadbeef-feed-edee-beef-d0ea7beefedd", display_name="test_server", include_fake_metadata=False, nw_cache=nw_cache_info) privates = ['172.19.0.1'] publics = ['192.168.0.3'] public6s = ['b33f::fdee:ddff:fecc:bbaa'] def nw_info(*args, **kwargs): return [(None, {'label': 'public', 'ips': [dict(ip=ip) for ip in publics], 'ip6s': [dict(ip=ip) for ip in public6s]}), (None, {'label': 'private', 'ips': [dict(ip=ip) for ip in privates]})] fakes.stub_out_nw_api_get_instance_nw_info(self, nw_info) self.uuid = db_inst['uuid'] self.view_builder = views.servers.ViewBuilderV21() self.request = fakes.HTTPRequestV21.blank("/fake") self.request.context = context.RequestContext('fake', 'fake') self.instance = fake_instance.fake_instance_obj( self.request.context, expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS, **db_inst) self.self_link = "http://localhost/v2/fake/servers/%s" % self.uuid self.bookmark_link = "http://localhost/fake/servers/%s" % self.uuid def _generate_nw_cache_info(self): fixed_ipv4 = ('192.168.1.100', '192.168.2.100', '192.168.3.100') fixed_ipv6 = ('2001:db8:0:1::1',) def _ip(ip): return {'address': ip, 'type': 'fixed'} nw_cache = [ {'address': 'aa:aa:aa:aa:aa:aa', 'id': 1, 'network': {'bridge': 'br0', 'id': 1, 'label': 'test1', 'subnets': [{'cidr': '192.168.1.0/24', 'ips': [_ip(fixed_ipv4[0])]}, {'cidr': 'b33f::/64', 'ips': [_ip(fixed_ipv6[0])]}]}}, {'address': 'bb:bb:bb:bb:bb:bb', 'id': 2, 'network': {'bridge': 'br0', 'id': 1, 'label': 'test1', 'subnets': [{'cidr': '192.168.2.0/24', 'ips': [_ip(fixed_ipv4[1])]}]}}, {'address': 'cc:cc:cc:cc:cc:cc', 'id': 3, 'network': {'bridge': 'br0', 'id': 2, 'label': 'test2', 'subnets': [{'cidr': '192.168.3.0/24', 'ips': [_ip(fixed_ipv4[2])]}]}}] return nw_cache def test_get_flavor_valid_instance_type(self): flavor_bookmark = "http://localhost/fake/flavors/1" expected = {"id": "1", "links": [{"rel": "bookmark", "href": flavor_bookmark}]} result = self.view_builder._get_flavor(self.request, self.instance) self.assertEqual(result, expected) def test_build_server(self): expected_server = { "server": { "id": self.uuid, "name": "test_server", "links": [ { "rel": "self", "href": self.self_link, }, { "rel": "bookmark", "href": self.bookmark_link, }, ], } } output = self.view_builder.basic(self.request, self.instance) self.assertThat(output, matchers.DictMatches(expected_server)) def test_build_server_with_project_id(self): expected_server = { "server": { "id": self.uuid, "name": "test_server", "links": [ { "rel": "self", "href": self.self_link, }, { "rel": "bookmark", "href": self.bookmark_link, }, ], } } output = self.view_builder.basic(self.request, self.instance) self.assertThat(output, matchers.DictMatches(expected_server)) def test_build_server_detail(self): image_bookmark = "http://localhost/fake/images/5" flavor_bookmark = "http://localhost/fake/flavors/1" expected_server = { "server": { "id": self.uuid, "user_id": "fake_user", "tenant_id": "fake_project", "updated": "2010-11-11T11:00:00Z", "created": "2010-10-10T12:00:00Z", "progress": 0, "name": "test_server", "status": "BUILD", "hostId": '', "image": { "id": "5", "links": [ { "rel": "bookmark", "href": image_bookmark, }, ], }, 
"flavor": { "id": "1", "links": [ { "rel": "bookmark", "href": flavor_bookmark, }, ], }, "addresses": { 'test1': [ {'version': 4, 'addr': '192.168.1.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 6, 'addr': '2001:db8:0:1::1', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 4, 'addr': '192.168.2.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'} ], 'test2': [ {'version': 4, 'addr': '192.168.3.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'}, ] }, "metadata": {}, "links": [ { "rel": "self", "href": self.self_link, }, { "rel": "bookmark", "href": self.bookmark_link, }, ], } } output = self.view_builder.show(self.request, self.instance) self.assertThat(output, matchers.DictMatches(expected_server)) def test_build_server_detail_with_fault(self): self.instance['vm_state'] = vm_states.ERROR self.instance['fault'] = fake_instance.fake_fault_obj( self.request.context, self.uuid) image_bookmark = "http://localhost/fake/images/5" flavor_bookmark = "http://localhost/fake/flavors/1" expected_server = { "server": { "id": self.uuid, "user_id": "fake_user", "tenant_id": "fake_project", "updated": "2010-11-11T11:00:00Z", "created": "2010-10-10T12:00:00Z", "name": "test_server", "status": "ERROR", "hostId": '', "image": { "id": "5", "links": [ { "rel": "bookmark", "href": image_bookmark, }, ], }, "flavor": { "id": "1", "links": [ { "rel": "bookmark", "href": flavor_bookmark, }, ], }, "addresses": { 'test1': [ {'version': 4, 'addr': '192.168.1.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 6, 'addr': '2001:db8:0:1::1', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 4, 'addr': '192.168.2.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'} ], 'test2': [ {'version': 4, 'addr': '192.168.3.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'}, ] }, "metadata": {}, "links": [ { "rel": "self", "href": self.self_link, }, { "rel": "bookmark", "href": self.bookmark_link, }, ], "fault": { "code": 404, "created": "2010-10-10T12:00:00Z", "message": "HTTPNotFound", "details": "Stock details for test", }, } } self.request.context = context.RequestContext('fake', 'fake') output = self.view_builder.show(self.request, self.instance) self.assertThat(output, matchers.DictMatches(expected_server)) def test_build_server_detail_with_fault_that_has_been_deleted(self): self.instance['deleted'] = 1 self.instance['vm_state'] = vm_states.ERROR fault = fake_instance.fake_fault_obj(self.request.context, self.uuid, code=500, message="No valid host was found") self.instance['fault'] = fault expected_fault = {"code": 500, "created": "2010-10-10T12:00:00Z", "message": "No valid host was found"} self.request.context = context.RequestContext('fake', 'fake') output = self.view_builder.show(self.request, self.instance) # Regardless of vm_state deleted servers sholud be DELETED self.assertEqual("DELETED", output['server']['status']) self.assertThat(output['server']['fault'], matchers.DictMatches(expected_fault)) def test_build_server_detail_with_fault_no_details_not_admin(self): self.instance['vm_state'] = vm_states.ERROR self.instance['fault'] = fake_instance.fake_fault_obj( self.request.context, self.uuid, code=500, message='Error') expected_fault = {"code": 500, "created": "2010-10-10T12:00:00Z", "message": "Error"} self.request.context = 
context.RequestContext('fake', 'fake') output = self.view_builder.show(self.request, self.instance) self.assertThat(output['server']['fault'], matchers.DictMatches(expected_fault)) def test_build_server_detail_with_fault_admin(self): self.instance['vm_state'] = vm_states.ERROR self.instance['fault'] = fake_instance.fake_fault_obj( self.request.context, self.uuid, code=500, message='Error') expected_fault = {"code": 500, "created": "2010-10-10T12:00:00Z", "message": "Error", 'details': 'Stock details for test'} self.request.environ['nova.context'].is_admin = True output = self.view_builder.show(self.request, self.instance) self.assertThat(output['server']['fault'], matchers.DictMatches(expected_fault)) def test_build_server_detail_with_fault_no_details_admin(self): self.instance['vm_state'] = vm_states.ERROR self.instance['fault'] = fake_instance.fake_fault_obj( self.request.context, self.uuid, code=500, message='Error', details='') expected_fault = {"code": 500, "created": "2010-10-10T12:00:00Z", "message": "Error"} self.request.environ['nova.context'].is_admin = True output = self.view_builder.show(self.request, self.instance) self.assertThat(output['server']['fault'], matchers.DictMatches(expected_fault)) def test_build_server_detail_with_fault_but_active(self): self.instance['vm_state'] = vm_states.ACTIVE self.instance['progress'] = 100 self.instance['fault'] = fake_instance.fake_fault_obj( self.request.context, self.uuid) output = self.view_builder.show(self.request, self.instance) self.assertNotIn('fault', output['server']) def test_build_server_detail_active_status(self): # set the power state of the instance to running self.instance['vm_state'] = vm_states.ACTIVE self.instance['progress'] = 100 image_bookmark = "http://localhost/fake/images/5" flavor_bookmark = "http://localhost/fake/flavors/1" expected_server = { "server": { "id": self.uuid, "user_id": "fake_user", "tenant_id": "fake_project", "updated": "2010-11-11T11:00:00Z", "created": "2010-10-10T12:00:00Z", "progress": 100, "name": "test_server", "status": "ACTIVE", "hostId": '', "image": { "id": "5", "links": [ { "rel": "bookmark", "href": image_bookmark, }, ], }, "flavor": { "id": "1", "links": [ { "rel": "bookmark", "href": flavor_bookmark, }, ], }, "addresses": { 'test1': [ {'version': 4, 'addr': '192.168.1.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 6, 'addr': '2001:db8:0:1::1', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 4, 'addr': '192.168.2.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'} ], 'test2': [ {'version': 4, 'addr': '192.168.3.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'}, ] }, "metadata": {}, "links": [ { "rel": "self", "href": self.self_link, }, { "rel": "bookmark", "href": self.bookmark_link, }, ], } } output = self.view_builder.show(self.request, self.instance) self.assertThat(output, matchers.DictMatches(expected_server)) def test_build_server_detail_with_metadata(self): metadata = [] metadata.append(models.InstanceMetadata(key="Open", value="Stack")) metadata = nova_utils.metadata_to_dict(metadata) self.instance['metadata'] = metadata image_bookmark = "http://localhost/fake/images/5" flavor_bookmark = "http://localhost/fake/flavors/1" expected_server = { "server": { "id": self.uuid, "user_id": "fake_user", "tenant_id": "fake_project", "updated": "2010-11-11T11:00:00Z", "created": "2010-10-10T12:00:00Z", "progress": 0, "name": "test_server", 
"status": "BUILD", "hostId": '', "image": { "id": "5", "links": [ { "rel": "bookmark", "href": image_bookmark, }, ], }, "flavor": { "id": "1", "links": [ { "rel": "bookmark", "href": flavor_bookmark, }, ], }, "addresses": { 'test1': [ {'version': 4, 'addr': '192.168.1.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 6, 'addr': '2001:db8:0:1::1', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}, {'version': 4, 'addr': '192.168.2.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'} ], 'test2': [ {'version': 4, 'addr': '192.168.3.100', 'OS-EXT-IPS:type': 'fixed', 'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'}, ] }, "metadata": {"Open": "Stack"}, "links": [ { "rel": "self", "href": self.self_link, }, { "rel": "bookmark", "href": self.bookmark_link, }, ], } } output = self.view_builder.show(self.request, self.instance) self.assertThat(output, matchers.DictMatches(expected_server)) class ServersAllExtensionsTestCase(test.TestCase): """Servers tests using default API router with all extensions enabled. The intent here is to catch cases where extensions end up throwing an exception because of a malformed request before the core API gets a chance to validate the request and return a 422 response. For example, AccessIPsController extends servers.Controller:: | @wsgi.extends | def create(self, req, resp_obj, body): | context = req.environ['nova.context'] | if authorize(context) and 'server' in resp_obj.obj: | resp_obj.attach(xml=AccessIPTemplate()) | server = resp_obj.obj['server'] | self._extend_server(req, server) we want to ensure that the extension isn't barfing on an invalid body. """ def setUp(self): super(ServersAllExtensionsTestCase, self).setUp() self.app = compute.APIRouterV21() def test_create_missing_server(self): # Test create with malformed body. def fake_create(*args, **kwargs): raise test.TestingException("Should not reach the compute API.") self.stubs.Set(compute_api.API, 'create', fake_create) req = fakes.HTTPRequestV21.blank('/fake/servers') req.method = 'POST' req.content_type = 'application/json' body = {'foo': {'a': 'b'}} req.body = jsonutils.dump_as_bytes(body) res = req.get_response(self.app) self.assertEqual(400, res.status_int) def test_update_missing_server(self): # Test update with malformed body. 
req = fakes.HTTPRequestV21.blank('/fake/servers/1') req.method = 'PUT' req.content_type = 'application/json' body = {'foo': {'a': 'b'}} req.body = jsonutils.dump_as_bytes(body) with mock.patch('nova.objects.Instance.save') as mock_save: res = req.get_response(self.app) self.assertFalse(mock_save.called) self.assertEqual(400, res.status_int) class ServersInvalidRequestTestCase(test.TestCase): """Tests of places we throw 400 Bad Request from.""" def setUp(self): super(ServersInvalidRequestTestCase, self).setUp() ext_info = extension_info.LoadedExtensionInfo() self.controller = servers.ServersController(extension_info=ext_info) def _invalid_server_create(self, body): req = fakes.HTTPRequestV21.blank('/fake/servers') req.method = 'POST' self.assertRaises(exception.ValidationError, self.controller.create, req, body=body) def test_create_server_no_body(self): self._invalid_server_create(body=None) def test_create_server_missing_server(self): body = {'foo': {'a': 'b'}} self._invalid_server_create(body=body) def test_create_server_malformed_entity(self): body = {'server': 'string'} self._invalid_server_create(body=body) def _unprocessable_server_update(self, body): req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID) req.method = 'PUT' self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update, req, FAKE_UUID, body=body) def test_update_server_no_body(self): self._invalid_server_create(body=None) def test_update_server_missing_server(self): body = {'foo': {'a': 'b'}} self._invalid_server_create(body=body) def test_create_update_malformed_entity(self): body = {'server': 'string'} self._invalid_server_create(body=body) class FakeExt(extensions.V21APIExtensionBase): name = "DiskConfig" alias = 'os-disk-config' version = 1 fake_schema = {'fake_ext_attr': {'type': 'string'}} def fake_extension_point(self, *args, **kwargs): pass def fake_schema_extension_point(self, version): if version == '2.1' or version == '2.19': return self.fake_schema elif version == '2.0': return {} # This fake method should return the schema for the expected version. # Returning None will make the tests fail, which means there is # something wrong in the code.
return None def get_controller_extensions(self): return [] def get_resources(self): return [] class TestServersExtensionPoint(test.NoDBTestCase): def setUp(self): super(TestServersExtensionPoint, self).setUp() CONF.set_override('extensions_whitelist', ['os-disk-config'], 'osapi_v21') self.stubs.Set(disk_config, 'DiskConfig', FakeExt) def _test_load_extension_point(self, name): setattr(FakeExt, 'server_%s' % name, FakeExt.fake_extension_point) ext_info = extension_info.LoadedExtensionInfo() controller = servers.ServersController(extension_info=ext_info) self.assertEqual( 'os-disk-config', list(getattr(controller, '%s_extension_manager' % name))[0].obj.alias) delattr(FakeExt, 'server_%s' % name) def test_load_update_extension_point(self): self._test_load_extension_point('update') def test_load_rebuild_extension_point(self): self._test_load_extension_point('rebuild') def test_load_create_extension_point(self): self._test_load_extension_point('create') def test_load_resize_extension_point(self): self._test_load_extension_point('resize') class TestServersExtensionSchema(test.NoDBTestCase): def setUp(self): super(TestServersExtensionSchema, self).setUp() CONF.set_override('extensions_whitelist', ['os-disk-config'], 'osapi_v21') self.stubs.Set(disk_config, 'DiskConfig', FakeExt) def _test_load_extension_schema(self, name): setattr(FakeExt, 'get_server_%s_schema' % name, FakeExt.fake_schema_extension_point) ext_info = extension_info.LoadedExtensionInfo() controller = servers.ServersController(extension_info=ext_info) self.assertTrue(hasattr(controller, '%s_schema_manager' % name)) delattr(FakeExt, 'get_server_%s_schema' % name) return getattr(controller, 'schema_server_%s' % name) def test_load_create_extension_point(self): # The expected schema is the combination of the base create schema and # the fake extension's schema because of the above extensions_whitelist. expected_schema = copy.deepcopy(servers_schema.base_create) expected_schema['properties']['server']['properties'].update( FakeExt.fake_schema) actual_schema = self._test_load_extension_schema('create') self.assertEqual(expected_schema, actual_schema) def test_load_update_extension_point(self): # The fake extension provides an update schema, so it is merged # into the base update schema. expected_schema = copy.deepcopy(servers_schema.base_update) expected_schema['properties']['server']['properties'].update( FakeExt.fake_schema) actual_schema = self._test_load_extension_schema('update') self.assertEqual(expected_schema, actual_schema) def test_load_rebuild_extension_point(self): # The fake extension provides a rebuild schema, so it is merged # into the base rebuild schema. expected_schema = copy.deepcopy(servers_schema.base_rebuild) expected_schema['properties']['rebuild']['properties'].update( FakeExt.fake_schema) actual_schema = self._test_load_extension_schema('rebuild') self.assertEqual(expected_schema, actual_schema) def test_load_resize_extension_point(self): # The fake extension provides a resize schema, so it is merged # into the base resize schema. expected_schema = copy.deepcopy(servers_schema.base_resize) expected_schema['properties']['resize']['properties'].update( FakeExt.fake_schema) actual_schema = self._test_load_extension_schema('resize') self.assertEqual(expected_schema, actual_schema) # TODO(alex_xu): There isn't a dedicated file for the ips extension. Most of # the unit tests related to the ips extension are in this file.
So the ips policy # enforcement tests stay here until there is a dedicated file for the ips extension. class IPsPolicyEnforcementV21(test.NoDBTestCase): def setUp(self): super(IPsPolicyEnforcementV21, self).setUp() self.controller = ips.IPsController() self.req = fakes.HTTPRequest.blank("/v2/fake") def test_index_policy_failed(self): rule_name = "os_compute_api:ips:index" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises( exception.PolicyNotAuthorized, self.controller.index, self.req, fakes.FAKE_UUID) self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message()) def test_show_policy_failed(self): rule_name = "os_compute_api:ips:show" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises( exception.PolicyNotAuthorized, self.controller.show, self.req, fakes.FAKE_UUID, fakes.FAKE_UUID) self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message()) class ServersPolicyEnforcementV21(test.NoDBTestCase): def setUp(self): super(ServersPolicyEnforcementV21, self).setUp() ext_info = extension_info.LoadedExtensionInfo() ext_info.extensions.update({'os-networks': 'fake'}) self.controller = servers.ServersController(extension_info=ext_info) self.req = fakes.HTTPRequest.blank('') self.image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' def _common_policy_check(self, rules, rule_name, func, *arg, **kwarg): self.policy.set_rules(rules) exc = self.assertRaises( exception.PolicyNotAuthorized, func, *arg, **kwarg) self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message()) @mock.patch.object(servers.ServersController, '_get_instance') def test_start_policy_failed(self, _get_instance_mock): _get_instance_mock.return_value = None rule_name = "os_compute_api:servers:start" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller._start_server, self.req, FAKE_UUID, body={}) @mock.patch.object(servers.ServersController, '_get_instance') def test_stop_policy_failed(self, _get_instance_mock): _get_instance_mock.return_value = None rule_name = "os_compute_api:servers:stop" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller._stop_server, self.req, FAKE_UUID, body={}) @mock.patch.object(servers.ServersController, '_get_instance') def test_trigger_crash_dump_policy_failed(self, _get_instance_mock): _get_instance_mock.return_value = None rule_name = "os_compute_api:servers:trigger_crash_dump" rule = {rule_name: "project:non_fake"} self.req.api_version_request =\ api_version_request.APIVersionRequest('2.17') self._common_policy_check( rule, rule_name, self.controller._action_trigger_crash_dump, self.req, FAKE_UUID, body={'trigger_crash_dump': None}) def test_index_policy_failed(self): rule_name = "os_compute_api:servers:index" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller.index, self.req) def test_detail_policy_failed(self): rule_name = "os_compute_api:servers:detail" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller.detail, self.req) def test_detail_get_tenants_policy_failed(self): req = fakes.HTTPRequest.blank('') req.GET["all_tenants"] = "True" rule_name = "os_compute_api:servers:detail:get_all_tenants" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller._get_servers, req, True) def test_index_get_tenants_policy_failed(self): req =
fakes.HTTPRequest.blank('') req.GET["all_tenants"] = "True" rule_name = "os_compute_api:servers:index:get_all_tenants" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller._get_servers, req, False) @mock.patch.object(common, 'get_instance') def test_show_policy_failed(self, get_instance_mock): get_instance_mock.return_value = None rule_name = "os_compute_api:servers:show" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller.show, self.req, FAKE_UUID) def test_delete_policy_failed(self): rule_name = "os_compute_api:servers:delete" rule = {rule_name: "project:non_fake"} self._common_policy_check( rule, rule_name, self.controller.delete, self.req, FAKE_UUID) def test_update_policy_failed(self): rule_name = "os_compute_api:servers:update" rule = {rule_name: "project:non_fake"} body = {'server': {'name': 'server_test'}} self._common_policy_check( rule, rule_name, self.controller.update, self.req, FAKE_UUID, body=body) def test_confirm_resize_policy_failed(self): rule_name = "os_compute_api:servers:confirm_resize" rule = {rule_name: "project:non_fake"} body = {'server': {'name': 'server_test'}} self._common_policy_check( rule, rule_name, self.controller._action_confirm_resize, self.req, FAKE_UUID, body=body) def test_revert_resize_policy_failed(self): rule_name = "os_compute_api:servers:revert_resize" rule = {rule_name: "project:non_fake"} body = {'server': {'name': 'server_test'}} self._common_policy_check( rule, rule_name, self.controller._action_revert_resize, self.req, FAKE_UUID, body=body) def test_reboot_policy_failed(self): rule_name = "os_compute_api:servers:reboot" rule = {rule_name: "project:non_fake"} body = {'reboot': {'type': 'HARD'}} self._common_policy_check( rule, rule_name, self.controller._action_reboot, self.req, FAKE_UUID, body=body) def test_resize_policy_failed(self): rule_name = "os_compute_api:servers:resize" rule = {rule_name: "project:non_fake"} flavor_id = 1 self._common_policy_check( rule, rule_name, self.controller._resize, self.req, FAKE_UUID, flavor_id) def test_create_image_policy_failed(self): rule_name = "os_compute_api:servers:create_image" rule = {rule_name: "project:non_fake"} body = { 'createImage': { 'name': 'Snapshot 1', }, } self._common_policy_check( rule, rule_name, self.controller._action_create_image, self.req, FAKE_UUID, body=body) @mock.patch.object(compute_api.API, 'is_volume_backed_instance', return_value=True) @mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid') @mock.patch.object(servers.ServersController, '_get_server') def test_create_vol_backed_img_snapshotting_policy_blocks_project(self, mock_get_server, mock_get_uuidi, mock_is_vol_back): """Don't permit a snapshot of a volume backed instance if configured not to based on project """ rule_name = "os_compute_api:servers:create_image:allow_volume_backed" rules = { rule_name: "project:non_fake", "os_compute_api:servers:create_image": "", } body = { 'createImage': { 'name': 'Snapshot 1', }, } self._common_policy_check( rules, rule_name, self.controller._action_create_image, self.req, FAKE_UUID, body=body) @mock.patch.object(compute_api.API, 'is_volume_backed_instance', return_value=True) @mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid') @mock.patch.object(servers.ServersController, '_get_server') def test_create_vol_backed_img_snapshotting_policy_blocks_role(self, mock_get_server, mock_get_uuidi, mock_is_vol_back): """Don't permit a snapshot of a volume 
backed instance if configured not to based on role """ rule_name = "os_compute_api:servers:create_image:allow_volume_backed" rules = { rule_name: "role:non_fake", "os_compute_api:servers:create_image": "", } body = { 'createImage': { 'name': 'Snapshot 1', }, } self._common_policy_check( rules, rule_name, self.controller._action_create_image, self.req, FAKE_UUID, body=body) def _create_policy_check(self, rules, rule_name): flavor_ref = 'http://localhost/123/flavors/3' body = { 'server': { 'name': 'server_test', 'imageRef': self.image_uuid, 'flavorRef': flavor_ref, 'availability_zone': "zone1:host1:node1", 'block_device_mapping': [{'device_name': "/dev/sda1"}], 'networks': [{'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}], 'metadata': { 'hello': 'world', 'open': 'stack', }, }, } self._common_policy_check( rules, rule_name, self.controller.create, self.req, body=body) def test_create_policy_failed(self): rule_name = "os_compute_api:servers:create" rules = {rule_name: "project:non_fake"} self._create_policy_check(rules, rule_name) def test_create_forced_host_policy_failed(self): rule_name = "os_compute_api:servers:create:forced_host" rule = {"os_compute_api:servers:create": "@", rule_name: "project:non_fake"} self._create_policy_check(rule, rule_name) def test_create_attach_volume_policy_failed(self): rule_name = "os_compute_api:servers:create:attach_volume" rules = {"os_compute_api:servers:create": "@", "os_compute_api:servers:create:forced_host": "@", rule_name: "project:non_fake"} self._create_policy_check(rules, rule_name) def test_create_attach_attach_network_policy_failed(self): rule_name = "os_compute_api:servers:create:attach_network" rules = {"os_compute_api:servers:create": "@", "os_compute_api:servers:create:forced_host": "@", "os_compute_api:servers:create:attach_volume": "@", rule_name: "project:non_fake"} self._create_policy_check(rules, rule_name)
42.921253
79
0.59235
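The test record above leans on one pattern throughout: override a policy rule so it can never match the request context, invoke the controller method, and assert that the raised error names the rule. A minimal self-contained sketch of that pattern; PolicyError and enforce are illustrative stand-ins, not the real nova fixtures.

import unittest

class PolicyError(Exception):
    """Stand-in for nova's PolicyNotAuthorized."""

def enforce(rules, rule_name, context):
    # Toy enforcement: only the always-pass rule "@" authorizes the caller.
    if rules.get(rule_name) != "@":
        raise PolicyError("Policy doesn't allow %s to be performed." % rule_name)

class PolicyCheckSketch(unittest.TestCase):
    def test_rule_blocks_action(self):
        rule_name = "os_compute_api:servers:start"
        rules = {rule_name: "project:non_fake"}  # can never match the fake context
        with self.assertRaises(PolicyError) as ctx:
            enforce(rules, rule_name, context={})
        self.assertIn(rule_name, str(ctx.exception))

if __name__ == "__main__":
    unittest.main()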
0f772a9bb74577463772ff6fd8227033eafd59bc
4,213
py
Python
nozomi/__init__.py
thatch/nozomi
547fb5c554f79cda1b038b1500c513fbec8bee0b
[ "MIT" ]
null
null
null
nozomi/__init__.py
thatch/nozomi
547fb5c554f79cda1b038b1500c513fbec8bee0b
[ "MIT" ]
null
null
null
nozomi/__init__.py
thatch/nozomi
547fb5c554f79cda1b038b1500c513fbec8bee0b
[ "MIT" ]
null
null
null
from nozomi.ancillary.immutable import Immutable from nozomi.ancillary.configuration import Configuration from nozomi.ancillary.database_credentials import DatabaseCredentials from nozomi.ancillary.server_name import ServerName from nozomi.ancillary.file_content import FileBody from nozomi.ancillary.command_line import CommandLine from nozomi.temporal.time import NozomiTime from nozomi.temporal.date import NozomiDate from nozomi.temporal.tz_utc import TimeZoneUTC from nozomi.data.datastore import Datastore from nozomi.data.encodable import Encodable from nozomi.data.abstract_encodable import AbstractEncodable from nozomi.data.format import Format from nozomi.data.encoder import Encoder from nozomi.data.decodable import Decodable from nozomi.data.codable import Codable from nozomi.data.query import Query from nozomi.data.index_sql_conforming import IndexSQLConforming from nozomi.data.limit import Limit from nozomi.data.offset import Offset from nozomi.data.sql_conforming import SQLConforming from nozomi.data.sql_conforming import AnySQLConforming from nozomi.data.index_equitable import IndexEquitable from nozomi.data.order import Order from nozomi.data.fragment import Fragment from nozomi.data.disposition import Disposition from nozomi.data.partial_format import PartialFormat from nozomi.errors.error import NozomiError from nozomi.errors.bad_request import BadRequest from nozomi.errors.not_found import NotFound from nozomi.errors.not_authorised import NotAuthorised from nozomi.errors.not_authenticated import NotAuthenticated from nozomi.errors.already_exists import AlreadyExists from nozomi.http.headers import Headers from nozomi.http.method import HTTPMethod from nozomi.http.query_string import QueryString from nozomi.http.status_code import HTTPStatusCode from nozomi.http.parseable_data import ParseableData from nozomi.http.redirect import Redirect from nozomi.http.url_parameter import URLParameter from nozomi.http.url_parameters import URLParameters from nozomi.http.api_request import ApiRequest from nozomi.http.user_agent import UserAgent from nozomi.http.character import Character from nozomi.rendering.context import Context from nozomi.rendering.open_graph import OpenGraph from nozomi.rendering.view_template import ViewTemplate from nozomi.rendering.view.view import View from nozomi.rendering.view.base import BaseView from nozomi.rendering.view.open import OpenView from nozomi.rendering.view.secure import SecureView from nozomi.rendering.render_dependency import RenderDependency from nozomi.rendering.javascript_class import JavaScriptClass from nozomi.rendering.script import Script from nozomi.resources.open import OpenResource from nozomi.resources.resource import Resource from nozomi.resources.secure import SecureResource from nozomi.resources.internal import InternalResource from nozomi.security.agent import Agent from nozomi.security.standalone_agent import StandaloneAgent from nozomi.security.broadcastable import Broadcastable from nozomi.security.considers_perspective import ConsidersPerspective from nozomi.security.cookies import Cookies from nozomi.security.credentials import Credentials from nozomi.security.internal_key import InternalKey from nozomi.security.ip_address import IpAddress from nozomi.security.permission_record import PermissionRecord from nozomi.security.perspective import Perspective from nozomi.security.privilege import Privilege from nozomi.security.protected import Protected from nozomi.security.read_protected import ReadProtected from 
nozomi.security.random_number import RandomNumber from nozomi.security.salt import Salt from nozomi.security.secret import Secret from nozomi.security.cors_policy import CORSPolicy from nozomi.security.access_control import AccessControl from nozomi.security.cookie_headers import CookieHeaders from nozomi.security.abstract_session import AbstractSession from nozomi.security.request_credentials import RequestCredentials from nozomi.security.forwarded_agent import ForwardedAgent from nozomi import api from nozomi import app from nozomi.translation.language import Language from nozomi.translation.text import Text from nozomi.translation.translated import Translated
44.347368
70
0.87681
406029865a12545eedb9156d61ad30ef142e917f
15,143
py
Python
src/pykeen/triples/leakage.py
DJRavinszkha/pykeen
d79fe39f83bc2831137f22be6421b37568694cf4
[ "MIT" ]
1
2021-03-24T13:25:54.000Z
2021-03-24T13:25:54.000Z
src/pykeen/triples/leakage.py
Moon-xm/pykeen
eeaf1d623aa881c0c897772372988390e1d8302d
[ "MIT" ]
null
null
null
src/pykeen/triples/leakage.py
Moon-xm/pykeen
eeaf1d623aa881c0c897772372988390e1d8302d
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Tools for removing the leakage from datasets. Leakage is when the inverse of a given training triple appears in either the testing or validation set. This scenario generally leads to inflated and misleading evaluation because predicting an inverse triple is usually very easy and not a sign of the generalizability of a model to predict novel triples. """ import logging from collections import defaultdict from typing import Collection, Dict, Iterable, List, Mapping, Optional, Set, Tuple, TypeVar, Union import numpy import scipy.sparse import torch from pykeen.datasets.base import EagerDataset from pykeen.triples.triples_factory import TriplesFactory, cat_triples from pykeen.typing import MappedTriples from pykeen.utils import compact_mapping __all__ = [ 'Sealant', 'unleak', 'reindex', ] logger = logging.getLogger(__name__) X = TypeVar('X') Y = TypeVar('Y') def find(x: X, parent: Mapping[X, X]) -> X: # check validity if x not in parent: raise ValueError(f'Unknown element: {x}.') # path compression while parent[x] != x: x, parent[x] = parent[x], parent[parent[x]] # type: ignore return x def _get_connected_components(pairs: Iterable[Tuple[X, X]]) -> Collection[Collection[X]]: # collect connected components using union find with path compression parent: Dict[X, X] = dict() for x, y in pairs: parent.setdefault(x, x) parent.setdefault(y, y) # get representatives x = find(x=x, parent=parent) y = find(x=y, parent=parent) # already merged if x == y: continue # make x the smaller one if y < x: # type: ignore x, y = y, x # merge parent[y] = x # extract partitions result = defaultdict(list) for k, v in parent.items(): result[v].append(k) return list(result.values()) def _select_by_most_pairs( components: Collection[Collection[int]], size: Mapping[int, int], ) -> Collection[int]: """For each connected component, keep the relation with the most associated pairs and return the remaining relations for deletion.""" result: Set[int] = set() for component in components: keep = max(component, key=size.__getitem__) result.update(r for r in component if r != keep) return result def jaccard_similarity_scipy( a: scipy.sparse.spmatrix, b: scipy.sparse.spmatrix, ) -> numpy.ndarray: r"""Compute the Jaccard similarity between sets represented as sparse matrices. The similarity is computed as .. math :: J(A, B) = \frac{|A \cap B|}{|A \cup B|} = \frac{|A \cap B|}{|A| + |B| - |A \cap B|} where the intersection can be computed in one batch as a matrix product. :param a: shape: (m, max_num_elements) The first sets. :param b: shape: (n, max_num_elements) The second sets. :return: shape: (m, n) The pairwise Jaccard similarity. """ sum_size = numpy.asarray(a.sum(axis=1) + b.sum(axis=1).T) intersection_size = numpy.asarray((a @ b.T).todense()) # safe division for empty sets divisor = numpy.clip(sum_size - intersection_size, a_min=1, a_max=None) return intersection_size / divisor def triples_factory_to_sparse_matrices( triples_factory: TriplesFactory, ) -> Tuple[scipy.sparse.spmatrix, scipy.sparse.spmatrix]: """Compute relation representations as sparse matrices of entity pairs. .. note :: Both sets, head-tail-set, tail-head-set, have to be created at once since they need to share the same entity pair to Id mapping. :param triples_factory: The triples factory. :return: shape: (num_relations, num_entity_pairs) head-tail-set, tail-head-set matrices as {0, 1} integer matrices. 
""" return mapped_triples_to_sparse_matrices( triples_factory.mapped_triples, num_relations=triples_factory.num_relations, ) def _to_one_hot( rows: torch.LongTensor, cols: torch.LongTensor, shape: Tuple[int, int], ) -> scipy.sparse.spmatrix: """Create a one-hot matrix given indices of non-zero elements (potentially containing duplicates).""" rows, cols = torch.stack([rows, cols], dim=0).unique(dim=1).numpy() values = numpy.ones(rows.shape[0], dtype=numpy.int32) return scipy.sparse.coo_matrix( (values, (rows, cols)), shape=shape, dtype=numpy.int32, ) def mapped_triples_to_sparse_matrices( mapped_triples: MappedTriples, num_relations: int, ) -> Tuple[scipy.sparse.spmatrix, scipy.sparse.spmatrix]: """Compute relation representations as sparse matrices of entity pairs. .. note :: Both sets, head-tail-set, tail-head-set, have to be created at once since they need to share the same entity pair to Id mapping. :param mapped_triples: The input triples. :param num_relations: The number of input relations :return: shape: (num_relations, num_entity_pairs) head-tail-set, tail-head-set matrices as {0, 1} integer matrices. """ num_triples = mapped_triples.shape[0] # compute unique pairs in triples *and* inverted triples for consistent pair-to-id mapping extended_mapped_triples = torch.cat( [ mapped_triples, mapped_triples.flip(-1), ], dim=0, ) pairs, pair_id = extended_mapped_triples[:, [0, 2]].unique(dim=0, return_inverse=True) n_pairs = pairs.shape[0] forward, backward = pair_id.split(num_triples) relations = mapped_triples[:, 1] rel = _to_one_hot(rows=relations, cols=forward, shape=(num_relations, n_pairs)) inv = _to_one_hot(rows=relations, cols=backward, shape=(num_relations, n_pairs)) return rel, inv def get_candidate_pairs( *, a: scipy.sparse.spmatrix, b: Optional[scipy.sparse.spmatrix] = None, threshold: float, no_self: bool = True, ) -> Set[Tuple[int, int]]: """Find pairs of sets with Jaccard similarity above threshold using :func:`jaccard_similarity_scipy`. :param a: The first set. :param b: The second set. If not specified, reuse the first set. :param threshold: The threshold above which the similarity has to be. :param no_self: Whether to exclude (i, i) pairs. :return: A set of index pairs. """ if b is None: b = a # duplicates sim = jaccard_similarity_scipy(a, b) if no_self: # we are not interested in self-similarity num = sim.shape[0] idx = numpy.arange(num) sim[idx, idx] = 0 return set(zip(*(sim >= threshold).nonzero())) class Sealant: """Stores inverse frequencies and inverse mappings in a given triples factory.""" triples_factory: TriplesFactory minimum_frequency: float inverses: Mapping[int, int] inverse_relations_to_delete: Set[int] def __init__( self, triples_factory: TriplesFactory, minimum_frequency: Optional[float] = None, symmetric: bool = True, ): """Index the inverse frequencies and the inverse relations in the triples factory. :param triples_factory: The triples factory to index. :param minimum_frequency: The minimum overlap between two relations' triples to consider them as inverses. The default value, 0.97, is taken from `Toutanova and Chen (2015) <https://www.aclweb.org/anthology/W15-4007/>`_, who originally described the generation of FB15k-237. 
""" self.triples_factory = triples_factory if minimum_frequency is None: minimum_frequency = 0.97 self.minimum_frequency = minimum_frequency # compute similarities if symmetric: rel, inv = triples_factory_to_sparse_matrices(triples_factory=triples_factory) self.candidate_duplicate_relations = get_candidate_pairs(a=rel, threshold=self.minimum_frequency) self.candidate_inverse_relations = get_candidate_pairs(a=rel, b=inv, threshold=self.minimum_frequency) else: raise NotImplementedError logger.info( f'identified {len(self.candidate_duplicate_relations)} candidate duplicate relationships' f' at similarity > {self.minimum_frequency} in {self.triples_factory}.', ) logger.info( f'identified {len(self.candidate_inverse_relations)} candidate inverse pairs' f' at similarity > {self.minimum_frequency} in {self.triples_factory}', ) self.candidates = set(self.candidate_duplicate_relations).union(self.candidate_inverse_relations) sizes = dict(zip(*triples_factory.mapped_triples[:, 1].unique(return_counts=True))) self.relations_to_delete = _select_by_most_pairs( size=sizes, components=_get_connected_components( (a, b) for a, b in self.candidates if a != b ), ) logger.info(f'identified {len(self.candidates)} from {self.triples_factory} to delete') def apply(self, triples_factory: TriplesFactory) -> TriplesFactory: """Make a new triples factory containing neither duplicate nor inverse relationships.""" return triples_factory.new_with_restriction(relations=self.relations_to_delete, invert_relation_selection=True) def unleak( train: TriplesFactory, *triples_factories: TriplesFactory, n: Union[None, int, float] = None, minimum_frequency: Optional[float] = None, ) -> Iterable[TriplesFactory]: """Unleak a train, test, and validate triples factory. :param train: The target triples factory :param triples_factories: All other triples factories (test, validate, etc.) :param n: Either the (integer) number of top relations to keep or the (float) percentage of top relationships to keep. If left none, frequent relations are not removed. :param minimum_frequency: The minimum overlap between two relations' triples to consider them as inverses or duplicates. The default value, 0.97, is taken from `Toutanova and Chen (2015) <https://www.aclweb.org/anthology/W15-4007/>`_, who originally described the generation of FB15k-237. """ if n is not None: frequent_relations = train.get_most_frequent_relations(n=n) logger.info(f'keeping most frequent relations from {train}') train = train.new_with_restriction(relations=frequent_relations) triples_factories = tuple( triples_factory.new_with_restriction(relations=frequent_relations) for triples_factory in triples_factories ) # Calculate which relations are the inverse ones sealant = Sealant(train, minimum_frequency=minimum_frequency) if not sealant.relations_to_delete: logger.info(f'no relations to delete identified from {train}') else: train = sealant.apply(train) triples_factories = tuple( sealant.apply(triples_factory) for triples_factory in triples_factories ) return reindex(train, *triples_factories) def _generate_compact_vectorized_lookup( ids: torch.LongTensor, label_to_id: Mapping[str, int], ) -> Tuple[Mapping[str, int], torch.LongTensor]: """ Given a tensor of IDs and a label to ID mapping, retain only occurring IDs, and compact the mapping. Additionally returns a vectorized translation, i.e. a tensor `translation` of shape (max_old_id,) with `translation[old_id] = new_id` for all translated IDs and `translation[old_id] = -1` for non-occurring IDs. 
This allows to use `translation[ids]` to translate the IDs as a simple integer index based lookup. :param ids: The tensor of IDs. :param label_to_id: The label to ID mapping. :return: A tuple new_label_to_id, vectorized_translation. """ # get existing IDs existing_ids = set(ids.view(-1).unique().tolist()) # remove non-existing ID from label mapping label_to_id, old_to_new_id = compact_mapping(mapping={ label: i for label, i in label_to_id.items() if i in existing_ids }) # create translation tensor translation = torch.full(size=(max(existing_ids) + 1,), fill_value=-1) for old, new in old_to_new_id.items(): translation[old] = new return label_to_id, translation def _translate_triples( triples: MappedTriples, entity_translation: torch.LongTensor, relation_translation: torch.LongTensor, ) -> MappedTriples: """ Translate triples given vectorized translations for entities and relations. :param triples: shape: (num_triples, 3) The original triples :param entity_translation: shape: (num_old_entity_ids,) The translation from old to new entity IDs. :param relation_translation: shape: (num_old_relation_ids,) The translation from old to new relation IDs. :return: shape: (num_triples, 3) The translated triples. """ triples = torch.stack( [ trans[column] for column, trans in zip( triples.t(), (entity_translation, relation_translation, entity_translation), ) ], dim=-1, ) assert (triples >= 0).all() return triples def reindex(*triples_factories: TriplesFactory) -> List[TriplesFactory]: """Reindex a set of triples factories.""" # get entities and relations occurring in triples all_triples = cat_triples(*triples_factories) # generate ID translation and new label to Id mappings one_factory = triples_factories[0] (entity_to_id, entity_id_translation), (relation_to_id, relation_id_translation) = [ _generate_compact_vectorized_lookup( ids=all_triples[:, cols], label_to_id=label_to_id, ) for cols, label_to_id in ( ([0, 2], one_factory.entity_to_id), (1, one_factory.relation_to_id), ) ] return [ TriplesFactory( entity_to_id=entity_to_id, relation_to_id=relation_to_id, mapped_triples=_translate_triples( triples=factory.mapped_triples, entity_translation=entity_id_translation, relation_translation=relation_id_translation, ), create_inverse_triples=factory.create_inverse_triples, ) for factory in triples_factories ] def _main(): """Test unleaking FB15K. Run with ``python -m pykeen.triples.leakage``. """ from pykeen.datasets import get_dataset logging.basicConfig(format='pykeen: %(message)s', level=logging.INFO) fb15k = get_dataset(dataset='fb15k') fb15k.summarize() n = 401 # magic 401 from the paper train, test, validate = unleak(fb15k.training, fb15k.testing, fb15k.validation, n=n) print() EagerDataset(train, test, validate).summarize(title='FB15k (cleaned)') fb15k237 = get_dataset(dataset='fb15k237') print('\nSummary FB15K-237') fb15k237.summarize() if __name__ == '__main__': _main()
34.652174
119
0.668824
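The jaccard_similarity_scipy helper in the record above obtains every pairwise intersection size from a single sparse matrix product and recovers the union size as |A| + |B| - |A ∩ B|. A small self-contained check of that identity; the function body mirrors the record, while the toy two-row matrix is purely illustrative.

import numpy as np
import scipy.sparse

def jaccard_similarity_scipy(a, b):
    # |A| + |B| broadcast to an (m, n) grid of pairwise set-size sums.
    sum_size = np.asarray(a.sum(axis=1) + b.sum(axis=1).T)
    # One matrix product yields all pairwise intersection sizes at once.
    intersection_size = np.asarray((a @ b.T).todense())
    # Clip to avoid dividing by zero when both sets are empty.
    divisor = np.clip(sum_size - intersection_size, a_min=1, a_max=None)
    return intersection_size / divisor

# Rows encode the sets {0, 1} and {1, 2} over four possible elements.
a = scipy.sparse.csr_matrix(np.array([[1, 1, 0, 0],
                                      [0, 1, 1, 0]], dtype=np.int32))
print(jaccard_similarity_scipy(a, a))
# [[1.         0.33333333]
#  [0.33333333 1.        ]]  (off-diagonal: |{1}| / |{0, 1, 2}| = 1/3)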
6895e7188b65cf5cc75a6948144a93d2f50a0c35
1,310
py
Python
venv/lib/python2.7/site-packages/IPython/html/terminal/api_handlers.py
mutaihillary/mycalculator
55685dd7c968861f18ae0701129f5af2bc682d67
[ "MIT" ]
null
null
null
venv/lib/python2.7/site-packages/IPython/html/terminal/api_handlers.py
mutaihillary/mycalculator
55685dd7c968861f18ae0701129f5af2bc682d67
[ "MIT" ]
7
2021-02-08T20:22:15.000Z
2022-03-11T23:19:41.000Z
venv/lib/python2.7/site-packages/IPython/html/terminal/api_handlers.py
mutaihillary/mycalculator
55685dd7c968861f18ae0701129f5af2bc682d67
[ "MIT" ]
null
null
null
import json from tornado import web, gen from ..base.handlers import IPythonHandler, json_errors from ..utils import url_path_join class TerminalRootHandler(IPythonHandler): @web.authenticated @json_errors def get(self): tm = self.terminal_manager terms = [{'name': name} for name in tm.terminals] self.finish(json.dumps(terms)) @web.authenticated @json_errors def post(self): """POST /terminals creates a new terminal and redirects to it""" name, _ = self.terminal_manager.new_named_terminal() self.finish(json.dumps({'name': name})) class TerminalHandler(IPythonHandler): SUPPORTED_METHODS = ('GET', 'DELETE') @web.authenticated @json_errors def get(self, name): tm = self.terminal_manager if name in tm.terminals: self.finish(json.dumps({'name': name})) else: raise web.HTTPError(404, "Terminal not found: %r" % name) @web.authenticated @json_errors @gen.coroutine def delete(self, name): tm = self.terminal_manager if name in tm.terminals: yield tm.terminate(name, force=True) self.set_status(204) self.finish() else: raise web.HTTPError(404, "Terminal not found: %r" % name)
29.111111
72
0.633588
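The two handlers above expose a small REST surface: POST /terminals creates a named terminal, GET /terminals/<name> looks one up, and DELETE terminates it and answers 204. A hedged client sketch using requests; the base URL and the token header are assumptions about a locally running notebook server, not values fixed by the record.

import requests

BASE = "http://localhost:8888/api/terminals"       # assumed mount point
HEADERS = {"Authorization": "token <your-token>"}  # assumed auth scheme

resp = requests.post(BASE, headers=HEADERS)        # create a terminal
name = resp.json()["name"]

info = requests.get("%s/%s" % (BASE, name), headers=HEADERS)
print(info.status_code, info.json())               # 200 {'name': ...}

gone = requests.delete("%s/%s" % (BASE, name), headers=HEADERS)
print(gone.status_code)                            # 204 once terminated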
f31f9a9c649793994e7cc243217b0df8d65e9137
2,299
py
Python
main.py
adakoda/U-2-Net-http
7010a7099aea067dea5c6c904e3ea64ee9833c2e
[ "MIT" ]
1
2020-12-24T06:40:14.000Z
2020-12-24T06:40:14.000Z
main.py
adakoda/U-2-Net-http
7010a7099aea067dea5c6c904e3ea64ee9833c2e
[ "MIT" ]
null
null
null
main.py
adakoda/U-2-Net-http
7010a7099aea067dea5c6c904e3ea64ee9833c2e
[ "MIT" ]
null
null
null
import io import os import sys import cv2 import numpy as np import torch from PIL import Image from flask import Flask, request, send_file from flask_cors import CORS sys.path.append('U-2-Net') from u2net_portrait_demo import detect_single_face, crop_face, inference from model import U2NET app = Flask(__name__) CORS(app) @app.route('/', methods=['POST']) def run(): data = request.files['data'].read() pil_img = Image.open(io.BytesIO(data)) if pil_img.size[0] > 1024 or pil_img.size[1] > 1024: pil_img.thumbnail((1024, 1024)) torch.cuda.empty_cache() cfg_net = app.config['U2N_NET'] cfg_face_cascade = app.config['U2N_FACE_CASCADE'] cv_img = pil_to_cv(pil_img) cv_face = detect_single_face(cfg_face_cascade, cv_img) cv_im_face = crop_face(cv_img, cv_face) cv_im_portrait = inference(cfg_net, cv_im_face) pil_result = cv_to_pil((cv_im_portrait * 255).astype(np.uint8)) buf = io.BytesIO() pil_result.save(buf, 'PNG') buf.seek(0) return send_file(buf, mimetype='image/png') def pil_to_cv(img): new_img = np.array(img, dtype=np.uint8) if new_img.ndim == 2: pass elif new_img.shape[2] == 3: new_img = new_img[:, :, ::-1] elif new_img.shape[2] == 4: new_img = new_img[:, :, [2, 1, 0, 3]] return new_img def cv_to_pil(img): new_img = img.copy() if new_img.ndim == 2: pass elif new_img.shape[2] == 3: new_img = new_img[:, :, ::-1] elif new_img.shape[2] == 4: new_img = new_img[:, :, [2, 1, 0, 3]] new_img = Image.fromarray(new_img) return new_img if __name__ == '__main__': face_cascade = cv2.CascadeClassifier( './U-2-Net/saved_models/face_detection_cv2/haarcascade_frontalface_default.xml') model_dir = './U-2-Net/saved_models/u2net_portrait/u2net_portrait.pth' net = U2NET(3, 1) if torch.cuda.is_available(): net.load_state_dict(torch.load(model_dir)) else: net.load_state_dict(torch.load(model_dir, map_location=torch.device('cpu'))) if torch.cuda.is_available(): net.cuda() net.eval() app.config['U2N_NET'] = net app.config['U2N_FACE_CASCADE'] = face_cascade port = int(os.environ.get('PORT', 8080)) app.run(debug=True, host='0.0.0.0', port=port)
27.369048
88
0.658112
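The Flask app above reads a multipart upload from the field named data, shrinks any image larger than 1024 px, and streams the portrait back as a PNG. A client sketch matching that contract; the host and port mirror the app's own defaults, and both file names are illustrative.

import requests

with open("portrait.jpg", "rb") as f:  # any local photo; the name is illustrative
    resp = requests.post("http://localhost:8080/", files={"data": f})

resp.raise_for_status()
with open("portrait_sketch.png", "wb") as out:
    out.write(resp.content)  # PNG bytes produced by send_file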
0b4534fa19c2416782b92c20b996ac75fc801f92
1,019
py
Python
__init__.py
craymichael/ArgValidation
05369edd1ad4c345f655a0e153b60ec56366d76a
[ "Apache-2.0" ]
null
null
null
__init__.py
craymichael/ArgValidation
05369edd1ad4c345f655a0e153b60ec56366d76a
[ "Apache-2.0" ]
null
null
null
__init__.py
craymichael/ArgValidation
05369edd1ad4c345f655a0e153b60ec56366d76a
[ "Apache-2.0" ]
null
null
null
# ====================================================================================================================== # Copyright 2016 Zachariah Carmichael # # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ====================================================================================================================== from ._req import req # Decorator for validation requirements from ._req import ARGS, KWARGS # Special cases from ._arg import Arg, _Arg # `Arg` validation class and abstract base
63.6875
121
0.578018
ea08a1c7cbf33927d7770ca9147509b2bee0bd15
3,299
py
Python
src/gan-dogs/model/callbacks.py
caciolai/Generative-Dog-Images-with-BigGan
535d5a9c35ad4187a6f0099c4a02772a45c4a064
[ "MIT" ]
null
null
null
src/gan-dogs/model/callbacks.py
caciolai/Generative-Dog-Images-with-BigGan
535d5a9c35ad4187a6f0099c4a02772a45c4a064
[ "MIT" ]
null
null
null
src/gan-dogs/model/callbacks.py
caciolai/Generative-Dog-Images-with-BigGan
535d5a9c35ad4187a6f0099c4a02772a45c4a064
[ "MIT" ]
null
null
null
import tensorflow as tf from tensorflow.keras.applications.inception_v3 import InceptionV3 from .utils import scale_images, calculate_fid, plot_imgs_grid class FIDCallback(tf.keras.callbacks.Callback): """Callback to calculate FID score during training """ def __init__(self, dataset, num_classes, period=5): super().__init__() self.period = period self.dataset = iter(dataset) self.num_classes = num_classes self.inception_model = InceptionV3( include_top=False, pooling='avg', input_shape=(299, 299, 3)) def compute_fid(self): num_classes = self.num_classes # real images real_images, real_labels = next(self.dataset) num_images = real_images.shape[0] latent_dim = self.model.latent_dim # generated images latent_samples = tf.random.truncated_normal( shape=(num_images, latent_dim)) random_labels = tf.math.floor( num_classes * tf.random.uniform((num_images, 1))) inputs = (latent_samples, random_labels) generated_images = self.model(inputs, training=False) # resize images real_images = scale_images(real_images, (299, 299, 3)) generated_images = scale_images(generated_images, (299, 299, 3)) # calculate fid fid = calculate_fid(self.inception_model, real_images, generated_images) return fid def on_epoch_end(self, epoch, logs=None): if epoch % self.period != 0: return fid = self.compute_fid() print(f"\n\n === FID: {fid} ===\n") class PlotImagesCallback(tf.keras.callbacks.Callback): """Callback to plot images during training (evaluation pass) """ def __init__(self, num_classes, n_images=16, period=10): super().__init__() self.num_classes = num_classes self.n_images = n_images self.period = period def on_epoch_end(self, epoch, logs=None): if epoch % self.period != 0: return num_classes = self.num_classes latent_dim = self.model.latent_dim latent_sample = tf.random.truncated_normal( shape=(self.n_images, latent_dim)) random_labels = tf.math.floor( num_classes * tf.random.uniform((self.n_images, 1))) inputs = (latent_sample, random_labels) imgs = self.model(inputs, training=False) plot_imgs_grid(imgs) def get_callbacks(train_dataset, num_classes, checkpoint_path=None): """Return selected list of callbacks to employ during training. Args: train_dataset (tf.data.Dataset): training dataset num_classes (int): number of classes in the training dataset checkpoint_path (str, optional): path where to save model training progress. Defaults to None. Returns: List[tf.keras.callbacks.Callback]: list of callbacks """ callbacks = [] if checkpoint_path: callbacks.append( tf.keras.callbacks.ModelCheckpoint( checkpoint_path, verbose=0, mode='auto', save_freq='epoch', options=None ) ) callbacks.append( FIDCallback( train_dataset, num_classes, period=10 ) ) callbacks.append( PlotImagesCallback( num_classes, n_images=16, period=10 ) ) return callbacks
28.686957
102
0.630494
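Both callbacks above assemble generator inputs the same way: a truncated normal latent batch (the truncation trick) plus integer class labels drawn by flooring a scaled uniform sample. That sampling step in isolation; every size below is illustrative and the generator itself is assumed.

import tensorflow as tf

batch, latent_dim, num_classes = 16, 128, 120  # illustrative sizes

# Truncated normal resamples draws beyond two standard deviations,
# trading sample diversity for fidelity.
latent = tf.random.truncated_normal(shape=(batch, latent_dim))

# Uniform in [0, num_classes), floored to integer-valued class ids.
labels = tf.math.floor(num_classes * tf.random.uniform((batch, 1)))

print(latent.shape, labels.shape)  # (16, 128) (16, 1)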
fae60cb8a75710a31bba52135ace73cfce224429
54,401
py
Python
jamf/api/departments_api.py
jensenbox/python-jamf
85213085b1064a00375a7aa7df5e33c19f5178eb
[ "RSA-MD" ]
1
2021-04-20T15:28:57.000Z
2021-04-20T15:28:57.000Z
jamf/api/departments_api.py
jensenbox/python-jamf
85213085b1064a00375a7aa7df5e33c19f5178eb
[ "RSA-MD" ]
null
null
null
jamf/api/departments_api.py
jensenbox/python-jamf
85213085b1064a00375a7aa7df5e33c19f5178eb
[ "RSA-MD" ]
null
null
null
# coding: utf-8 """ Jamf Pro API ## Overview This is a sample Jamf Pro server which allows for usage without any authentication. The Jamf Pro environment which supports the Try it Out functionality does not run the current beta version of Jamf Pro, thus any newly added endpoints will result in an error and should be used solely for documentation purposes. # noqa: E501 The version of the OpenAPI document: 10.25.0 Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from jamf.api_client import ApiClient from jamf.exceptions import ( # noqa: F401 ApiTypeError, ApiValueError ) class DepartmentsApi(object): """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def v1_departments_delete_multiple_post(self, ids, **kwargs): # noqa: E501 """Deletes all departments by ids passed in body # noqa: E501 Deletes all departments by ids passed in body # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_delete_multiple_post(ids, async_req=True) >>> result = thread.get() :param ids: ids of departments to be deleted. pass in an array of ids (required) :type ids: Ids :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: None """ kwargs['_return_http_data_only'] = True return self.v1_departments_delete_multiple_post_with_http_info(ids, **kwargs) # noqa: E501 def v1_departments_delete_multiple_post_with_http_info(self, ids, **kwargs): # noqa: E501 """Deletes all departments by ids passed in body # noqa: E501 Deletes all departments by ids passed in body # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_delete_multiple_post_with_http_info(ids, async_req=True) >>> result = thread.get() :param ids: ids of departments to be deleted. pass in an array of ids (required) :type ids: Ids :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. 
If the method is called asynchronously, returns the request thread. :rtype: None """ local_var_params = locals() all_params = [ 'ids' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_delete_multiple_post" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'ids' is set if self.api_client.client_side_validation and ('ids' not in local_var_params or # noqa: E501 local_var_params['ids'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `ids` when calling `v1_departments_delete_multiple_post`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'ids' in local_var_params: body_params = local_var_params['ids'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 response_types_map = {} return self.api_client.call_api( '/v1/departments/delete-multiple', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def v1_departments_get(self, **kwargs): # noqa: E501 """Search for Departments # noqa: E501 Search for Departments # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_get(async_req=True) >>> result = thread.get() :param page: :type page: int :param page_size: :type page_size: int :param sort: Sorting criteria in the format: property:asc/desc. Default sort is id:asc. Multiple sort criteria are supported and must be separated with a comma. Example: sort=date:desc,name:asc :type sort: list[str] :param filter: Query in the RSQL format, allowing to filter department collection. Default filter is empty query - returning all results for the requested page. Fields allowed in the query: id, name. Example: name==\"*department*\" :type filter: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: DepartmentsSearchResults """ kwargs['_return_http_data_only'] = True return self.v1_departments_get_with_http_info(**kwargs) # noqa: E501 def v1_departments_get_with_http_info(self, **kwargs): # noqa: E501 """Search for Departments # noqa: E501 Search for Departments # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_get_with_http_info(async_req=True) >>> result = thread.get() :param page: :type page: int :param page_size: :type page_size: int :param sort: Sorting criteria in the format: property:asc/desc. Default sort is id:asc. Multiple sort criteria are supported and must be separated with a comma. Example: sort=date:desc,name:asc :type sort: list[str] :param filter: Query in the RSQL format, allowing to filter department collection. Default filter is empty query - returning all results for the requested page. Fields allowed in the query: id, name. Example: name==\"*department*\" :type filter: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: tuple(DepartmentsSearchResults, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'page', 'page_size', 'sort', 'filter' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_get" % key ) local_var_params[key] = val del local_var_params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'page' in local_var_params and local_var_params['page'] is not None: # noqa: E501 query_params.append(('page', local_var_params['page'])) # noqa: E501 if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 query_params.append(('page-size', local_var_params['page_size'])) # noqa: E501 if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501 query_params.append(('sort', local_var_params['sort'])) # noqa: E501 collection_formats['sort'] = 'multi' # noqa: E501 if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 query_params.append(('filter', local_var_params['filter'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 response_types_map = { 200: "DepartmentsSearchResults", } return self.api_client.call_api( '/v1/departments', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def v1_departments_id_delete(self, id, **kwargs): # noqa: E501 """Remove specified department record # noqa: E501 Removes specified department record # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_delete(id, async_req=True) >>> result = thread.get() :param id: instance id of department record (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: None """ kwargs['_return_http_data_only'] = True return self.v1_departments_id_delete_with_http_info(id, **kwargs) # noqa: E501 def v1_departments_id_delete_with_http_info(self, id, **kwargs): # noqa: E501 """Remove specified department record # noqa: E501 Removes specified department record # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_delete_with_http_info(id, async_req=True) >>> result = thread.get() :param id: instance id of department record (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: None """ local_var_params = locals() all_params = [ 'id' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_id_delete" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `v1_departments_id_delete`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 response_types_map = {} return self.api_client.call_api( '/v1/departments/{id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def v1_departments_id_get(self, id, **kwargs): # noqa: E501 """Get specified Department object # noqa: E501 Gets specified Department object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_get(id, async_req=True) >>> result = thread.get() :param id: instance id of department record (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: Department """ kwargs['_return_http_data_only'] = True return self.v1_departments_id_get_with_http_info(id, **kwargs) # noqa: E501 def v1_departments_id_get_with_http_info(self, id, **kwargs): # noqa: E501 """Get specified Department object # noqa: E501 Gets specified Department object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_get_with_http_info(id, async_req=True) >>> result = thread.get() :param id: instance id of department record (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(Department, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_id_get" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `v1_departments_id_get`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 response_types_map = { 200: "Department", 404: "ApiError", } return self.api_client.call_api( '/v1/departments/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def 
v1_departments_id_history_get(self, id, **kwargs): # noqa: E501 """Get specified Department history object # noqa: E501 Gets specified Department history object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_history_get(id, async_req=True) >>> result = thread.get() :param id: instance id of department history record (required) :type id: str :param page: :type page: int :param page_size: :type page_size: int :param sort: Sorting criteria in the format: property:asc/desc. Default sort is date:desc. Multiple sort criteria are supported and must be separated with a comma. Example: sort=date:desc,name:asc :type sort: list[str] :param filter: Query in the RSQL format, allowing to filter history notes collection. Default filter is empty query - returning all results for the requested page. Fields allowed in the query: username, date, note, details. This param can be combined with paging and sorting. Example: filter=username!=admin and details==*disabled* and date<2019-12-15 :type filter: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: HistorySearchResults """ kwargs['_return_http_data_only'] = True return self.v1_departments_id_history_get_with_http_info(id, **kwargs) # noqa: E501 def v1_departments_id_history_get_with_http_info(self, id, **kwargs): # noqa: E501 """Get specified Department history object # noqa: E501 Gets specified Department history object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_history_get_with_http_info(id, async_req=True) >>> result = thread.get() :param id: instance id of department history record (required) :type id: str :param page: :type page: int :param page_size: :type page_size: int :param sort: Sorting criteria in the format: property:asc/desc. Default sort is date:desc. Multiple sort criteria are supported and must be separated with a comma. Example: sort=date:desc,name:asc :type sort: list[str] :param filter: Query in the RSQL format, allowing to filter history notes collection. Default filter is empty query - returning all results for the requested page. Fields allowed in the query: username, date, note, details. This param can be combined with paging and sorting. Example: filter=username!=admin and details==*disabled* and date<2019-12-15 :type filter: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(HistorySearchResults, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id', 'page', 'page_size', 'sort', 'filter' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_id_history_get" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `v1_departments_id_history_get`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] if 'page' in local_var_params and local_var_params['page'] is not None: # noqa: E501 query_params.append(('page', local_var_params['page'])) # noqa: E501 if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 query_params.append(('page-size', local_var_params['page_size'])) # noqa: E501 if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501 query_params.append(('sort', local_var_params['sort'])) # noqa: E501 collection_formats['sort'] = 'multi' # noqa: E501 if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 query_params.append(('filter', local_var_params['filter'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 response_types_map = { 200: "HistorySearchResults", 404: "ApiError", } return self.api_client.call_api( '/v1/departments/{id}/history', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def v1_departments_id_history_post(self, id, object_history_note, **kwargs): # noqa: E501 """Add specified Department history object notes # noqa: E501 Adds specified Department history object notes # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_history_post(id, object_history_note, async_req=True) >>> result = thread.get() :param id: instance id of department history record (required) :type id: str :param object_history_note: history notes to create (required) :type object_history_note: ObjectHistoryNote :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: HrefResponse """ kwargs['_return_http_data_only'] = True return self.v1_departments_id_history_post_with_http_info(id, object_history_note, **kwargs) # noqa: E501 def v1_departments_id_history_post_with_http_info(self, id, object_history_note, **kwargs): # noqa: E501 """Add specified Department history object notes # noqa: E501 Adds specified Department history object notes # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_history_post_with_http_info(id, object_history_note, async_req=True) >>> result = thread.get() :param id: instance id of department history record (required) :type id: str :param object_history_note: history notes to create (required) :type object_history_note: ObjectHistoryNote :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: tuple(HrefResponse, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id', 'object_history_note' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_id_history_post" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `v1_departments_id_history_post`") # noqa: E501 # verify the required parameter 'object_history_note' is set if self.api_client.client_side_validation and ('object_history_note' not in local_var_params or # noqa: E501 local_var_params['object_history_note'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `object_history_note` when calling `v1_departments_id_history_post`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'object_history_note' in local_var_params: body_params = local_var_params['object_history_note'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 response_types_map = { 201: "HrefResponse", 404: "ApiError", 503: "ApiError", } return self.api_client.call_api( '/v1/departments/{id}/history', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def v1_departments_id_put(self, id, department, **kwargs): # noqa: E501 """Update specified department object # noqa: E501 Update specified department object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_put(id, department, async_req=True) >>> result = thread.get() :param id: instance id of department record (required) :type id: str :param department: department object to create. ids defined in this body will be ignored (required) :type department: Department :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: Department """ kwargs['_return_http_data_only'] = True return self.v1_departments_id_put_with_http_info(id, department, **kwargs) # noqa: E501 def v1_departments_id_put_with_http_info(self, id, department, **kwargs): # noqa: E501 """Update specified department object # noqa: E501 Update specified department object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_id_put_with_http_info(id, department, async_req=True) >>> result = thread.get() :param id: instance id of department record (required) :type id: str :param department: department object to create. ids defined in this body will be ignored (required) :type department: Department :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: tuple(Department, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id', 'department' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_id_put" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `v1_departments_id_put`") # noqa: E501 # verify the required parameter 'department' is set if self.api_client.client_side_validation and ('department' not in local_var_params or # noqa: E501 local_var_params['department'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `department` when calling `v1_departments_id_put`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'department' in local_var_params: body_params = local_var_params['department'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 response_types_map = { 200: "Department", } return self.api_client.call_api( '/v1/departments/{id}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def v1_departments_post(self, department, **kwargs): # noqa: E501 """Create department record # noqa: E501 Create department record # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_post(department, async_req=True) >>> result = thread.get() :param department: department object to create. ids defined in this body will be ignored (required) :type department: Department :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: HrefResponse """ kwargs['_return_http_data_only'] = True return self.v1_departments_post_with_http_info(department, **kwargs) # noqa: E501 def v1_departments_post_with_http_info(self, department, **kwargs): # noqa: E501 """Create department record # noqa: E501 Create department record # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_departments_post_with_http_info(department, async_req=True) >>> result = thread.get() :param department: department object to create. ids defined in this body will be ignored (required) :type department: Department :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(HrefResponse, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'department' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method v1_departments_post" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'department' is set if self.api_client.client_side_validation and ('department' not in local_var_params or # noqa: E501 local_var_params['department'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `department` when calling `v1_departments_post`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'department' in local_var_params: body_params = local_var_params['department'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 response_types_map = { 201: "HrefResponse", } return self.api_client.call_api( '/v1/departments', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth'))
45.371977
359
0.599401
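The history endpoint above documents a `property:asc/desc` sort format and an RSQL `filter` grammar in its docstrings; the sketch below shows how those parameters might be combined. It assumes `api` is an already-constructed instance of the generated client class that holds these methods (its name and construction are not shown in this file); the sort and filter values come straight from the docstring examples.

def newest_non_admin_history(api, department_id):
    # Hedged usage sketch: `api` is assumed to be an instance of the generated
    # client above. Sort and filter syntax are taken from the docstrings.
    return api.v1_departments_id_history_get(
        department_id,
        page=0,
        page_size=100,
        sort=['date:desc', 'name:asc'],                # property:asc/desc format
        filter='username!=admin and date<2019-12-15',  # RSQL query
    )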
faa2597f1837886b608f6380b4bbc0f0d8ce5c9d
830
py
Python
pytype/tools/xref/testdata/attr.py
jjedele/pytype
3c5d920d26ac583bdfd68080e7db454ecb1dc900
[ "Apache-2.0" ]
null
null
null
pytype/tools/xref/testdata/attr.py
jjedele/pytype
3c5d920d26ac583bdfd68080e7db454ecb1dc900
[ "Apache-2.0" ]
null
null
null
pytype/tools/xref/testdata/attr.py
jjedele/pytype
3c5d920d26ac583bdfd68080e7db454ecb1dc900
[ "Apache-2.0" ]
null
null
null
# pylint: skip-file #- @A defines/binding ClassA #- ClassA.node/kind class class A(object): #- @__init__ defines/binding FnInit #- @self defines/binding ArgSelf #- FnInit.node/kind function #- FnInit param.0 ArgSelf def __init__(self): #- @self ref ArgSelf #- @foo defines/binding AttrFoo self.foo = [] def f(self, x): #- @foo ref AttrFoo self.foo[x] = 10 ## The attr can be initialised somewhere other than __init__ #- @B defines/binding ClassB #- ClassB.node/kind class class B(object): def f(self, x): #- @bar ref AttrBar self.bar[x] #- @init_bar defines/binding FnInitBar #- @self defines/binding ArgBSelf #- FnInitBar.node/kind function #- FnInitBar param.0 ArgBSelf def init_bar(self): #- @self ref ArgBSelf #- @bar defines/binding AttrBar self.bar = []
22.432432
60
0.661446
dfb4bd93b9bebe7aa48b9b232c986866b59182de
5,143
py
Python
QuickDrawApp.py
VikasChowdary/QuickDraw
5eb201c8a59162961878289ebeefd13aaa6159e5
[ "MIT" ]
null
null
null
QuickDrawApp.py
VikasChowdary/QuickDraw
5eb201c8a59162961878289ebeefd13aaa6159e5
[ "MIT" ]
null
null
null
QuickDrawApp.py
VikasChowdary/QuickDraw
5eb201c8a59162961878289ebeefd13aaa6159e5
[ "MIT" ]
null
null
null
import cv2
from keras.models import load_model
import numpy as np
from collections import deque
import os

model = load_model('QuickDraw.h5')


def main():
    emojis = get_QD_emojis()
    cap = cv2.VideoCapture(0)
    Lower_green = np.array([110, 50, 50])
    Upper_green = np.array([130, 255, 255])
    pts = deque(maxlen=512)
    blackboard = np.zeros((480, 640, 3), dtype=np.uint8)
    digit = np.zeros((200, 200, 3), dtype=np.uint8)
    pred_class = 0

    while (cap.isOpened()):
        ret, img = cap.read()
        img = cv2.flip(img, 1)
        hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
        kernel = np.ones((5, 5), np.uint8)
        mask = cv2.inRange(hsv, Lower_green, Upper_green)
        mask = cv2.erode(mask, kernel, iterations=2)
        mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
        # mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
        mask = cv2.dilate(mask, kernel, iterations=1)
        res = cv2.bitwise_and(img, img, mask=mask)
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]
        center = None

        if len(cnts) >= 1:
            cnt = max(cnts, key=cv2.contourArea)
            if cv2.contourArea(cnt) > 200:
                ((x, y), radius) = cv2.minEnclosingCircle(cnt)
                cv2.circle(img, (int(x), int(y)), int(radius), (0, 255, 255), 2)
                # Compute the centroid before drawing it (the original drew a
                # circle at `center` while it was still None).
                M = cv2.moments(cnt)
                center = (int(M['m10'] / M['m00']), int(M['m01'] / M['m00']))
                cv2.circle(img, center, 5, (0, 0, 255), -1)
                pts.appendleft(center)
                for i in range(1, len(pts)):
                    if pts[i - 1] is None or pts[i] is None:
                        continue
                    cv2.line(blackboard, pts[i - 1], pts[i], (255, 255, 255), 7)
                    cv2.line(img, pts[i - 1], pts[i], (0, 0, 255), 2)
        elif len(cnts) == 0:
            if len(pts) != 0:
                blackboard_gray = cv2.cvtColor(blackboard, cv2.COLOR_BGR2GRAY)
                blur1 = cv2.medianBlur(blackboard_gray, 15)
                blur1 = cv2.GaussianBlur(blur1, (5, 5), 0)
                thresh1 = cv2.threshold(blur1, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)[1]
                blackboard_cnts = cv2.findContours(thresh1.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)[-2]
                if len(blackboard_cnts) >= 1:
                    cnt = max(blackboard_cnts, key=cv2.contourArea)
                    print(cv2.contourArea(cnt))
                    if cv2.contourArea(cnt) > 200:
                        x, y, w, h = cv2.boundingRect(cnt)
                        digit = blackboard_gray[y:y + h, x:x + w]
                        pred_probab, pred_class = keras_predict(model, digit)
                        print(pred_class, pred_probab)

                pts = deque(maxlen=512)
                blackboard = np.zeros((480, 640, 3), dtype=np.uint8)

        img = overlay(img, emojis[pred_class], 400, 250, 100, 100)
        cv2.imshow("Draw_Here", img)
        k = cv2.waitKey(10)
        if k == 27:
            break


def keras_predict(model, image):
    processed = keras_process_image(image)
    print("processed: " + str(processed.shape))
    pred_probab = model.predict(processed)[0]
    pred_class = list(pred_probab).index(max(pred_probab))
    return max(pred_probab), pred_class


def keras_process_image(img):
    image_x = 28
    image_y = 28
    img = cv2.resize(img, (image_x, image_y))
    img = np.array(img, dtype=np.float32)
    img = np.reshape(img, (-1, image_x, image_y, 1))
    return img


def get_QD_emojis():
    # Raw string so the backslashes in the Windows path are not treated as
    # escape sequences.
    emojis_folder = r'E:\shristi_QuickDraw\QuickDraw-master\qd_emo/'
    emojis = []
    for emoji in range(len(os.listdir(emojis_folder))):
        print(emoji)
        emojis.append(cv2.imread(emojis_folder + str(emoji) + '.png', -1))
    return emojis


def overlay(image, emoji, x, y, w, h):
    emoji = cv2.resize(emoji, (w, h))
    try:
        image[y:y + h, x:x + w] = blend_transparent(image[y:y + h, x:x + w], emoji)
    except Exception:
        pass
    return image


def blend_transparent(face_img, overlay_t_img):
    # Split out the transparency mask from the colour info
    overlay_img = overlay_t_img[:, :, :3]  # Grab the BGR planes
    overlay_mask = overlay_t_img[:, :, 3:]  # And the alpha plane

    # Again calculate the inverse mask
    background_mask = 255 - overlay_mask

    # Turn the masks into three channels, so we can use them as weights
    overlay_mask = cv2.cvtColor(overlay_mask, cv2.COLOR_GRAY2BGR)
    background_mask = cv2.cvtColor(background_mask, cv2.COLOR_GRAY2BGR)

    # Create a masked out face image, and masked out overlay
    # We convert the images to floating point in range 0.0 - 1.0
    face_part = (face_img * (1 / 255.0)) * (background_mask * (1 / 255.0))
    overlay_part = (overlay_img * (1 / 255.0)) * (overlay_mask * (1 / 255.0))

    # And finally just add them together, and rescale it back to an 8bit integer image
    return np.uint8(cv2.addWeighted(face_part, 255.0, overlay_part, 255.0, 0.0))


keras_predict(model, np.zeros((50, 50, 1), dtype=np.uint8))

if __name__ == '__main__':
    main()
38.669173
108
0.590317
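blend_transparent above alpha-composites a 4-channel emoji over a BGR frame; the same per-pixel weighting can be seen in isolation on synthetic arrays. A minimal sketch (the sizes and colours below are arbitrary assumptions, not values from the app):

import numpy as np
import cv2

# Solid blue background and a half-transparent red overlay with an alpha plane.
background = np.zeros((100, 100, 3), dtype=np.uint8)
background[:, :] = (255, 0, 0)                      # BGR: blue

overlay_t = np.zeros((100, 100, 4), dtype=np.uint8)
overlay_t[:, :, 2] = 255                            # red plane
overlay_t[:, :, 3] = 128                            # ~50% alpha

overlay_img = overlay_t[:, :, :3]
overlay_mask = cv2.cvtColor(overlay_t[:, :, 3], cv2.COLOR_GRAY2BGR)
background_mask = cv2.cvtColor(255 - overlay_t[:, :, 3], cv2.COLOR_GRAY2BGR)

# Weight each source by its mask in 0.0 - 1.0, then sum and rescale to uint8.
face_part = (background * (1 / 255.0)) * (background_mask * (1 / 255.0))
overlay_part = (overlay_img * (1 / 255.0)) * (overlay_mask * (1 / 255.0))
blended = np.uint8(cv2.addWeighted(face_part, 255.0, overlay_part, 255.0, 0.0))
print(blended[0, 0])                                # roughly half blue, half red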
6dbf97240e66718d72732428fec9a50525ac08f1
1,478
py
Python
sympy/plotting/pygletplot/plot_rotation.py
ovolve/sympy
0a15782f20505673466b940454b33b8014a25c13
[ "BSD-3-Clause" ]
8
2019-05-29T09:38:30.000Z
2021-01-20T03:36:59.000Z
sympy/plotting/pygletplot/plot_rotation.py
ovolve/sympy
0a15782f20505673466b940454b33b8014a25c13
[ "BSD-3-Clause" ]
12
2021-03-09T03:01:16.000Z
2022-03-11T23:59:36.000Z
sympy/plotting/pygletplot/plot_rotation.py
ovolve/sympy
0a15782f20505673466b940454b33b8014a25c13
[ "BSD-3-Clause" ]
1
2018-10-22T09:17:11.000Z
2018-10-22T09:17:11.000Z
from __future__ import print_function, division try: from pyglet.gl.gl import c_float except ImportError: pass from pyglet.gl import * from math import sqrt as _sqrt, acos as _acos def cross(a, b): return (a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0]) def dot(a, b): return a[0] * b[0] + a[1] * b[1] + a[2] * b[2] def mag(a): return _sqrt(a[0]**2 + a[1]**2 + a[2]**2) def norm(a): m = mag(a) return (a[0] / m, a[1] / m, a[2] / m) def get_sphere_mapping(x, y, width, height): x = min([max([x, 0]), width]) y = min([max([y, 0]), height]) sr = _sqrt((width/2)**2 + (height/2)**2) sx = ((x - width / 2) / sr) sy = ((y - height / 2) / sr) sz = 1.0 - sx**2 - sy**2 if sz > 0.0: sz = _sqrt(sz) return (sx, sy, sz) else: sz = 0 return norm((sx, sy, sz)) rad2deg = 180.0 / 3.141592 def get_spherical_rotatation(p1, p2, width, height, theta_multiplier): v1 = get_sphere_mapping(p1[0], p1[1], width, height) v2 = get_sphere_mapping(p2[0], p2[1], width, height) d = min(max([dot(v1, v2), -1]), 1) if abs(d - 1.0) < 0.000001: return None raxis = norm( cross(v1, v2) ) rtheta = theta_multiplier * rad2deg * _acos(d) glPushMatrix() glLoadIdentity() glRotatef(rtheta, *raxis) mat = (c_float*16)() glGetFloatv(GL_MODELVIEW_MATRIX, mat) glPopMatrix() return mat
20.816901
70
0.535183
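get_spherical_rotatation above mixes the trackball math with OpenGL matrix calls; the math on its own is easy to check with NumPy. A minimal sketch (screen size and drag points are arbitrary assumptions; unlike the original, the mapping is normalised in every branch for simplicity):

import numpy as np

def sphere_map(x, y, width, height):
    # Trackball mapping corresponding to get_sphere_mapping above.
    sr = np.hypot(width / 2, height / 2)
    v = np.array([(x - width / 2) / sr, (y - height / 2) / sr, 0.0])
    sq = 1.0 - v[0] ** 2 - v[1] ** 2
    v[2] = np.sqrt(sq) if sq > 0.0 else 0.0
    return v / np.linalg.norm(v)

def spherical_rotation(p1, p2, width, height):
    # Rotation axis (unit vector) and angle in degrees between two drag points.
    v1 = sphere_map(p1[0], p1[1], width, height)
    v2 = sphere_map(p2[0], p2[1], width, height)
    d = np.clip(np.dot(v1, v2), -1.0, 1.0)
    axis = np.cross(v1, v2)
    return axis / np.linalg.norm(axis), np.degrees(np.arccos(d))

axis, theta = spherical_rotation((100, 120), (140, 160), 640, 480)
print(axis, theta)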
d5102a78079166d4e98094c0f0b03a735563a60e
3,934
py
Python
scripts/score_links4.py
aakashsur/docker-hirise
9b97cc4e7522e287aa2ee39c2993270e75b43a6d
[ "MIT" ]
null
null
null
scripts/score_links4.py
aakashsur/docker-hirise
9b97cc4e7522e287aa2ee39c2993270e75b43a6d
[ "MIT" ]
null
null
null
scripts/score_links4.py
aakashsur/docker-hirise
9b97cc4e7522e287aa2ee39c2993270e75b43a6d
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
#
# Copyright 2015 Dovetail Genomics LLC
#
#

from __future__ import print_function
from builtins import str
from builtins import map
import sys
import networkx as nx
import chicago_edge_scores as ces
import numpy as np
from scipy.stats import poisson
import math

#G=3.0e9
N = 100000000.0
pn = 0.3
G = 3000000000.0

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()

#    parser.add_argument('-t','--threshold',default=0.0 , type=float)
#    parser.add_argument('-b','--besthits')
    parser.add_argument('-d', '--debug', default=False, action='store_true')
#    parser.add_argument('-P','--plot',default=False , action='store_true')
    parser.add_argument('-M', '--set_insert_size_dist_fit_params', default=False)
#    parser.add_argument('-G','--maxTrueGap',type=int,default=False)
    parser.add_argument('-S', '--scoreDelta', type=float, default=2.0)
    parser.add_argument('-p', '--pvalue', type=float, default=0.000001)
    parser.add_argument('-E', '--endwindow', type=float, default=False, help="Ignore links where either read is buried more than endwindow bases from the end of its mapped contig.")
    parser.add_argument('-N', '--maxN', type=int, default=False)
    parser.add_argument('-L', '--minlen', type=int, default=1000)

    args = parser.parse_args()
    ces.debug = args.debug

    fmodel = open(args.set_insert_size_dist_fit_params)
    contents = fmodel.read()
    try:
        fit_params = eval(contents)
    except Exception:
        # The original referenced a nonexistent args.param here and silently
        # swallowed the error; report the offending option and re-raise.
        print("couldn't deal with option --set_insert_size_dist_fit_params:", contents)
        raise
    fmodel.close()
    ces.set_exp_insert_size_dist_fit_params(fit_params)

#    besthit={}
#    if args.besthits:
#        besthit={}
#    if args.besthits:
#        f = open(args.besthits)
#        while True:
#            l = f.readline()
#            if not l: break
#            if not l[:5]=="best:": continue
#            c=l.strip().split()
#            besthit[c[1]]=c[2:]
#            print c[1],besthit[c[1]]
#        f.close()
#    if args.progress: print("#Done reading besthits")

    def ppf_cached(y, cache={}):
        x = round(y, 4)
        if x == 0:
            return poisson.ppf(0.99999999, y)
        if x in cache:
            return cache[x]
        ppf = poisson.ppf(0.99999999, x)
        if np.isnan(ppf) or ppf == np.inf:
            print("wtf:", y, x)
        cache[x] = max(ppf, 1)
        return ppf

    def ppf_mtc(y, N, cache={}):
        pp = (1.0 - (args.pvalue / N))
        if pp == 1.0:
            pp = 1.0 - np.finfo(float).resolution
        x = round(y, 4)
        if x == 0:
            ppf = poisson.ppf(pp, y)
            if np.isnan(ppf) or np.isinf(ppf):
                print("wtf:", y, x, ppf, (1.0 - (args.pvalue / N)), N)
            return ppf
        if x in cache:
            return cache[x]
        ppf = poisson.ppf(pp, x)
        cache[x] = max(ppf, 1)
        return ppf

    n_done = 0
    G2 = (ces.model.G * ces.model.G)
    while (not args.maxN) or n_done < args.maxN:
        l = sys.stdin.readline()
        if not l:
            break
        if l[0] == "#":
            continue

        c = l.strip().split("\t")
        s1, s2, l1, l2, n = c[0], c[1], int(c[2]), int(c[3]), int(c[4])
        if l1 < args.minlen:
            continue
        if l2 < args.minlen:
            continue

        if args.endwindow and ((l1 > args.endwindow * 2) or (l2 > args.endwindow * 2)):
            links = eval(" ".join(c[5:]))
            n = 0
            for x, y in links:
                if not (x < args.endwindow or (l1 - x) < args.endwindow):
                    continue
                if not (y < args.endwindow or (l2 - y) < args.endwindow):
                    continue
                n += 1
            l1 = min(l1, 2 * args.endwindow)
            l2 = min(l2, 2 * args.endwindow)

        n_done += 1

        n_bar0 = ces.model.N * ces.model.pn * l1 * l2 * 2 / G2
#        n=len(links)
#        ppf=ppf_cached(n_bar0)
        N = int(G2 / (l1 * l2))
        ppf = ppf_mtc(n_bar0, N)
        if (n - ppf) >= args.scoreDelta:
            print("\t".join(map(str, [s1, s2, n, ppf, round(n_bar0, 4), l1, l2, N])))
31.472
178
0.579563
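The scoring loop above compares an observed link count against a Poisson noise expectation using a multiple-testing-corrected ppf cutoff; the core test is easy to reproduce in isolation. A small sketch with made-up numbers (genome size, read count, noise fraction, contig lengths and observed count are all assumptions, mirroring the script's constants and defaults):

from scipy.stats import poisson

G = 3.0e9          # genome size (assumed, mirrors the constant in the script)
N_reads = 1e8      # read-pair count (assumed)
pn = 0.3           # noise fraction (assumed)
l1, l2 = 50_000, 80_000   # contig lengths (assumed)
n_observed = 25    # observed inter-contig link count (assumed)

G2 = G * G
n_bar0 = N_reads * pn * l1 * l2 * 2 / G2         # expected links under noise
n_tests = int(G2 / (l1 * l2))                    # implicit number of contig pairs
pp = 1.0 - (1e-6 / n_tests)                      # Bonferroni-style correction
threshold = poisson.ppf(pp, n_bar0)              # count needed for significance

print(f"expected={n_bar0:.4f} threshold={threshold} observed={n_observed}")
if n_observed - threshold >= 2.0:                # scoreDelta default in the script
    print("significant link")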
f2a72bf6750d5ef8930cc4514b3aa5473ca75a19
1,233
py
Python
scenarios/data_access/client1/submit_job.py
SecConNet/proof_of_concept
80f6b27ff6b97796803e554387ca2881a792be79
[ "Apache-2.0" ]
4
2021-03-26T09:17:51.000Z
2021-05-17T10:31:59.000Z
scenarios/data_access/client1/submit_job.py
SecConNet/proof_of_concept
80f6b27ff6b97796803e554387ca2881a792be79
[ "Apache-2.0" ]
58
2020-03-02T10:02:51.000Z
2021-07-09T09:23:49.000Z
scenarios/data_access/client1/submit_job.py
SecConNet/proof_of_concept
80f6b27ff6b97796803e554387ca2881a792be79
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3

from mahiru.definitions.workflows import Job, Workflow, WorkflowStep
from mahiru.rest.internal_client import InternalSiteRestClient


if __name__ == '__main__':
    # create single-step workflow
    workflow = Workflow(
            ['input'],
            {'result': 'compute.output0'},
            [
                WorkflowStep(
                    name='compute',
                    inputs={'input0': 'input'},
                    outputs={'output0': 'asset:party1_ns:da.data.output_base'
                             ':party1_ns:site1'},
                    compute_asset_id=(
                        'asset:party1_ns:da.software.script1'
                        ':party1_ns:site1'))
            ]
    )

    inputs = {'input': 'asset:party2_ns:da.data.input:party2_ns:site2'}

    # run workflow
    client = InternalSiteRestClient(
            'site:party1_ns:site1', 'http://site1:1080')
    print('Submitting job...')
    job_id = client.submit_job(Job(workflow, inputs))
    print(f'Submitted, waiting for result at {job_id}')
    result = client.get_job_result(job_id)

    print('Job complete:')
    print(f'Job: {result.job}')
    print(f'Plan: {result.plan}')
    print(f'Outputs: {result.outputs}')
33.324324
71
0.570965
5d57e62b0e4b15d28881b6c1785e621ec7c25bd2
3,298
py
Python
sdk/python/pulumi_azure_native/azureactivedirectory/v20190101preview/outputs.py
pulumi-bot/pulumi-azure-native
f7b9490b5211544318e455e5cceafe47b628e12c
[ "Apache-2.0" ]
31
2020-09-21T09:41:01.000Z
2021-02-26T13:21:59.000Z
sdk/python/pulumi_azure_native/azureactivedirectory/v20190101preview/outputs.py
pulumi-bot/pulumi-azure-native
f7b9490b5211544318e455e5cceafe47b628e12c
[ "Apache-2.0" ]
231
2020-09-21T09:38:45.000Z
2021-03-01T11:16:03.000Z
sdk/python/pulumi_azure_native/azureactivedirectory/v20190101preview/outputs.py
pulumi-bot/pulumi-azure-native
f7b9490b5211544318e455e5cceafe47b628e12c
[ "Apache-2.0" ]
4
2020-09-29T14:14:59.000Z
2021-02-10T20:38:16.000Z
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *

__all__ = [
    'B2CResourceSKUResponse',
    'B2CTenantResourcePropertiesResponseBillingConfig',
]

@pulumi.output_type
class B2CResourceSKUResponse(dict):
    """
    SKU properties of the Azure AD B2C tenant. Learn more about Azure AD B2C billing at [aka.ms/b2cBilling](https://aka.ms/b2cBilling).
    """
    def __init__(__self__, *,
                 name: Optional[str] = None,
                 tier: Optional[str] = None):
        """
        SKU properties of the Azure AD B2C tenant. Learn more about Azure AD B2C billing at [aka.ms/b2cBilling](https://aka.ms/b2cBilling).
        :param str name: The name of the SKU for the tenant.
        :param str tier: The tier of the tenant.
        """
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tier is not None:
            pulumi.set(__self__, "tier", tier)

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        The name of the SKU for the tenant.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def tier(self) -> Optional[str]:
        """
        The tier of the tenant.
        """
        return pulumi.get(self, "tier")

    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop


@pulumi.output_type
class B2CTenantResourcePropertiesResponseBillingConfig(dict):
    """
    The billing configuration for the tenant.
    """
    def __init__(__self__, *,
                 effective_start_date_utc: str,
                 billing_type: Optional[str] = None):
        """
        The billing configuration for the tenant.
        :param str effective_start_date_utc: The date from which the billing type took effect
        :param str billing_type: The type of billing. Will be MAU for all new customers. If 'Auths', it can be updated to 'MAU'. Cannot be changed if value is 'MAU'. Learn more about Azure AD B2C billing at [aka.ms/b2cBilling](https://aka.ms/b2cbilling).
        """
        pulumi.set(__self__, "effective_start_date_utc", effective_start_date_utc)
        if billing_type is not None:
            pulumi.set(__self__, "billing_type", billing_type)

    @property
    @pulumi.getter(name="effectiveStartDateUtc")
    def effective_start_date_utc(self) -> str:
        """
        The date from which the billing type took effect
        """
        return pulumi.get(self, "effective_start_date_utc")

    @property
    @pulumi.getter(name="billingType")
    def billing_type(self) -> Optional[str]:
        """
        The type of billing. Will be MAU for all new customers. If 'Auths', it can be updated to 'MAU'. Cannot be changed if value is 'MAU'. Learn more about Azure AD B2C billing at [aka.ms/b2cBilling](https://aka.ms/b2cbilling).
        """
        return pulumi.get(self, "billing_type")

    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
35.847826
254
0.650697
74ddedbee4ecd450a3a54576df5b2e63fc8e5d62
3,676
py
Python
microbenchmarks/re_split_ubench.py
aisk/pyston
ac69cfef0621dbc8901175e84fa2b5cb5781a646
[ "BSD-2-Clause", "Apache-2.0" ]
1
2020-02-06T14:28:45.000Z
2020-02-06T14:28:45.000Z
microbenchmarks/re_split_ubench.py
aisk/pyston
ac69cfef0621dbc8901175e84fa2b5cb5781a646
[ "BSD-2-Clause", "Apache-2.0" ]
null
null
null
microbenchmarks/re_split_ubench.py
aisk/pyston
ac69cfef0621dbc8901175e84fa2b5cb5781a646
[ "BSD-2-Clause", "Apache-2.0" ]
1
2020-02-06T14:29:00.000Z
2020-02-06T14:29:00.000Z
import re FILTER_SEPARATOR = '|' FILTER_ARGUMENT_SEPARATOR = ':' VARIABLE_ATTRIBUTE_SEPARATOR = '.' BLOCK_TAG_START = '{%' BLOCK_TAG_END = '%}' VARIABLE_TAG_START = '{{' VARIABLE_TAG_END = '}}' COMMENT_TAG_START = '{#' COMMENT_TAG_END = '#}' TRANSLATOR_COMMENT_MARK = 'Translators' SINGLE_BRACE_START = '{' SINGLE_BRACE_END = '}' tag_re = (re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END), re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END), re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END)))) template_source = """ {% extends "admin/base_site.html" %} {% load i18n admin_static %} {% block extrastyle %}{{ block.super }}<link rel="stylesheet" type="text/css" href="{% static "admin/css/dashboard.css" %}" />{% endblock %} {% block coltype %}colMS{% endblock %} {% block bodyclass %}{{ block.super }} dashboard{% endblock %} {% block breadcrumbs %}{% endblock %} {% block content %} <div id="content-main"> {% if app_list %} {% for app in app_list %} <div class="app-{{ app.app_label }} module"> <table> <caption> <a href="{{ app.app_url }}" class="section" title="{% blocktrans with name=app.name %}Models in the {{ name }} application{% endblocktrans %}">{{ app.name }}</a> </caption> {% for model in app.models %} <tr class="model-{{ model.object_name|lower }}"> {% if model.admin_url %} <th scope="row"><a href="{{ model.admin_url }}">{{ model.name }}</a></th> {% else %} <th scope="row">{{ model.name }}</th> {% endif %} {% if model.add_url %} <td><a href="{{ model.add_url }}" class="addlink">{% trans 'Add' %}</a></td> {% else %} <td>&nbsp;</td> {% endif %} {% if model.admin_url %} <td><a href="{{ model.admin_url }}" class="changelink">{% trans 'Change' %}</a></td> {% else %} <td>&nbsp;</td> {% endif %} </tr> {% endfor %} </table> </div> {% endfor %} {% else %} <p>{% trans "You don't have permission to edit anything." %}</p> {% endif %} </div> {% endblock %} {% block sidebar %} <div id="content-related"> <div class="module" id="recent-actions-module"> <h2>{% trans 'Recent Actions' %}</h2> <h3>{% trans 'My Actions' %}</h3> {% load log %} {% get_admin_log 10 as admin_log for_user user %} {% if not admin_log %} <p>{% trans 'None available' %}</p> {% else %} <ul class="actionlist"> {% for entry in admin_log %} <li class="{% if entry.is_addition %}addlink{% endif %}{% if entry.is_change %}changelink{% endif %}{% if entry.is_deletion %}deletelink{% endif %}"> {% if entry.is_deletion or not entry.get_admin_url %} {{ entry.object_repr }} {% else %} <a href="{{ entry.get_admin_url }}">{{ entry.object_repr }}</a> {% endif %} <br/> {% if entry.content_type %} <span class="mini quiet">{% filter capfirst %}{% trans entry.content_type.name %}{% endfilter %}</span> {% else %} <span class="mini quiet">{% trans 'Unknown content' %}</span> {% endif %} </li> {% endfor %} </ul> {% endif %} </div> </div> {% endblock %} """ for i in xrange(30000): tag_re.split(template_source)
34.037037
173
0.509793
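The benchmark above exercises re.split with a capturing group, which is what makes tag_re return the template tags themselves alongside the surrounding text; a tiny illustration of that behaviour:

import re

# re.split with a capturing group returns the separators too, which is how
# the benchmark's tag_re tokenises a template into text and tag pieces.
tag_re = re.compile(r'({%.*?%}|{{.*?}}|{#.*?#})')
tokens = tag_re.split("Hello {{ user }}! {% if flag %}yes{% endif %}")
print(tokens)
# ['Hello ', '{{ user }}', '! ', '{% if flag %}', 'yes', '{% endif %}', '']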
7cec8c189539bd868c3c46be87696774d5a16518
347
py
Python
erpnext_shipping/config/docs.py
arahimfahim40/erpnext_shipping1
c86bfe865cff31ec3e6f54f0acbe57def42d7a06
[ "MIT" ]
3
2021-06-22T01:36:22.000Z
2022-02-26T00:12:19.000Z
erpnext_shipping/config/docs.py
arahimfahim40/erpnext_shipping
625a7e41b5bfcfb6a3a55dc345f262bd8398ad83
[ "MIT" ]
null
null
null
erpnext_shipping/config/docs.py
arahimfahim40/erpnext_shipping
625a7e41b5bfcfb6a3a55dc345f262bd8398ad83
[ "MIT" ]
1
2021-08-01T11:46:12.000Z
2021-08-01T11:46:12.000Z
""" Configuration for docs """ # source_link = "https://github.com/[org_name]/erpnext_shipping" # docs_base_url = "https://[org_name].github.io/erpnext_shipping" # headline = "App that does everything" # sub_heading = "Yes, you got that right the first time, everything" def get_context(context): context.brand_html = "Manage Vehicle Shipping"
28.916667
68
0.743516
334e5e08f931ba2cf50791b0b0e8b3fe8afdd839
1,383
py
Python
djangoBlog/urls.py
snehalkhandve/djangoBlog
a579396a7c4cce68b294d06787ea8249aa59e1e6
[ "MIT" ]
null
null
null
djangoBlog/urls.py
snehalkhandve/djangoBlog
a579396a7c4cce68b294d06787ea8249aa59e1e6
[ "MIT" ]
null
null
null
djangoBlog/urls.py
snehalkhandve/djangoBlog
a579396a7c4cce68b294d06787ea8249aa59e1e6
[ "MIT" ]
1
2020-03-29T14:18:08.000Z
2020-03-29T14:18:08.000Z
"""djangoBlog URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.2/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.contrib.auth import views as auth_views from django.urls import path, include from user import views as user_views from django.conf import settings from django.conf.urls.static import static urlpatterns = [ path('admin/', admin.site.urls), path('register/', user_views.register, name="register"), path('profile/', user_views.profile, name="profile"), path('login/', auth_views.LoginView.as_view(template_name='user/login.html'), name="login"), path('logout/', auth_views.LogoutView.as_view(template_name='user/logout.html'), name="logout"), path('', include('blog.urls')), ] if settings.DEBUG: urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
38.416667
101
0.725958
1835180eb1a11f3089ad78d05335b1495562ca8a
1,329
py
Python
escala/migrations/0001_initial.py
renannoronha/iasd
2fd94af8c08cf16ae028f587e35431b99a64ec57
[ "MIT" ]
null
null
null
escala/migrations/0001_initial.py
renannoronha/iasd
2fd94af8c08cf16ae028f587e35431b99a64ec57
[ "MIT" ]
null
null
null
escala/migrations/0001_initial.py
renannoronha/iasd
2fd94af8c08cf16ae028f587e35431b99a64ec57
[ "MIT" ]
null
null
null
# Generated by Django 3.1.7 on 2022-03-08 12:34 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Departamento', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('departamento', models.CharField(max_length=255, verbose_name='Departamento')), ], options={ 'verbose_name': 'Departamento', 'verbose_name_plural': 'Departamentos', }, ), migrations.CreateModel( name='Escala', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('data', models.DateField(verbose_name='Data')), ('nome', models.CharField(max_length=255, verbose_name='Responsável')), ('departamento', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='escala.departamento')), ], options={ 'verbose_name': 'Escala', 'verbose_name_plural': 'Escala', }, ), ]
33.225
123
0.562077
ae01d989fcac56fce4e32b13215c92b46f284274
681
py
Python
tests/nnapi/specs/V1_0/mul_4D_nnfw.mod.py
juitem/ONE
8c6a4b7738074573b6ac5c82dcf1f6697520d1ed
[ "Apache-2.0" ]
255
2020-05-22T07:45:29.000Z
2022-03-29T23:58:22.000Z
tests/nnapi/specs/V1_0/mul_4D_nnfw.mod.py
juitem/ONE
8c6a4b7738074573b6ac5c82dcf1f6697520d1ed
[ "Apache-2.0" ]
5,102
2020-05-22T07:48:33.000Z
2022-03-31T23:43:39.000Z
tests/nnapi/specs/V1_0/mul_4D_nnfw.mod.py
juitem/ONE
8c6a4b7738074573b6ac5c82dcf1f6697520d1ed
[ "Apache-2.0" ]
120
2020-05-22T07:51:08.000Z
2022-02-16T19:08:05.000Z
# model model = Model() i1 = Input("op1", "TENSOR_FLOAT32", "{2, 2, 2, 2}") i2 = Input("op2", "TENSOR_FLOAT32", "{2, 2, 2, 2}") act = Int32Scalar("act", 0) # an int32_t scalar fuse_activation i3 = Output("op3", "TENSOR_FLOAT32", "{2, 2, 2, 2}") model = model.Operation("MUL", i1, i2, act).To(i3) # Example 1. Input in operand 0, input0 = {i1: # input 0 [1, 2, -3, -4, -15, 6, 23, 8, -1, -2, 3, 4, 10, -6, 7, -2], i2: # input 1 [-1, -2, 3, 4, -5, -6, 7, -8, 1, -2, -3, -4, -5, 6, 7, 8]} output0 = {i3: # output 0 [-1, -4, -9, -16, 75, -36, 161, -64, -1, 4, -9, -16, -50, -36, 49, -16]} # Instantiate an example Example((input0, output0))
34.05
83
0.50514
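The spec above pins down element-wise MUL on two 2x2x2x2 tensors with no fused activation (act 0); the listed outputs can be checked directly with NumPy:

import numpy as np

# Sanity check of the MUL spec above: the expected output is the element-wise
# product of the two inputs.
a = np.array([1, 2, -3, -4, -15, 6, 23, 8,
              -1, -2, 3, 4, 10, -6, 7, -2], dtype=np.float32).reshape(2, 2, 2, 2)
b = np.array([-1, -2, 3, 4, -5, -6, 7, -8,
              1, -2, -3, -4, -5, 6, 7, 8], dtype=np.float32).reshape(2, 2, 2, 2)
expected = np.array([-1, -4, -9, -16, 75, -36, 161, -64,
                     -1, 4, -9, -16, -50, -36, 49, -16],
                    dtype=np.float32).reshape(2, 2, 2, 2)
assert np.array_equal(a * b, expected)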
141c5878e06495bc0dd01151236b6dcfd40fe652
15,885
py
Python
env/lib/python3.7/site-packages/docusign_rooms/apis/task_lists_api.py
davidgacc/docusign
e63167101656d0066d481844576ce687ea80eb91
[ "MIT" ]
null
null
null
env/lib/python3.7/site-packages/docusign_rooms/apis/task_lists_api.py
davidgacc/docusign
e63167101656d0066d481844576ce687ea80eb91
[ "MIT" ]
null
null
null
env/lib/python3.7/site-packages/docusign_rooms/apis/task_lists_api.py
davidgacc/docusign
e63167101656d0066d481844576ce687ea80eb91
[ "MIT" ]
null
null
null
# coding: utf-8 """ DocuSign Rooms API - v2 An API for an integrator to access the features of DocuSign Rooms # noqa: E501 OpenAPI spec version: v2 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import sys import os import re # python 2 and python 3 compatibility library from six import iteritems from ..client.configuration import Configuration from ..client.api_client import ApiClient class TaskListsApi(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): config = Configuration() if api_client: self.api_client = api_client else: if not config.api_client: config.api_client = ApiClient() self.api_client = config.api_client def create_task_list(self, room_id, account_id, **kwargs): """ Add a task list to a room based on a task list template. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_task_list(room_id, account_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int room_id: Room ID. (required) :param str account_id: (required) :param TaskListForCreate body: :return: TaskList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.create_task_list_with_http_info(room_id, account_id, **kwargs) else: (data) = self.create_task_list_with_http_info(room_id, account_id, **kwargs) return data def create_task_list_with_http_info(self, room_id, account_id, **kwargs): """ Add a task list to a room based on a task list template. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_task_list_with_http_info(room_id, account_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int room_id: Room ID. (required) :param str account_id: (required) :param TaskListForCreate body: :return: TaskList If the method is called asynchronously, returns the request thread. 
""" all_params = ['room_id', 'account_id', 'body'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_task_list" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'room_id' is set if ('room_id' not in params) or (params['room_id'] is None): raise ValueError("Missing the required parameter `room_id` when calling `create_task_list`") # verify the required parameter 'account_id' is set if ('account_id' not in params) or (params['account_id'] is None): raise ValueError("Missing the required parameter `account_id` when calling `create_task_list`") collection_formats = {} resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/task_lists'.replace('{format}', 'json') path_params = {} if 'room_id' in params: path_params['roomId'] = params['room_id'] if 'account_id' in params: path_params['accountId'] = params['account_id'] query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['text/plain', 'application/json', 'text/json']) # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # Authentication setting auth_settings = [] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TaskList', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_task_list(self, task_list_id, account_id, **kwargs): """ Deletes a task list. If there are attached documents they will remain in the associated This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_task_list(task_list_id, account_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int task_list_id: Task List ID (required) :param str account_id: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.delete_task_list_with_http_info(task_list_id, account_id, **kwargs) else: (data) = self.delete_task_list_with_http_info(task_list_id, account_id, **kwargs) return data def delete_task_list_with_http_info(self, task_list_id, account_id, **kwargs): """ Deletes a task list. If there are attached documents they will remain in the associated This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_task_list_with_http_info(task_list_id, account_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int task_list_id: Task List ID (required) :param str account_id: (required) :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['task_list_id', 'account_id'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_task_list" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'task_list_id' is set if ('task_list_id' not in params) or (params['task_list_id'] is None): raise ValueError("Missing the required parameter `task_list_id` when calling `delete_task_list`") # verify the required parameter 'account_id' is set if ('account_id' not in params) or (params['account_id'] is None): raise ValueError("Missing the required parameter `account_id` when calling `delete_task_list`") collection_formats = {} resource_path = '/v2/accounts/{accountId}/task_lists/{taskListId}'.replace('{format}', 'json') path_params = {} if 'task_list_id' in params: path_params['taskListId'] = params['task_list_id'] if 'account_id' in params: path_params['accountId'] = params['account_id'] query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['text/plain', 'application/json', 'text/json']) # Authentication setting auth_settings = [] return self.api_client.call_api(resource_path, 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_task_lists(self, room_id, account_id, **kwargs): """ Returns the summary for all viewable task lists in a This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_task_lists(room_id, account_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int room_id: Room ID (required) :param str account_id: (required) :return: TaskListSummaryList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_task_lists_with_http_info(room_id, account_id, **kwargs) else: (data) = self.get_task_lists_with_http_info(room_id, account_id, **kwargs) return data def get_task_lists_with_http_info(self, room_id, account_id, **kwargs): """ Returns the summary for all viewable task lists in a This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_task_lists_with_http_info(room_id, account_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int room_id: Room ID (required) :param str account_id: (required) :return: TaskListSummaryList If the method is called asynchronously, returns the request thread. """ all_params = ['room_id', 'account_id'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_task_lists" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'room_id' is set if ('room_id' not in params) or (params['room_id'] is None): raise ValueError("Missing the required parameter `room_id` when calling `get_task_lists`") # verify the required parameter 'account_id' is set if ('account_id' not in params) or (params['account_id'] is None): raise ValueError("Missing the required parameter `account_id` when calling `get_task_lists`") collection_formats = {} resource_path = '/v2/accounts/{accountId}/rooms/{roomId}/task_lists'.replace('{format}', 'json') path_params = {} if 'room_id' in params: path_params['roomId'] = params['room_id'] if 'account_id' in params: path_params['accountId'] = params['account_id'] query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['text/plain', 'application/json', 'text/json']) # Authentication setting auth_settings = [] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TaskListSummaryList', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
42.473262
126
0.581366
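The generated Rooms client above is synchronous by default and switches to a thread-plus-callback mode when a `callback` is passed, as every docstring notes; a hedged sketch of that asynchronous style (constructing a configured TaskListsApi instance is assumed to happen elsewhere and is not shown by this file):

from pprint import pprint

def print_task_lists(response):
    # Invoked with the TaskListSummaryList once the HTTP request completes.
    pprint(response)

def list_task_lists_async(api, room_id, account_id):
    # Hedged sketch: `api` is assumed to be a TaskListsApi wired to a
    # configured ApiClient. Returns the request thread immediately.
    return api.get_task_lists(room_id, account_id, callback=print_task_lists)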
02a7b74cc57fa455338766970daebce567c92f6d
1,235
py
Python
tekstovni_vmesnik.py
TurkAndreja/vislice
f8b3b7dbdde8af92a55bbf85b53c7e5b324af9b5
[ "MIT" ]
null
null
null
tekstovni_vmesnik.py
TurkAndreja/vislice
f8b3b7dbdde8af92a55bbf85b53c7e5b324af9b5
[ "MIT" ]
null
null
null
tekstovni_vmesnik.py
TurkAndreja/vislice
f8b3b7dbdde8af92a55bbf85b53c7e5b324af9b5
[ "MIT" ]
null
null
null
import model

lojtrice = "#################################################\n"

def izpis_zmage(igra):
    tekst = lojtrice + "Uganili ste geslo {0}.\n".format(igra.geslo)
    return tekst

def izpis_poraza(igra):
    tekst = lojtrice + "Obešeni ste! Pravilno geslo je bilo {0}.\n".format(igra.geslo)
    return tekst

def izpis_igre(igra):
    tekst = lojtrice + igra.pravilni_del_gesla() + "\n" + ("Preostalo število poskusov: {0}\nNapačni ugibi: {1}\n").format(model.STEVILO_DOVOLJENIH_NAPAK - igra.stevilo_napak() + 1, igra.nepravilni_ugibi()) + lojtrice
    return tekst

def zahtevaj_vnos():
    return input("Ugibaj črko: ")

def pozeni_vmesnik():
    igra = model.nova_igra()
    while True:
        # print the current state of the game
        print(izpis_igre(igra))
        # ask the user for a guess
        poskus = zahtevaj_vnos()
        igra.ugibaj(poskus)
        # check whether the game is over
        if igra.poraz():
            print(izpis_poraza(igra))
            break
        elif igra.zmaga():
            print(izpis_zmage(igra))
            break
        else:
            pass
    return None

pozeni_vmesnik()

# igra = model.nova_igra()
# print(izpis_zmage(igra))
# print(izpis_poraza(igra))
# print(izpis_igre(igra))
28.068182
217
0.606478
cc9c851964cbb223b6607852b1c390a6be1c0ac1
11,433
py
Python
tests/extensions/artificial_intelligence/intents/test_intents_extensions.py
mirlarof/blip-sdk-python
f958149b2524d4340eeafad8739a33db71df45ed
[ "MIT" ]
2
2021-07-02T20:10:48.000Z
2021-07-13T20:51:18.000Z
tests/extensions/artificial_intelligence/intents/test_intents_extensions.py
mirlarof/blip-sdk-python
f958149b2524d4340eeafad8739a33db71df45ed
[ "MIT" ]
3
2021-06-24T13:27:21.000Z
2021-07-30T15:37:43.000Z
tests/extensions/artificial_intelligence/intents/test_intents_extensions.py
mirlarof/blip-sdk-python
f958149b2524d4340eeafad8739a33db71df45ed
[ "MIT" ]
3
2021-06-23T19:53:20.000Z
2022-01-04T17:50:44.000Z
from lime_python import Command
from pytest import fixture, mark
from pytest_mock import MockerFixture

from src import AIExtension

from ....async_mock import async_return

AI_TO = '[email protected]'


class TestIntentsExtension:

    @fixture
    def target(self, mocker: MockerFixture) -> AIExtension:
        yield AIExtension(mocker.MagicMock(), 'msging.net')

    @mark.asyncio
    async def test_get_intent_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'
        intent_uri = f'/intentions/{intent_id}?deep=True'

        expected_command = Command('get', intent_uri)
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.get_intent_async(intent_id, True)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_get_intents_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_uri = '/intentions?$skip=0&$take=100&deep=True&$ascending=False'

        expected_command = Command('get', intent_uri)
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.get_intents_async(0, 100, True)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_set_intent(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_resource = {
            'name': 'intents'
        }

        expected_command = Command(
            'set',
            '/intentions',
            'application/vnd.iris.ai.intention+json',
            intent_resource
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.set_intent_async(intent_resource)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_set_intents(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intents = [
            {
                'name': 'intents'
            }
        ]
        intent_resource = {
            'itemType': 'application/vnd.iris.ai.intention+json',
            'items': intents
        }

        expected_command = Command(
            'set',
            '/intentions',
            'application/vnd.lime.collection+json',
            intent_resource
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.set_intents_async(intents)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_merge_intent(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent = {
            'name': 'intents'
        }

        expected_command = Command(
            'merge',
            '/intentions',
            'application/vnd.iris.ai.intention+json',
            intent
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.merge_intent_async(intent)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_merge_intents(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intents = [
            {
                'name': 'intents'
            }
        ]
        intents_resource = {
            'itemType': 'application/vnd.iris.ai.intention+json',
            'items': intents
        }

        expected_command = Command(
            'merge',
            '/intentions',
            'application/vnd.lime.collection+json',
            intents_resource
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.merge_intents_async(intents)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_delete_intent_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'

        expected_command = Command(
            'delete',
            f'/intentions/{intent_id}'
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.delete_intent_async(intent_id)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_delete_intents_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        expected_command = Command(
            'delete',
            '/intentions'
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.delete_intents_async()

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_get_intent_answers_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'

        expected_command = Command(
            'get',
            f'/intentions/{intent_id}/answers?$skip=0&$take=100&$ascending=False'  # noqa: E501
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.get_intent_answers_async(intent_id)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_set_intent_answers_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'
        answers = [
            {
                'type': 'text/plain',
                'value': 'Which flavor do you want?'
            }
        ]
        set_answer_resource = {
            'itemType': 'application/vnd.iris.ai.answer+json',
            'items': answers
        }

        expected_command = Command(
            'set',
            f'/intentions/{intent_id}/answers',
            'application/vnd.lime.collection+json',
            set_answer_resource
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.set_intent_answers_async(intent_id, answers)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_delete_intent_answers_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'
        answer_id = '4321'

        expected_command = Command(
            'delete',
            f'/intentions/{intent_id}/answers/{answer_id}'
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.delete_intent_answers_async(intent_id, answer_id)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_get_intent_questions_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'

        expected_command = Command(
            'get',
            f'/intentions/{intent_id}/questions'
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.get_intent_questions_async(intent_id)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_set_intent_questions_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'
        questions = [
            {
                'text': 'Qual a pizza',
            },
            {
                'text': 'Qual o sanduiche',
            }
        ]
        set_question_resource = {
            'itemType': 'application/vnd.iris.ai.question+json',
            'items': questions
        }

        expected_command = Command(
            'set',
            f'/intentions/{intent_id}/questions',
            'application/vnd.lime.collection+json',
            set_question_resource
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.set_intent_questions_async(intent_id, questions)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)

    @mark.asyncio
    async def test_delete_intent_question_async(
        self,
        mocker: MockerFixture,
        target: AIExtension
    ) -> None:
        # Arrange
        intent_id = '1234'
        question_id = '4321'

        expected_command = Command(
            'delete',
            f'/intentions/{intent_id}/questions/{question_id}'
        )
        expected_command.to = AI_TO

        mock = mocker.MagicMock(
            return_value=async_return(None)
        )
        target.client.process_command_async = mock

        # Act
        await target.delete_intent_question_async(intent_id, question_id)

        # Assert
        expected_command.id = mock.call_args[0][0].id
        mock.assert_called_once_with(expected_command)
27.549398
94
0.576139
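The tests in the record above await a `MagicMock` via `async_return`, imported from a local `async_mock` helper that is not included here. A minimal sketch of what such a helper could look like, assuming the name the import suggests; this is an illustration, not the project's actual code:

def async_return(result):
    # Wrap `result` in an awaitable that resolves immediately, so it can be
    # used as a MagicMock return_value inside `await` expressions.
    # (An asyncio.Future with set_result(result) is a common alternative.)
    async def _resolved():
        return result
    return _resolved()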
8fe4e944325a6e642c5196600cda47cf0987395c
2,952
py
Python
accounts/migrations/0001_initial.py
knowapi/DeveloperPortalExamples
bbe50f333d1257fdbb107b507c09221b543e7d8d
[ "MIT" ]
2
2016-10-30T20:28:03.000Z
2017-03-28T08:55:20.000Z
accounts/migrations/0001_initial.py
knowapi/DeveloperPortalExamples
bbe50f333d1257fdbb107b507c09221b543e7d8d
[ "MIT" ]
11
2016-09-10T09:43:16.000Z
2022-01-13T00:42:40.000Z
accounts/migrations/0001_initial.py
knowapi/DeveloperPortalExamples
bbe50f333d1257fdbb107b507c09221b543e7d8d
[ "MIT" ]
3
2017-01-02T11:54:54.000Z
2018-01-02T05:58:43.000Z
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import django.utils.timezone
import django.core.validators
import django.contrib.auth.models


class Migration(migrations.Migration):

    dependencies = [
        ('auth', '0006_require_contenttypes_0002'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(null=True, verbose_name='last login', blank=True)),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, max_length=30, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.', 'invalid')], help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True, verbose_name='username')),
                ('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)),
                ('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)),
                ('email', models.EmailField(max_length=254, verbose_name='email address', blank=True)),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
            ],
            options={
                'abstract': False,
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
    ]
67.090909
432
0.659892
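Because this migration creates a custom user model in an app the file path suggests is named `accounts`, Django must be pointed at it before the first migrate run. This is standard Django configuration, shown as a sketch:

# settings.py -- required whenever a project swaps in a custom user model
AUTH_USER_MODEL = 'accounts.User'

# then, from the project root:
#   python manage.py migrate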
fa0077eb3367e3260386b25d96577ac91f0ff645
4,348
py
Python
test_project/components/social_auth_app_django_spa.py
ebenh/django-flex-user
efffb21e4ce33d2ea8665756334e2a391f4b5a72
[ "MIT" ]
1
2021-09-13T20:26:02.000Z
2021-09-13T20:26:02.000Z
test_project/components/social_auth_app_django_spa.py
ebenh/django-flex-user
efffb21e4ce33d2ea8665756334e2a391f4b5a72
[ "MIT" ]
null
null
null
test_project/components/social_auth_app_django_spa.py
ebenh/django-flex-user
efffb21e4ce33d2ea8665756334e2a391f4b5a72
[ "MIT" ]
null
null
null
#
# Configure social-auth-app-django for single-page apps
#

from test_project.components.django_environ import env

#
# Production Variables ... Needed for social-auth-app-django
#
ALLOWED_HOSTS = ['localhost', '127.0.0.1']

# Configure social-auth-app-django
# SOCIAL_AUTH_POSTGRES_JSONFIELD = True  # social-auth-app-django ... deprecated
# SOCIAL_AUTH_JSONFIELD_ENABLED = True  # social-auth-app-django ... todo: enable this when using pgsql
# SOCIAL_AUTH_URL_NAMESPACE = 'social'  # social-auth-app-django

SOCIAL_AUTH_ALLOWED_REDIRECT_HOSTS = ['localhost:4200', 'cnn.com', 'yahoo.com', 'cbc.ca']  # note eben: no need to include 'localhost:8000', it's added implicitly
SOCIAL_AUTH_NEW_USER_REDIRECT_URL = 'http://localhost:4200/sign-in/oauth/complete/'
SOCIAL_AUTH_NEW_ASSOCIATION_REDIRECT_URL = 'http://localhost:4200/sign-in/oauth/complete/'
SOCIAL_AUTH_LOGIN_REDIRECT_URL = 'http://localhost:4200/sign-in/oauth/complete/'
SOCIAL_AUTH_LOGIN_ERROR_URL = 'http://localhost:4200/sign-in/oauth/complete/'
SOCIAL_AUTH_DISCONNECT_REDIRECT_URL = 'http://localhost:4200/account/'

# Facebook configuration
SOCIAL_AUTH_FACEBOOK_KEY = env('SOCIAL_AUTH_FACEBOOK_KEY')  # App ID
SOCIAL_AUTH_FACEBOOK_SECRET = env('SOCIAL_AUTH_FACEBOOK_SECRET')  # App Secret
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email', ]  # It seems email scope is now included by default
SOCIAL_AUTH_FACEBOOK_PROFILE_EXTRA_PARAMS = {
    'fields': 'id, name, email, picture',
}
SOCIAL_AUTH_FACEBOOK_EXTRA_DATA = [
    ('name', 'name', True),
    ('email', 'email', True)
]

# Google configuration
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = env('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY')  # client_id
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = env('SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET')  # client_secret
SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = [
    # These scopes don't seem to be necessary
    'https://www.googleapis.com/auth/userinfo.email',
    'https://www.googleapis.com/auth/userinfo.profile',
]
SOCIAL_AUTH_GOOGLE_OAUTH2_EXTRA_DATA = [
    ('name', 'name', True),
    ('email', 'email', True)
]

SOCIAL_AUTH_PIPELINE = (
    # Get the information we can about the user and return it in a simple
    # format to create the user instance later. On some cases the details are
    # already part of the auth response from the provider, but sometimes this
    # could hit a provider API.
    'social_core.pipeline.social_auth.social_details',

    # Get the social uid from whichever service we're authing thru. The uid is
    # the unique identifier of the given user in the provider.
    'social_core.pipeline.social_auth.social_uid',

    # Verifies that the current auth process is valid within the current
    # project, this is where emails and domains whitelists are applied (if
    # defined).
    'social_core.pipeline.social_auth.auth_allowed',

    # Checks if the current social-account is already associated in the site.
    'social_core.pipeline.social_auth.social_user',

    # Make up a username for this person, appends a random string at the end if
    # there's any collision.
    'social_core.pipeline.user.get_username',

    # Send a validation email to the user to verify its email address.
    # 'social_core.pipeline.mail.mail_validation',
    'django_flex_user.verification.mail_validation',

    # Associates the current social details with another user account with
    # a similar email address.
    'social_core.pipeline.social_auth.associate_by_email',

    # Create a user account if we haven't found one yet.
    'social_core.pipeline.user.create_user',

    # Create the record that associates the social account with this user.
    'social_core.pipeline.social_auth.associate_user',

    # Populate the extra_data field in the social record with the values
    # specified by settings (and the default ones like access_token, etc).
    'social_core.pipeline.social_auth.load_extra_data',

    # Update the user record with any changed info from the auth service.
    'social_core.pipeline.user.user_details'  # todo: disable this step
)

SOCIAL_AUTH_EMAIL_VALIDATION_URL = 'http://localhost:4200/verify/email/'
SOCIAL_AUTH_EMAIL_VALIDATION_FUNCTION = 'django_flex_user.verification.email_validation_link'
SOCIAL_AUTH_FACEBOOK_FORCE_EMAIL_VALIDATION = True

SOCIAL_AUTH_CLEAN_USERNAME_FUNCTION = 'django_flex_user.validators.flex_user_clean_username'
41.807692
120
0.75851
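The `env(...)` calls in the settings above resolve against the process environment or a local `.env` file via django-environ. A minimal sketch of that mechanism, with placeholder values (the keys shown are illustrative and real credentials must never be committed):

import environ

env = environ.Env()
environ.Env.read_env('.env')  # loads KEY=value lines, e.g.
# SOCIAL_AUTH_FACEBOOK_KEY=1234567890
# SOCIAL_AUTH_FACEBOOK_SECRET=not-a-real-secret

# A missing key raises ImproperlyConfigured, which fails fast at startup:
facebook_app_id = env('SOCIAL_AUTH_FACEBOOK_KEY')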
76e1835f98eab191361af8510023d65babac0d08
960
py
Python
examples/tensorflow/text-generator/predictor.py
honeypotz-eu/cortex
c6a3894009dae55de5cec68deb97c7d832514da0
[ "Apache-2.0" ]
1
2020-09-18T04:29:24.000Z
2020-09-18T04:29:24.000Z
examples/tensorflow/text-generator/predictor.py
awesomemachinelearning/cortex
7c2b5b07f9dbf9bf56def8a6e3a60763e271bd39
[ "Apache-2.0" ]
null
null
null
examples/tensorflow/text-generator/predictor.py
awesomemachinelearning/cortex
7c2b5b07f9dbf9bf56def8a6e3a60763e271bd39
[ "Apache-2.0" ]
null
null
null
# WARNING: you are on the master branch; please refer to examples on the branch corresponding to your `cortex version` (e.g. for version 0.19.*, run `git checkout -b 0.19` or switch to the `0.19` branch on GitHub)

import os
import boto3
from botocore import UNSIGNED
from botocore.client import Config

from encoder import get_encoder


class TensorFlowPredictor:
    def __init__(self, tensorflow_client, config):
        self.client = tensorflow_client

        if os.environ.get("AWS_ACCESS_KEY_ID"):
            s3 = boto3.client("s3")  # client will use your credentials if available
        else:
            s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))  # anonymous client

        self.encoder = get_encoder(s3)

    def predict(self, payload):
        model_input = {"context": [self.encoder.encode(payload["text"])]}
        prediction = self.client.predict(model_input)
        return self.encoder.decode(prediction["sample"])
38.4
213
0.701042
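In Cortex deployments the `tensorflow_client` above is injected by the platform, so the class can be exercised offline by standing in a stub. `StubClient` and its echoed payload are assumptions for illustration only, not part of the Cortex API:

class StubClient:
    def predict(self, model_input):
        # Pretend the model sampled exactly its input context.
        return {"sample": model_input["context"][0]}

# predictor = TensorFlowPredictor(StubClient(), config={})  # note: still fetches the encoder from S3
# predictor.predict({"text": "machine learning is"})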
ed75342a1c759e01453a9ce8dd4a6dc5584f3aad
10,715
py
Python
tools/rle_encode.py
StarGate01/pinetime-mcuboot-bootloader
2e1a66e10310f94d3af6e090b736ba84b7dbc177
[ "Apache-2.0" ]
33
2020-12-27T16:34:19.000Z
2021-12-16T21:18:04.000Z
tools/rle_encode.py
StarGate01/pinetime-mcuboot-bootloader
2e1a66e10310f94d3af6e090b736ba84b7dbc177
[ "Apache-2.0" ]
7
2021-01-25T20:32:46.000Z
2021-11-28T18:28:49.000Z
tools/rle_encode.py
StarGate01/pinetime-mcuboot-bootloader
2e1a66e10310f94d3af6e090b736ba84b7dbc177
[ "Apache-2.0" ]
10
2021-01-04T06:58:10.000Z
2021-08-02T15:39:58.000Z
#!/usr/bin/env python3

# SPDX-License-Identifier: LGPL-3.0-or-later
# Copyright (C) 2020 Daniel Thompson

import argparse
import sys
import os.path

from PIL import Image

def clut8_rgb888(i):
    """Reference CLUT for wasp-os.

    Technically speaking this is not a CLUT because we look up the colours
    algorithmically to avoid the cost of a genuine CLUT. The palette is
    designed to be fairly easy to generate algorithmically.

    The palette includes all 216 web-safe colours together with 4 grays and
    36 additional colours that target "gaps" at the brighter end of the web
    safe set. There are 11 greys (plus black and white) although two are
    fairly close together.

    :param int i: Index (from 0..255 inclusive) into the CLUT
    :return: 24-bit colour in RGB888 format
    """
    if i < 216:
        rgb888 = (i % 6) * 0x33
        rg = i // 6
        rgb888 += (rg % 6) * 0x3300
        rgb888 += (rg // 6) * 0x330000
    elif i < 252:
        i -= 216
        rgb888 = 0x7f + ((i % 3) * 0x33)
        rg = i // 3
        rgb888 += 0x4c00 + ((rg % 4) * 0x3300)
        rgb888 += 0x7f0000 + ((rg // 4) * 0x330000)
    else:
        i -= 252
        rgb888 = 0x2c2c2c + (0x101010 * i)

    return rgb888

def clut8_rgb565(i):
    """RGB565 CLUT for wasp-os.

    This CLUT implements the same palette as :py:meth:`clut8_888` but
    outputs RGB565 pixels.

    .. note::

        This function is unused within this file but needs to be
        maintained alongside the reference clut so it is reproduced
        here.

    :param int i: Index (from 0..255 inclusive) into the CLUT
    :return: 16-bit colour in RGB565 format
    """
    if i < 216:
        rgb565 = ((i % 6) * 0x33) >> 3
        rg = i // 6
        rgb565 += ((rg % 6) * (0x33 << 3)) & 0x07e0
        rgb565 += ((rg // 6) * (0x33 << 8)) & 0xf800
    elif i < 252:
        i -= 216
        rgb565 = (0x7f + ((i % 3) * 0x33)) >> 3
        rg = i // 3
        rgb565 += ((0x4c << 3) + ((rg % 4) * (0x33 << 3))) & 0x07e0
        rgb565 += ((0x7f << 8) + ((rg // 4) * (0x33 << 8))) & 0xf800
    else:
        i -= 252
        gr6 = (0x2c + (0x10 * i)) >> 2
        gr5 = gr6 >> 1
        rgb565 = (gr5 << 11) + (gr6 << 5) + gr5

    return rgb565

class ReverseCLUT:
    def __init__(self, clut):
        l = []
        for i in range(256):
            l.append(clut(i))
        self.clut = tuple(l)
        self.lookup = {}

    def __call__(self, rgb888):
        """Compare rgb888 to every element of the CLUT and pick the
        closest match.
        """
        if rgb888 in self.lookup:
            return self.lookup[rgb888]

        best = 200000
        index = -1
        clut = self.clut
        r = rgb888 >> 16
        g = (rgb888 >> 8) & 0xff
        b = rgb888 & 0xff
        for i in range(256):
            candidate = clut[i]
            rd = r - (candidate >> 16)
            gd = g - ((candidate >> 8) & 0xff)
            bd = b - (candidate & 0xff)
            # This is the Euclidian distance (squared)
            distance = rd * rd + gd * gd + bd * bd
            if distance < best:
                best = distance
                index = i

        self.lookup[rgb888] = index
        #print(f'# #{rgb888:06x} -> #{clut8_rgb888(index):06x}')
        return index

def varname(p):
    return os.path.basename(os.path.splitext(p)[0])

def encode(im):
    pixels = im.load()

    rle = []
    rl = 0
    px = pixels[0, 0]

    def encode_pixel(px, rl):
        while rl > 255:
            rle.append(255)
            rle.append(0)
            rl -= 255
        rle.append(rl)

    for y in range(im.height):
        for x in range(im.width):
            newpx = pixels[x, y]
            if newpx == px:
                rl += 1
                assert(rl < (1 << 21))
                continue

            # Code the previous run
            encode_pixel(px, rl)

            # Start a new run
            rl = 1
            px = newpx

    # Handle the final run
    encode_pixel(px, rl)

    return (im.width, im.height, bytes(rle))

def encode_2bit(im):
    """2-bit palette based RLE encoder.

    This encoder has a reprogrammable 2-bit palette. This allows it to encode
    arbitrary images with a full 8-bit depth but the 2-byte overhead each time
    a new colour is introduced means it is not efficient unless the image is
    carefully constructed to keep a good locality of reference for the three
    non-background colours.

    The encoding competes well with the 1-bit encoder for small monochrome
    images but once run-lengths longer than 62 start to become frequent then
    this encoding is about 30% larger than a 1-bit encoding.
    """
    pixels = im.load()
    assert(im.width <= 255)
    assert(im.height <= 255)

    full_palette = ReverseCLUT(clut8_rgb888)

    rle = []
    rl = 0
    px = pixels[0, 0]
    # black, grey25, grey50, white
    palette = [0, 254, 219, 215]
    next_color = 1

    def encode_pixel(px, rl):
        nonlocal next_color
        px = full_palette((px[0] << 16) + (px[1] << 8) + px[2])
        if px not in palette:
            rle.append(next_color << 6)
            rle.append(px)
            palette[next_color] = px
            next_color += 1
            if next_color >= len(palette):
                next_color = 1
        px = palette.index(px)
        if rl >= 63:
            rle.append((px << 6) + 63)
            rl -= 63

            while rl >= 255:
                rle.append(255)
                rl -= 255
            rle.append(rl)
        else:
            rle.append((px << 6) + rl)

    # Issue the descriptor
    rle.append(2)
    rle.append(im.width)
    rle.append(im.height)

    for y in range(im.height):
        for x in range(im.width):
            newpx = pixels[x, y]
            if newpx == px:
                rl += 1
                assert(rl < (1 << 21))
                continue

            # Code the previous run
            encode_pixel(px, rl)

            # Start a new run
            rl = 1
            px = newpx

    # Handle the final run
    encode_pixel(px, rl)

    return bytes(rle)

def encode_8bit(im):
    """Experimental 8-bit RLE encoder.

    For monochrome images this is about 3x less efficient than the 1-bit
    encoder. This encoder is not currently used anywhere in wasp-os and
    currently there is no decoder either (so don't assume this code
    actually works).
    """
    pixels = im.load()

    rle = []
    rl = 0
    px = pixels[0, 0]

    def encode_pixel(px, rl):
        px = (px[0] & 0xe0) | ((px[1] & 0xe0) >> 3) | ((px[2] & 0xc0) >> 6)
        rle.append(px)
        if rl > 0:
            rle.append(px)
            rl -= 2
        if rl > (1 << 14):
            rle.append(0x80 | ((rl >> 14) & 0x7f))
        if rl > (1 << 7):
            rle.append(0x80 | ((rl >> 7) & 0x7f))
        if rl >= 0:
            rle.append(rl & 0x7f)

    for y in range(im.height):
        for x in range(im.width):
            newpx = pixels[x, y]
            if newpx == px:
                rl += 1
                assert(rl < (1 << 21))
                continue

            # Code the previous run
            encode_pixel(px, rl)

            # Start a new run
            rl = 1
            px = newpx

    # Handle the final run
    encode_pixel(px, rl)

    return (im.width, im.height, bytes(rle))

def render_c(image, fname, indent, depth):
    extra_indent = ' ' * indent

    if len(image) == 3:
        print(f'{extra_indent}// {depth}-bit RLE, generated from {fname}, '
              f'{len(image[2])} bytes')
        (x, y, pixels) = image
    else:
        print(f'{extra_indent}// {depth}-bit RLE, generated from {fname}, '
              f'{len(image)} bytes')
        pixels = image

    print(f'{extra_indent}static const uint8_t {varname(fname)}[] = {{')
    print(f'{extra_indent}   ', end='')

    i = 0
    for rl in pixels:
        print(f' {hex(rl)},', end='')
        i += 1
        if i == 12:
            print(f'\n{extra_indent}   ', end='')
            i = 0
    print('\n};')

def render_py(image, fname, indent, depth):
    extra_indent = ' ' * indent

    if len(image) == 3:
        print(f'{extra_indent}# {depth}-bit RLE, generated from {fname}, '
              f'{len(image[2])} bytes')
        (x, y, pixels) = image
        print(f'{extra_indent}{varname(fname)} = (')
        print(f'{extra_indent}    {x}, {y},')
    else:
        print(f'{extra_indent}# {depth}-bit RLE, generated from {fname}, '
              f'{len(image)} bytes')
        pixels = image[3:]
        print(f'{extra_indent}{varname(fname)} = (')
        print(f'{extra_indent}    {image[0:1]}')
        print(f'{extra_indent}    {image[1:3]}')

    # Split the bytestring to ensure each line is short enough to
    # be absorbed on the target if needed.
    for i in range(0, len(pixels), 16):
        print(f'{extra_indent}    {pixels[i:i+16]}')
    print(f'{extra_indent})')

def decode_to_ascii(image):
    (sx, sy, rle) = image
    data = bytearray(2*sx)
    dp = 0
    black = ord('#')
    white = ord(' ')
    color = black

    for rl in rle:
        while rl:
            data[dp] = color
            data[dp+1] = color
            dp += 2
            rl -= 1

            if dp >= (2*sx):
                print(data.decode('utf-8'))
                dp = 0

        if color == black:
            color = white
        else:
            color = black

    # Check the image is the correct length
    assert(dp == 0)

parser = argparse.ArgumentParser(description='RLE encoder tool.')
parser.add_argument('files', nargs='+',
                    help='files to be encoded')
parser.add_argument('--ascii', action='store_true',
                    help='Run the resulting image(s) through an ascii art decoder')
parser.add_argument('--c', action='store_true',
                    help='Render the output as C instead of python')
parser.add_argument('--indent', default=0, type=int,
                    help='Add extra indentation in the generated code')
parser.add_argument('--2bit', action='store_true', dest='twobit',
                    help='Generate 2-bit image')
parser.add_argument('--8bit', action='store_true', dest='eightbit',
                    help='Generate 8-bit image')

args = parser.parse_args()

if args.eightbit:
    encoder = encode_8bit
    depth = 8
elif args.twobit:
    encoder = encode_2bit
    depth = 2
else:
    encoder = encode
    depth = 1

for fname in args.files:
    image = encoder(Image.open(fname))

    if args.c:
        render_c(image, fname, args.indent, depth)
    else:
        render_py(image, fname, args.indent, depth)

    if args.ascii:
        print()
        decode_to_ascii(image)
28.197368
83
0.526645
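Given the argparse options defined in the script above, typical invocations would look like the following; `icon.png` is a placeholder file name:

#   ./rle_encode.py icon.png              # 1-bit RLE rendered as Python source
#   ./rle_encode.py --2bit --c icon.png   # 2-bit RLE rendered as a C array
#   ./rle_encode.py --ascii icon.png      # also round-trip the result as ASCII art

Note that decode_to_ascii unpacks a (width, height, rle) tuple, so --ascii only matches the output shape of the 1-bit and 8-bit encoders, not encode_2bit, which returns a bare bytestring.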
d1cd211273b1809c0e335b036f56db0269df32f0
45
py
Python
src/Calculator/Addition.py
gitvicky97/Statistical-Calculator-
f5f413b43833fe1f0eec45b363fc86e3d962f825
[ "MIT" ]
null
null
null
src/Calculator/Addition.py
gitvicky97/Statistical-Calculator-
f5f413b43833fe1f0eec45b363fc86e3d962f825
[ "MIT" ]
null
null
null
src/Calculator/Addition.py
gitvicky97/Statistical-Calculator-
f5f413b43833fe1f0eec45b363fc86e3d962f825
[ "MIT" ]
3
2020-03-22T01:56:45.000Z
2020-03-22T20:20:48.000Z
def add(a, b):
    return float(a) + float(b)
22.5
30
0.577778
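A couple of illustrative checks for the add() helper above (values chosen arbitrarily); the float() coercion means numeric strings are accepted too:

assert add(2, 3) == 5.0
assert add('2.5', '0.5') == 3.0  # strings are coerced via float()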
618d02108973d4f7e2245e13acccda6fea103dcb
532
py
Python
Download/PythonExercicios/ex076.py
r-luis/Python-CursoemVideo
f978b2f4ab8444ebb746b4c85bd6db6d7775cbb4
[ "MIT" ]
null
null
null
Download/PythonExercicios/ex076.py
r-luis/Python-CursoemVideo
f978b2f4ab8444ebb746b4c85bd6db6d7775cbb4
[ "MIT" ]
null
null
null
Download/PythonExercicios/ex076.py
r-luis/Python-CursoemVideo
f978b2f4ab8444ebb746b4c85bd6db6d7775cbb4
[ "MIT" ]
null
null
null
'''Challenge 76
Create a program that has a single tuple with product names and their
respective prices, in sequence. At the end, show a price listing,
organizing the data in tabular form.'''
produto = ('25 coins', 25, '50 coins', 50, '75 coins', 75, '80 coins', 80, '100 coins', 100, '250 coins', 250)
i, f = 0, 1
sep = '-' * 40
tam = len(produto)
print(sep)
print(f'{"TIBIA COINS":^40}')
print(sep)
while f < tam:
    print(f'{produto[i]:.<31}R$ {produto[f]:.2f}', end='\n')
    i += 2
    f += 2
print(sep)
20.461538
119
0.62406
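For reference, the :.<31 format spec in the listing above pads each product name with dots to 31 characters, so the first data row of the table prints as:

#   25 coins.......................R$ 25.00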
de06a7884f66425562ba7bf418bbe52596f3a2c1
24
py
Python
venv/Lib/site-packages/thumbnail/__init__.py
Vishwas-bit/Ecommerce-Recommender-System
edf1ab1a6116720b7fb2038de18b494cdc7f08fb
[ "BSD-3-Clause" ]
5
2021-06-30T13:15:33.000Z
2022-03-01T22:00:02.000Z
venv/Lib/site-packages/thumbnail/__init__.py
Vishwas-bit/Ecommerce-Recommender-System
edf1ab1a6116720b7fb2038de18b494cdc7f08fb
[ "BSD-3-Clause" ]
1
2021-04-21T15:38:55.000Z
2021-04-21T15:38:55.000Z
venv/Lib/site-packages/thumbnail/__init__.py
Vishwas-bit/Ecommerce-Recommender-System
edf1ab1a6116720b7fb2038de18b494cdc7f08fb
[ "BSD-3-Clause" ]
4
2021-09-06T04:43:49.000Z
2022-03-11T21:18:14.000Z
from .thumbnail import *
24
24
0.791667
4c00c0d4b8c8e15ee3bb8e2276303865e5b5a542
7,955
py
Python
dataset/total_text.py
xieyufei1993/TextSnake.pytorch
e5eafdfc3845823dfef297ca6e576a1d72af57f7
[ "MIT" ]
null
null
null
dataset/total_text.py
xieyufei1993/TextSnake.pytorch
e5eafdfc3845823dfef297ca6e576a1d72af57f7
[ "MIT" ]
null
null
null
dataset/total_text.py
xieyufei1993/TextSnake.pytorch
e5eafdfc3845823dfef297ca6e576a1d72af57f7
[ "MIT" ]
null
null
null
import copy
import cv2
import os

import torch.utils.data as data
import scipy.io as io
import numpy as np

from util.config import config as cfg
from skimage.draw import polygon as drawpoly
from util.misc import find_bottom, find_long_edges, split_edge_seqence, \
    norm2, vector_cos, vector_sin
from dataset.data_util import pil_load_img


class TextInstance(object):

    def __init__(self, points, orient, text):
        self.orient = orient
        self.text = text

        self.points = []

        # remove point if area is almost unchanged after removing
        ori_area = cv2.contourArea(points)
        for p in range(len(points)):
            index = list(range(len(points)))
            index.remove(p)
            area = cv2.contourArea(points[index])
            if np.abs(ori_area - area) / ori_area > 0.017:
                self.points.append(points[p])
        self.points = np.array(self.points)

    def find_bottom_and_sideline(self):
        self.bottoms = find_bottom(self.points)  # find two bottoms of this Text
        self.e1, self.e2 = find_long_edges(self.points, self.bottoms)  # find two long edge sequence

    def disk_cover(self, n_disk=15):
        """
        cover text region with several disks
        :param n_disk: number of disks
        :return:
        """
        inner_points1 = split_edge_seqence(self.points, self.e1, n_disk)
        inner_points2 = split_edge_seqence(self.points, self.e2, n_disk)
        inner_points2 = inner_points2[::-1]  # inverse one of long edge

        center_points = (inner_points1 + inner_points2) / 2  # disk center
        radii = norm2(inner_points1 - center_points, axis=1)  # disk radius

        return inner_points1, inner_points2, center_points, radii

    def __repr__(self):
        return str(self.__dict__)

    def __getitem__(self, item):
        return getattr(self, item)


class TotalText(data.Dataset):

    def __init__(self, data_root, ignore_list=None, is_training=True, transform=None):
        super().__init__()
        self.data_root = data_root
        self.is_training = is_training
        self.transform = transform

        if ignore_list:
            with open(ignore_list) as f:
                ignore_list = f.readlines()
                ignore_list = [line.strip() for line in ignore_list]
        else:
            ignore_list = []

        self.image_root = os.path.join(data_root, 'Images', 'Train' if is_training else 'Test')
        self.annotation_root = os.path.join(data_root, 'gt', 'Train' if is_training else 'Test')
        self.image_list = os.listdir(self.image_root)
        self.image_list = list(filter(lambda img: img.replace('.jpg', '') not in ignore_list, self.image_list))
        self.annotation_list = ['poly_gt_{}.mat'.format(img_name.replace('.jpg', '')) for img_name in self.image_list]

        self.polygons = [None] * len(self.image_list)  # polygon cache

    def parse_mat(self, mat_path):
        """
        .mat file parser
        :param mat_path: (str), mat file path
        :return: (list), TextInstance
        """
        annot = io.loadmat(mat_path)
        polygon = []
        for cell in annot['polygt']:
            x = cell[1][0]
            y = cell[3][0]
            text = cell[4][0]

            if len(x) < 4:  # too few points
                continue
            try:
                ori = cell[5][0]
            except:
                ori = 'c'
            pts = np.stack([x, y]).T.astype(np.int32)
            polygon.append(TextInstance(pts, ori, text))
        return polygon

    def parse_txt(self, txt_path):
        """
        .txt file parser
        :param txt_path: (str), txt file path
        :return: (list), TextInstance
        """
        polygon = []
        with open(txt_path) as fr:
            for line in fr.readlines():
                line_list = line.strip().split(",")
                text = np.str_(line[-1])
                ori = np.str_('m')
                pts = np.asarray(line_list[:8]).reshape((4, 2)).astype(np.int32)
                polygon.append(TextInstance(pts, ori, text))
        return polygon

    def make_text_region(self, image, polygons):
        tr_mask = np.zeros(image.shape[:2], np.uint8)
        train_mask = np.ones(image.shape[:2], np.uint8)

        for polygon in polygons:
            cv2.fillPoly(tr_mask, [polygon.points.astype(np.int32)], color=(1,))
            if polygon.text == '#':
                cv2.fillPoly(train_mask, [polygon.points.astype(np.int32)], color=(0,))
        return tr_mask, train_mask

    def fill_polygon(self, mask, polygon, value):
        """
        fill polygon in the mask with value
        :param mask: input mask
        :param polygon: polygon to draw
        :param value: fill value
        """
        rr, cc = drawpoly(polygon[:, 1], polygon[:, 0])
        mask[rr, cc] = value

    def make_text_center_line(self, sideline1, sideline2, center_line, radius, \
                              tcl_mask, radius_map, sin_map, cos_map, expand=0.2, shrink=2):

        # TODO: shrink 1/2 * radius at two line end
        for i in range(shrink, len(center_line) - 1 - shrink):

            c1 = center_line[i]
            c2 = center_line[i + 1]
            top1 = sideline1[i]
            top2 = sideline1[i + 1]
            bottom1 = sideline2[i]
            bottom2 = sideline2[i + 1]

            sin_theta = vector_sin(c2 - c1)
            cos_theta = vector_cos(c2 - c1)

            p1 = c1 + (top1 - c1) * expand
            p2 = c1 + (bottom1 - c1) * expand
            p3 = c2 + (bottom2 - c2) * expand
            p4 = c2 + (top2 - c2) * expand
            polygon = np.stack([p1, p2, p3, p4])

            self.fill_polygon(tcl_mask, polygon, value=1)
            self.fill_polygon(radius_map, polygon, value=radius[i])
            self.fill_polygon(sin_map, polygon, value=sin_theta)
            self.fill_polygon(cos_map, polygon, value=cos_theta)

    def __getitem__(self, item):

        image_id = self.image_list[item]
        image_path = os.path.join(self.image_root, image_id)
        # print(image_path)
        annotation_id = self.annotation_list[item]
        annotation_path = os.path.join(self.annotation_root, annotation_id)
        polygons = self.parse_mat(annotation_path)

        for i, polygon in enumerate(polygons):
            if polygon.text != '#':
                polygon.find_bottom_and_sideline()

        # print(image_path, annotation_path)
        # Read image data
        image = pil_load_img(image_path)
        H, W, _ = image.shape

        if self.transform:
            image, polygons = self.transform(image, copy.copy(polygons))

        tcl_mask = np.zeros(image.shape[:2], np.uint8)
        radius_map = np.zeros(image.shape[:2], np.float32)
        sin_map = np.zeros(image.shape[:2], np.float32)
        cos_map = np.zeros(image.shape[:2], np.float32)

        for i, polygon in enumerate(polygons):
            if polygon.text != '#':
                sideline1, sideline2, center_points, radius = polygon.disk_cover(n_disk=cfg.n_disk)
                self.make_text_center_line(sideline1, sideline2, center_points, radius,
                                           tcl_mask, radius_map, sin_map, cos_map)
        tr_mask, train_mask = self.make_text_region(image, polygons)

        # to pytorch channel sequence
        image = image.transpose(2, 0, 1)

        meta = {
            'image_id': image_id,
            'image_path': image_path,
            'Height': H,
            'Width': W
        }
        return image, train_mask, tr_mask, tcl_mask, radius_map, sin_map, cos_map, meta

    def __len__(self):
        return len(self.image_list)


if __name__ == '__main__':
    import os
    from util.augmentation import BaseTransform

    transform = BaseTransform(
        size=512, mean=0.5, std=0.5
    )

    trainset = TotalText(
        data_root='data/total-text',
        ignore_list='./ignore_list.txt',
        is_training=True,
        transform=transform
    )
35.355556
127
0.591578
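Since TotalText is a standard torch.utils.data.Dataset, the trainset built in the __main__ block above can be fed straight to a DataLoader; batch size and worker count here are illustrative:

from torch.utils.data import DataLoader

loader = DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2)
# each batch unpacks in the order __getitem__ returns:
# image, train_mask, tr_mask, tcl_mask, radius_map, sin_map, cos_map, meta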
24b9db949fde96c536255ee1ffff997d0dd0b2f8
7,313
py
Python
ceilometer/tests/unit/hardware/inspector/test_snmp.py
muralidharan10/ceilometer
70c30578b994694550bcd24dfc36c7f3be4946dc
[ "Apache-2.0" ]
1
2018-11-18T16:03:10.000Z
2018-11-18T16:03:10.000Z
ceilometer/tests/unit/hardware/inspector/test_snmp.py
muralidharan10/ceilometer
70c30578b994694550bcd24dfc36c7f3be4946dc
[ "Apache-2.0" ]
null
null
null
ceilometer/tests/unit/hardware/inspector/test_snmp.py
muralidharan10/ceilometer
70c30578b994694550bcd24dfc36c7f3be4946dc
[ "Apache-2.0" ]
2
2015-12-28T14:36:47.000Z
2018-11-18T16:03:11.000Z
#
# Copyright 2013 Intel Corp
#
# Authors: Lianhao Lu <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for ceilometer/hardware/inspector/snmp/inspector.py
"""
from oslo_utils import netutils
from oslotest import mockpatch

from ceilometer.hardware.inspector import snmp
from ceilometer.tests import base as test_base

ins = snmp.SNMPInspector


class FakeObjectName(object):
    def __init__(self, name):
        self.name = name

    def __str__(self):
        return str(self.name)


def faux_getCmd_new(authData, transportTarget, *oids, **kwargs):
    varBinds = [(FakeObjectName(oid),
                 int(oid.split('.')[-1])) for oid in oids]
    return (None, None, 0, varBinds)


def faux_bulkCmd_new(authData, transportTarget, nonRepeaters, maxRepetitions,
                     *oids, **kwargs):
    varBindTable = [
        [(FakeObjectName(oid + ".%d" % i), i) for i in range(1, 3)]
        for oid in oids
    ]
    return (None, None, 0, varBindTable)


class TestSNMPInspector(test_base.BaseTestCase):
    mapping = {
        'test_exact': {
            'matching_type': snmp.EXACT,
            'metric_oid': ('1.3.6.1.4.1.2021.10.1.3.1', int),
            'metadata': {
                'meta': ('1.3.6.1.4.1.2021.10.1.3.8', int)
            },
            'post_op': '_fake_post_op',
        },
        'test_prefix': {
            'matching_type': snmp.PREFIX,
            'metric_oid': ('1.3.6.1.4.1.2021.9.1.8', int),
            'metadata': {
                'meta': ('1.3.6.1.4.1.2021.9.1.3', int)
            },
            'post_op': None,
        },
    }

    def setUp(self):
        super(TestSNMPInspector, self).setUp()
        self.inspector = snmp.SNMPInspector()
        self.host = netutils.urlsplit("snmp://localhost")
        self.useFixture(mockpatch.PatchObject(
            self.inspector._cmdGen, 'getCmd', new=faux_getCmd_new))
        self.useFixture(mockpatch.PatchObject(
            self.inspector._cmdGen, 'bulkCmd', new=faux_bulkCmd_new))

    def test_snmp_error(self):
        def get_list(func, *args, **kwargs):
            return list(func(*args, **kwargs))

        def faux_parse(ret, is_bulk):
            return (True, 'forced error')

        self.useFixture(mockpatch.PatchObject(
            snmp, 'parse_snmp_return', new=faux_parse))

        self.assertRaises(snmp.SNMPException,
                          get_list,
                          self.inspector.inspect_generic,
                          host=self.host,
                          cache={},
                          extra_metadata={},
                          param=self.mapping['test_exact'])

    @staticmethod
    def _fake_post_op(host, cache, meter_def, value, metadata, extra, suffix):
        metadata.update(post_op_meta=4)
        extra.update(project_id=2)
        return value

    def test_inspect_generic_exact(self):
        self.inspector._fake_post_op = self._fake_post_op
        cache = {}
        ret = list(self.inspector.inspect_generic(self.host,
                                                  cache,
                                                  {},
                                                  self.mapping['test_exact']))
        keys = cache[ins._CACHE_KEY_OID].keys()
        self.assertIn('1.3.6.1.4.1.2021.10.1.3.1', keys)
        self.assertIn('1.3.6.1.4.1.2021.10.1.3.8', keys)
        self.assertEqual(1, len(ret))
        self.assertEqual(1, ret[0][0])
        self.assertEqual(8, ret[0][1]['meta'])
        self.assertEqual(4, ret[0][1]['post_op_meta'])
        self.assertEqual(2, ret[0][2]['project_id'])

    def test_inspect_generic_prefix(self):
        cache = {}
        ret = list(self.inspector.inspect_generic(self.host,
                                                  cache,
                                                  {},
                                                  self.mapping['test_prefix']))
        keys = cache[ins._CACHE_KEY_OID].keys()
        self.assertIn('1.3.6.1.4.1.2021.9.1.8' + '.1', keys)
        self.assertIn('1.3.6.1.4.1.2021.9.1.8' + '.2', keys)
        self.assertIn('1.3.6.1.4.1.2021.9.1.3' + '.1', keys)
        self.assertIn('1.3.6.1.4.1.2021.9.1.3' + '.2', keys)
        self.assertEqual(2, len(ret))
        self.assertIn(ret[0][0], (1, 2))
        self.assertEqual(ret[0][0], ret[0][1]['meta'])

    def test_post_op_net(self):
        self.useFixture(mockpatch.PatchObject(
            self.inspector._cmdGen, 'bulkCmd', new=faux_bulkCmd_new))
        cache = {}
        metadata = dict(name='lo',
                        speed=0,
                        mac='ba21e43302fe')
        extra = {}
        ret = self.inspector._post_op_net(self.host, cache, None,
                                          value=8,
                                          metadata=metadata,
                                          extra=extra,
                                          suffix=".2")
        self.assertEqual(8, ret)
        self.assertIn('ip', metadata)
        self.assertIn("2", metadata['ip'])
        self.assertIn('resource_id', extra)
        self.assertEqual("localhost.lo", extra['resource_id'])

    def test_post_op_disk(self):
        cache = {}
        metadata = dict(device='/dev/sda1',
                        path='/')
        extra = {}
        ret = self.inspector._post_op_disk(self.host, cache, None,
                                           value=8,
                                           metadata=metadata,
                                           extra=extra,
                                           suffix=None)
        self.assertEqual(8, ret)
        self.assertIn('resource_id', extra)
        self.assertEqual("localhost./dev/sda1", extra['resource_id'])

    def test_prepare_params(self):
        param = {'post_op': '_post_op_disk',
                 'oid': '1.3.6.1.4.1.2021.9.1.6',
                 'type': 'int',
                 'matching_type': 'type_prefix',
                 'metadata': {
                     'device': {'oid': '1.3.6.1.4.1.2021.9.1.3',
                                'type': 'str'},
                     'path': {'oid': '1.3.6.1.4.1.2021.9.1.2',
                              'type': "lambda x: str(x)"}}}
        processed = self.inspector.prepare_params(param)
        self.assertEqual('_post_op_disk', processed['post_op'])
        self.assertEqual('1.3.6.1.4.1.2021.9.1.6', processed['metric_oid'][0])
        self.assertEqual(int, processed['metric_oid'][1])
        self.assertEqual(snmp.PREFIX, processed['matching_type'])
        self.assertEqual(2, len(processed['metadata'].keys()))
        self.assertEqual('1.3.6.1.4.1.2021.9.1.2',
                         processed['metadata']['path'][0])
        self.assertEqual("4", processed['metadata']['path'][1](4))
38.898936
79
0.52646
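The fakes in the record above follow one convention: faux_getCmd_new reports each OID's value as its last dotted component, and faux_bulkCmd_new enumerates suffixes .1 and .2. That is why test_inspect_generic_exact expects a metric of 1 and metadata of 8:

oid = '1.3.6.1.4.1.2021.10.1.3.8'
assert int(oid.split('.')[-1]) == 8  # the value the fake getCmd returns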
d9f10d914519f9f61193dbc467831bbceaef2882
2,314
py
Python
userbot/modules/quotly.py
RoyalBoy69/OUB
d450fd55bbf2e292bc1a70e669217b79106edc7a
[ "Naumen", "Condor-1.1", "MS-PL" ]
39
2020-05-11T11:51:34.000Z
2022-03-04T19:23:40.000Z
userbot/modules/quotly.py
RoyalBoy69/OUB
d450fd55bbf2e292bc1a70e669217b79106edc7a
[ "Naumen", "Condor-1.1", "MS-PL" ]
41
2020-04-29T16:56:53.000Z
2021-05-25T20:46:44.000Z
userbot/modules/quotly.py
RoyalBoy69/OUB
d450fd55bbf2e292bc1a70e669217b79106edc7a
[ "Naumen", "Condor-1.1", "MS-PL" ]
615
2020-04-27T14:50:53.000Z
2022-03-12T19:54:32.000Z
# Copyright (C) 2020 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
# Port From UniBorg to UserBot by MoveAngel

import telethon
from asyncio.exceptions import TimeoutError
from telethon import events
from telethon.errors.rpcerrorlist import YouBlockedUserError

from userbot import bot, CMD_HELP
from userbot.events import register


@register(outgoing=True, pattern=r"^\.q")
async def quotess(qotli):
    if qotli.fwd_from:
        return
    if not qotli.reply_to_msg_id:
        return await qotli.edit("```Reply to any user message.```")
    reply_message = await qotli.get_reply_message()
    if not reply_message.text:
        return await qotli.edit("```Reply to text message```")
    chat = "@QuotLyBot"
    if reply_message.sender.bot:
        return await qotli.edit("```Reply to actual users message.```")
    await qotli.edit("```Making a Quote```")
    try:
        async with bot.conversation(chat) as conv:
            try:
                response = conv.wait_event(
                    events.NewMessage(
                        incoming=True,
                        from_users=1031952739))
                msg = await bot.forward_messages(chat, reply_message)
                response = await response
                """ - don't spam notif - """
                await bot.send_read_acknowledge(conv.chat_id)
            except YouBlockedUserError:
                return await qotli.reply("```Please unblock @QuotLyBot and try again```")
            if response.text.startswith("Hi!"):
                await qotli.edit("```Can you kindly disable your forward privacy settings for good?```")
            else:
                await qotli.delete()
                await bot.forward_messages(qotli.chat_id, response.message)
                await bot.send_read_acknowledge(qotli.chat_id)
            """ - cleanup chat after completed - """
            await qotli.client.delete_messages(conv.chat_id,
                                               [msg.id, response.id])
    except TimeoutError:
        await qotli.edit()


CMD_HELP.update({
    "quotly": "`.q`\
\nUsage: Enhance ur text to sticker.\
\n\n`.pch`\
\nUsage: Better than quotly."
})
37.322581
104
0.61452
346cd1ccc82bfa4d9e43fd951ce1dfcddc991853
1,409
py
Python
apartment_price_server/util.py
kushal-from-dolpo/project_apartment_price
33c911915c2194327936d4db7c6da916ef5aa943
[ "MIT" ]
null
null
null
apartment_price_server/util.py
kushal-from-dolpo/project_apartment_price
33c911915c2194327936d4db7c6da916ef5aa943
[ "MIT" ]
null
null
null
apartment_price_server/util.py
kushal-from-dolpo/project_apartment_price
33c911915c2194327936d4db7c6da916ef5aa943
[ "MIT" ]
null
null
null
import pickle
import json
import numpy as np

__locations = None
__data_columns = None
__model = None


def get_estimated_price(location, sqft, bhk, bath):
    try:
        loc_index = __data_columns.index(location.lower())
    except:
        loc_index = -1

    x = np.zeros(len(__data_columns))
    x[0] = sqft
    x[1] = bath
    x[2] = bhk
    if loc_index >= 0:
        x[loc_index] = 1

    return round(__model.predict([x])[0], 2)


def load_saved_artifacts():
    print("loading saved artifacts...start")
    global __data_columns
    global __locations

    with open("./artifacts/columns.json", "r") as f:
        __data_columns = json.load(f)['data_columns']
        __locations = __data_columns[3:]  # first 3 columns are sqft, bath, bhk

    global __model
    if __model is None:
        with open('./artifacts/bengalaru_home_price_modle_pickle', 'rb') as f:
            __model = pickle.load(f)
    print("loading saved artifacts...done")


def get_location_names():
    return __locations


def get_data_columns():
    return __data_columns


if __name__ == '__main__':
    load_saved_artifacts()
    print(get_location_names())
    print(get_estimated_price('1st Phase JP Nagar', 1000, 3, 3))
    print(get_estimated_price('1st Phase JP Nagar', 1000, 2, 2))
    print(get_estimated_price('Kalhalli', 1000, 2, 2))  # other location
    print(get_estimated_price('Ejipura', 1000, 2, 2))  # other location
27.096154
79
0.671398
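The helpers in the record above are clearly written to back an HTTP service (the package is named apartment_price_server). A hypothetical Flask wiring, with the route name and form field names as assumptions for illustration:

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route('/predict_home_price', methods=['POST'])
def predict_home_price():
    # load_saved_artifacts() must have been called once at startup
    return jsonify({
        'estimated_price': get_estimated_price(
            request.form['location'],
            float(request.form['total_sqft']),
            int(request.form['bhk']),
            int(request.form['bath']),
        )
    })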
fd800cf67d5f605fa067bebafd9a6f46bb87026f
40,626
bzl
Python
tensorflow/workspace.bzl
cmpt376Kor/tensorflow
0f87d26ed7c8ab3427f75eacfeba797262ab7483
[ "Apache-2.0" ]
1
2018-11-18T12:53:23.000Z
2018-11-18T12:53:23.000Z
tensorflow/workspace.bzl
Quin1an/tensorflow
3b4b45dd4436d007dc954d1a6c8061f2d276a6d4
[ "Apache-2.0" ]
null
null
null
tensorflow/workspace.bzl
Quin1an/tensorflow
3b4b45dd4436d007dc954d1a6c8061f2d276a6d4
[ "Apache-2.0" ]
1
2018-11-30T01:35:01.000Z
2018-11-30T01:35:01.000Z
# TensorFlow external dependencies that can be loaded in WORKSPACE files.

load("//third_party/gpus:cuda_configure.bzl", "cuda_configure")
load("//third_party/gpus:rocm_configure.bzl", "rocm_configure")
load("//third_party/tensorrt:tensorrt_configure.bzl", "tensorrt_configure")
load("//third_party:nccl/nccl_configure.bzl", "nccl_configure")
load("//third_party/mkl:build_defs.bzl", "mkl_repository")
load("//third_party/git:git_configure.bzl", "git_configure")
load("//third_party/py:python_configure.bzl", "python_configure")
load("//third_party/sycl:sycl_configure.bzl", "sycl_configure")
load("//third_party/systemlibs:syslibs_configure.bzl", "syslibs_configure")
load("//third_party/toolchains/clang6:repo.bzl", "clang6_configure")
load("//third_party/toolchains/cpus/arm:arm_compiler_configure.bzl", "arm_compiler_configure")
load("//third_party:repo.bzl", "tf_http_archive")
load("//third_party/clang_toolchain:cc_configure_clang.bzl", "cc_download_clang_toolchain")
load("@io_bazel_rules_closure//closure/private:java_import_external.bzl", "java_import_external")
load("@io_bazel_rules_closure//closure:defs.bzl", "filegroup_external")
load(
    "//tensorflow/tools/def_file_filter:def_file_filter_configure.bzl",
    "def_file_filter_configure",
)
load("//third_party/aws:workspace.bzl", aws = "repo")
load("//third_party/flatbuffers:workspace.bzl", flatbuffers = "repo")
load("//third_party/highwayhash:workspace.bzl", highwayhash = "repo")
load("//third_party/icu:workspace.bzl", icu = "repo")
load("//third_party/jpeg:workspace.bzl", jpeg = "repo")
load("//third_party/nasm:workspace.bzl", nasm = "repo")
load("//third_party/kissfft:workspace.bzl", kissfft = "repo")
load("//third_party/keras_applications_archive:workspace.bzl", keras_applications = "repo")

def initialize_third_party():
    """ Load third party repositories.  See above load() statements. """
    aws()
    flatbuffers()
    highwayhash()
    icu()
    keras_applications()
    kissfft()
    jpeg()
    nasm()

# Sanitize a dependency so that it works correctly from code that includes
# TensorFlow as a submodule.
def clean_dep(dep):
    return str(Label(dep))

# If TensorFlow is linked as a submodule.
# path_prefix is no longer used.
# tf_repo_name is thought to be under consideration.
def tf_workspace(path_prefix = "", tf_repo_name = ""):
    # Note that we check the minimum bazel version in WORKSPACE.
    clang6_configure(name = "local_config_clang6")
    cc_download_clang_toolchain(name = "local_config_download_clang")
    cuda_configure(name = "local_config_cuda")
    tensorrt_configure(name = "local_config_tensorrt")
    nccl_configure(name = "local_config_nccl")
    git_configure(name = "local_config_git")
    sycl_configure(name = "local_config_sycl")
    syslibs_configure(name = "local_config_syslibs")
    python_configure(name = "local_config_python")
    rocm_configure(name = "local_config_rocm")

    initialize_third_party()

    # For windows bazel build
    # TODO: Remove def file filter when TensorFlow can export symbols properly on Windows.
    def_file_filter_configure(name = "local_config_def_file_filter")

    # Point //external/local_config_arm_compiler to //external/arm_compiler
    arm_compiler_configure(
        name = "local_config_arm_compiler",
        build_file = clean_dep("//third_party/toolchains/cpus/arm:BUILD"),
        remote_config_repo = "../arm_compiler",
    )

    mkl_repository(
        name = "mkl_linux",
        build_file = clean_dep("//third_party/mkl:mkl.BUILD"),
        sha256 = "e2233534a9d15c387e22260997af4312a39e9f86f791768409be273b5453c4e6",
        strip_prefix = "mklml_lnx_2019.0.20180710",
        urls = [
            "https://mirror.bazel.build/github.com/intel/mkl-dnn/releases/download/v0.16/mklml_lnx_2019.0.20180710.tgz",
            "https://github.com/intel/mkl-dnn/releases/download/v0.16/mklml_lnx_2019.0.20180710.tgz",
        ],
    )
    mkl_repository(
        name = "mkl_windows",
        build_file = clean_dep("//third_party/mkl:mkl.BUILD"),
        sha256 = "3fdcff17b018a0082491adf3ba143358265336a801646e46e0191ec8d58d24a2",
        strip_prefix = "mklml_win_2019.0.20180710",
        urls = [
            "https://mirror.bazel.build/github.com/intel/mkl-dnn/releases/download/v0.16/mklml_win_2019.0.20180710.zip",
            "https://github.com/intel/mkl-dnn/releases/download/v0.16/mklml_win_2019.0.20180710.zip",
        ],
    )
    mkl_repository(
        name = "mkl_darwin",
        build_file = clean_dep("//third_party/mkl:mkl.BUILD"),
        sha256 = "411a30014a938eb83fb9f37b3dbe8e371b106fc1dd621fc23123cadc72737ce6",
        strip_prefix = "mklml_mac_2019.0.20180710",
        urls = [
            "https://mirror.bazel.build/github.com/intel/mkl-dnn/releases/download/v0.16/mklml_mac_2019.0.20180710.tgz",
            "https://github.com/intel/mkl-dnn/releases/download/v0.16/mklml_mac_2019.0.20180710.tgz",
        ],
    )

    if path_prefix:
        print("path_prefix was specified to tf_workspace but is no longer used " +
              "and will be removed in the future.")

    tf_http_archive(
        name = "mkl_dnn",
        build_file = clean_dep("//third_party/mkl_dnn:mkldnn.BUILD"),
        sha256 = "b100f57af4a2b59a3a37a1ba38f77b644d2107d758a1a7f4e51310063cd21e73",
        strip_prefix = "mkl-dnn-733fc908874c71a5285043931a1cf80aa923165c",
        urls = [
            "https://mirror.bazel.build/github.com/intel/mkl-dnn/archive/733fc908874c71a5285043931a1cf80aa923165c.tar.gz",
            "https://github.com/intel/mkl-dnn/archive/733fc908874c71a5285043931a1cf80aa923165c.tar.gz",
        ],
    )

    tf_http_archive(
        name = "com_google_absl",
        build_file = clean_dep("//third_party:com_google_absl.BUILD"),
        sha256 = "28a6cb644dcebe7d3e0ee347706fec2e6975fae2bceb0add834c77140c7b6632",
        strip_prefix = "abseil-cpp-f6ae816808cd913e0e2b3e2af14f328fa1071af0",
        urls = [
            "https://mirror.bazel.build/github.com/abseil/abseil-cpp/archive/f6ae816808cd913e0e2b3e2af14f328fa1071af0.tar.gz",
            "https://github.com/abseil/abseil-cpp/archive/f6ae816808cd913e0e2b3e2af14f328fa1071af0.tar.gz",
        ],
    )

    tf_http_archive(
        name = "eigen_archive",
        build_file = clean_dep("//third_party:eigen.BUILD"),
        sha256 = "1e045bef75e9b17d459b60cc30b34408f3fdab300c5053d3919d1a5921f3c86a",
        strip_prefix = "eigen-eigen-af2071407280",
        urls = [
            "https://mirror.bazel.build/bitbucket.org/eigen/eigen/get/af2071407280.tar.gz",
            "https://bitbucket.org/eigen/eigen/get/af2071407280.tar.gz",
        ],
    )

    tf_http_archive(
        name = "arm_compiler",
        build_file = clean_dep("//:arm_compiler.BUILD"),
        sha256 = "970285762565c7890c6c087d262b0a18286e7d0384f13a37786d8521773bc969",
        strip_prefix = "tools-0e906ebc527eab1cdbf7adabff5b474da9562e9f/arm-bcm2708/arm-rpi-4.9.3-linux-gnueabihf",
        urls = [
            "https://mirror.bazel.build/github.com/raspberrypi/tools/archive/0e906ebc527eab1cdbf7adabff5b474da9562e9f.tar.gz",
            # Please uncomment me, when the next upgrade happens. Then
            # remove the whitelist entry in third_party/repo.bzl.
            # "https://github.com/raspberrypi/tools/archive/0e906ebc527eab1cdbf7adabff5b474da9562e9f.tar.gz",
        ],
    )

    tf_http_archive(
        name = "libxsmm_archive",
        build_file = clean_dep("//third_party:libxsmm.BUILD"),
        sha256 = "cd8532021352b4a0290d209f7f9bfd7c2411e08286a893af3577a43457287bfa",
        strip_prefix = "libxsmm-1.9",
        urls = [
            "https://mirror.bazel.build/github.com/hfp/libxsmm/archive/1.9.tar.gz",
            "https://github.com/hfp/libxsmm/archive/1.9.tar.gz",
        ],
    )

    tf_http_archive(
        name = "com_googlesource_code_re2",
        sha256 = "803c7811146edeef8f91064de37c6f19136ff01a2a8cdb3230e940b2fd9f07fe",
        strip_prefix = "re2-2018-07-01",
        system_build_file = clean_dep("//third_party/systemlibs:re2.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/google/re2/archive/2018-07-01.tar.gz",
            "https://github.com/google/re2/archive/2018-07-01.tar.gz",
        ],
    )

    tf_http_archive(
        name = "com_github_googlecloudplatform_google_cloud_cpp",
        sha256 = "fdd3b3aecce60987e5525e55bf3a21d68a8695320bd5b980775af6507eec3944",
        strip_prefix = "google-cloud-cpp-14760a86c4ffab9943b476305c4fe927ad95db1c",
        system_build_file = clean_dep("//third_party/systemlibs:google_cloud_cpp.BUILD"),
        system_link_files = {
            "//third_party/systemlibs:google_cloud_cpp.google.cloud.bigtable.BUILD": "google/cloud/bigtable/BUILD",
        },
        urls = [
            "https://mirror.bazel.build/github.com/GoogleCloudPlatform/google-cloud-cpp/archive/14760a86c4ffab9943b476305c4fe927ad95db1c.tar.gz",
            "https://github.com/GoogleCloudPlatform/google-cloud-cpp/archive/14760a86c4ffab9943b476305c4fe927ad95db1c.tar.gz",
        ],
    )

    tf_http_archive(
        name = "com_github_googleapis_googleapis",
        build_file = clean_dep("//third_party:googleapis.BUILD"),
        sha256 = "824870d87a176f26bcef663e92051f532fac756d1a06b404055dc078425f4378",
        strip_prefix = "googleapis-f81082ea1e2f85c43649bee26e0d9871d4b41cdb",
        system_build_file = clean_dep("//third_party/systemlibs:googleapis.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/googleapis/googleapis/archive/f81082ea1e2f85c43649bee26e0d9871d4b41cdb.zip",
            "https://github.com/googleapis/googleapis/archive/f81082ea1e2f85c43649bee26e0d9871d4b41cdb.zip",
        ],
    )

    tf_http_archive(
        name = "gemmlowp",
        sha256 = "b87faa7294dfcc5d678f22a59d2c01ca94ea1e2a3b488c38a95a67889ed0a658",
        strip_prefix = "gemmlowp-38ebac7b059e84692f53e5938f97a9943c120d98",
        urls = [
            "https://mirror.bazel.build/github.com/google/gemmlowp/archive/38ebac7b059e84692f53e5938f97a9943c120d98.zip",
            "https://github.com/google/gemmlowp/archive/38ebac7b059e84692f53e5938f97a9943c120d98.zip",
        ],
    )

    tf_http_archive(
        name = "farmhash_archive",
        build_file = clean_dep("//third_party:farmhash.BUILD"),
        sha256 = "6560547c63e4af82b0f202cb710ceabb3f21347a4b996db565a411da5b17aba0",
        strip_prefix = "farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45",
        urls = [
            "https://mirror.bazel.build/github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz",
            "https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz",
        ],
    )

    tf_http_archive(
        name = "png_archive",
        build_file = clean_dep("//third_party:png.BUILD"),
        patch_file = clean_dep("//third_party:png_fix_rpi.patch"),
        sha256 = "e45ce5f68b1d80e2cb9a2b601605b374bdf51e1798ef1c2c2bd62131dfcf9eef",
        strip_prefix = "libpng-1.6.34",
        system_build_file = clean_dep("//third_party/systemlibs:png.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/glennrp/libpng/archive/v1.6.34.tar.gz",
            "https://github.com/glennrp/libpng/archive/v1.6.34.tar.gz",
        ],
    )

    tf_http_archive(
        name = "org_sqlite",
        build_file = clean_dep("//third_party:sqlite.BUILD"),
        sha256 = "ad68c1216c3a474cf360c7581a4001e952515b3649342100f2d7ca7c8e313da6",
        strip_prefix = "sqlite-amalgamation-3240000",
        system_build_file = clean_dep("//third_party/systemlibs:sqlite.BUILD"),
        urls = [
            "https://mirror.bazel.build/www.sqlite.org/2018/sqlite-amalgamation-3240000.zip",
            "https://www.sqlite.org/2018/sqlite-amalgamation-3240000.zip",
        ],
    )

    tf_http_archive(
        name = "gif_archive",
        build_file = clean_dep("//third_party:gif.BUILD"),
        sha256 = "34a7377ba834397db019e8eb122e551a49c98f49df75ec3fcc92b9a794a4f6d1",
        strip_prefix = "giflib-5.1.4",
        system_build_file = clean_dep("//third_party/systemlibs:gif.BUILD"),
        urls = [
            "https://mirror.bazel.build/ufpr.dl.sourceforge.net/project/giflib/giflib-5.1.4.tar.gz",
            "http://pilotfiber.dl.sourceforge.net/project/giflib/giflib-5.1.4.tar.gz",
        ],
    )

    tf_http_archive(
        name = "six_archive",
        build_file = clean_dep("//third_party:six.BUILD"),
        sha256 = "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a",
        strip_prefix = "six-1.10.0",
        system_build_file = clean_dep("//third_party/systemlibs:six.BUILD"),
        urls = [
            "https://mirror.bazel.build/pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz",
            "https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz",
        ],
    )

    tf_http_archive(
        name = "astor_archive",
        build_file = clean_dep("//third_party:astor.BUILD"),
        sha256 = "ff6d2e2962d834acb125cc4dcc80c54a8c17c253f4cc9d9c43b5102a560bb75d",
        strip_prefix = "astor-0.6.2",
        system_build_file = clean_dep("//third_party/systemlibs:astor.BUILD"),
        urls = [
            "https://mirror.bazel.build/pypi.python.org/packages/d8/be/c4276b3199ec3feee2a88bc64810fbea8f26d961e0a4cd9c68387a9f35de/astor-0.6.2.tar.gz",
            "https://pypi.python.org/packages/d8/be/c4276b3199ec3feee2a88bc64810fbea8f26d961e0a4cd9c68387a9f35de/astor-0.6.2.tar.gz",
        ],
    )

    tf_http_archive(
        name = "gast_archive",
        build_file = clean_dep("//third_party:gast.BUILD"),
        sha256 = "7068908321ecd2774f145193c4b34a11305bd104b4551b09273dfd1d6a374930",
        strip_prefix = "gast-0.2.0",
        system_build_file = clean_dep("//third_party/systemlibs:gast.BUILD"),
        urls = [
            "https://mirror.bazel.build/pypi.python.org/packages/5c/78/ff794fcae2ce8aa6323e789d1f8b3b7765f601e7702726f430e814822b96/gast-0.2.0.tar.gz",
            "https://pypi.python.org/packages/5c/78/ff794fcae2ce8aa6323e789d1f8b3b7765f601e7702726f430e814822b96/gast-0.2.0.tar.gz",
        ],
    )

    tf_http_archive(
        name = "termcolor_archive",
        build_file = clean_dep("//third_party:termcolor.BUILD"),
        sha256 = "1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b",
        strip_prefix = "termcolor-1.1.0",
        system_build_file = clean_dep("//third_party/systemlibs:termcolor.BUILD"),
        urls = [
            "https://mirror.bazel.build/pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz",
            "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz",
        ],
    )

    tf_http_archive(
        name = "absl_py",
        sha256 = "95160f778a62c7a60ddeadc7bf2d83f85a23a27359814aca12cf949e896fa82c",
        strip_prefix = "abseil-py-pypi-v0.2.2",
        system_build_file = clean_dep("//third_party/systemlibs:absl_py.BUILD"),
        system_link_files = {
            "//third_party/systemlibs:absl_py.absl.flags.BUILD": "absl/flags/BUILD",
            "//third_party/systemlibs:absl_py.absl.testing.BUILD": "absl/testing/BUILD",
        },
        urls = [
            "https://mirror.bazel.build/github.com/abseil/abseil-py/archive/pypi-v0.2.2.tar.gz",
            "https://github.com/abseil/abseil-py/archive/pypi-v0.2.2.tar.gz",
        ],
    )

    tf_http_archive(
        name = "org_python_pypi_backports_weakref",
        build_file = clean_dep("//third_party:backports_weakref.BUILD"),
        sha256 = "8813bf712a66b3d8b85dc289e1104ed220f1878cf981e2fe756dfaabe9a82892",
        strip_prefix = "backports.weakref-1.0rc1/src",
        urls = [
            "https://mirror.bazel.build/pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz",
            "https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz",
        ],
    )

    filegroup_external(
        name = "org_python_license",
        licenses = ["notice"],  # Python 2.0
        sha256_urls = {
            "7ca8f169368827781684f7f20876d17b4415bbc5cb28baa4ca4652f0dda05e9f": [
                "https://mirror.bazel.build/docs.python.org/2.7/_sources/license.rst.txt",
                "https://docs.python.org/2.7/_sources/license.rst.txt",
            ],
        },
    )

    PROTOBUF_URLS = [
        "https://mirror.bazel.build/github.com/google/protobuf/archive/v3.6.1.tar.gz",
        "https://github.com/google/protobuf/archive/v3.6.1.tar.gz",
    ]
    PROTOBUF_SHA256 = "3d4e589d81b2006ca603c1ab712c9715a76227293032d05b26fca603f90b3f5b"
    PROTOBUF_STRIP_PREFIX = "protobuf-3.6.1"

    tf_http_archive(
        name = "protobuf_archive",
        sha256 = PROTOBUF_SHA256,
        strip_prefix = PROTOBUF_STRIP_PREFIX,
        urls = PROTOBUF_URLS,
    )

    # We need to import the protobuf library under the names com_google_protobuf
    # and com_google_protobuf_cc to enable proto_library support in bazel.
    # Unfortunately there is no way to alias http_archives at the moment.
    tf_http_archive(
        name = "com_google_protobuf",
        sha256 = PROTOBUF_SHA256,
        strip_prefix = PROTOBUF_STRIP_PREFIX,
        urls = PROTOBUF_URLS,
    )

    tf_http_archive(
        name = "com_google_protobuf_cc",
        sha256 = PROTOBUF_SHA256,
        strip_prefix = PROTOBUF_STRIP_PREFIX,
        urls = PROTOBUF_URLS,
    )

    tf_http_archive(
        name = "nsync",
        sha256 = "692f9b30e219f71a6371b98edd39cef3cbda35ac3abc4cd99ce19db430a5591a",
        strip_prefix = "nsync-1.20.1",
        system_build_file = clean_dep("//third_party/systemlibs:nsync.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/google/nsync/archive/1.20.1.tar.gz",
            "https://github.com/google/nsync/archive/1.20.1.tar.gz",
        ],
    )

    tf_http_archive(
        name = "com_google_googletest",
        sha256 = "353ab86e35cea1cd386115279cf4b16695bbf21b897bfbf2721cf4cb5f64ade8",
        strip_prefix = "googletest-997d343dd680e541ef96ce71ee54a91daf2577a0",
        urls = [
            "https://mirror.bazel.build/github.com/google/googletest/archive/997d343dd680e541ef96ce71ee54a91daf2577a0.zip",
            "https://github.com/google/googletest/archive/997d343dd680e541ef96ce71ee54a91daf2577a0.zip",
        ],
    )

    tf_http_archive(
        name = "com_github_gflags_gflags",
        sha256 = "ae27cdbcd6a2f935baa78e4f21f675649271634c092b1be01469440495609d0e",
        strip_prefix = "gflags-2.2.1",
        urls = [
            "https://mirror.bazel.build/github.com/gflags/gflags/archive/v2.2.1.tar.gz",
            "https://github.com/gflags/gflags/archive/v2.2.1.tar.gz",
        ],
    )

    tf_http_archive(
        name = "pcre",
        build_file = clean_dep("//third_party:pcre.BUILD"),
        sha256 = "69acbc2fbdefb955d42a4c606dfde800c2885711d2979e356c0636efde9ec3b5",
        strip_prefix = "pcre-8.42",
        system_build_file = clean_dep("//third_party/systemlibs:pcre.BUILD"),
        urls = [
            "https://mirror.bazel.build/ftp.exim.org/pub/pcre/pcre-8.42.tar.gz",
            "http://ftp.exim.org/pub/pcre/pcre-8.42.tar.gz",
        ],
    )

    tf_http_archive(
        name = "swig",
        build_file = clean_dep("//third_party:swig.BUILD"),
        sha256 = "58a475dbbd4a4d7075e5fe86d4e54c9edde39847cdb96a3053d87cb64a23a453",
        strip_prefix = "swig-3.0.8",
        system_build_file = clean_dep("//third_party/systemlibs:swig.BUILD"),
        urls = [
            "https://mirror.bazel.build/ufpr.dl.sourceforge.net/project/swig/swig/swig-3.0.8/swig-3.0.8.tar.gz",
            "http://ufpr.dl.sourceforge.net/project/swig/swig/swig-3.0.8/swig-3.0.8.tar.gz",
            "http://pilotfiber.dl.sourceforge.net/project/swig/swig/swig-3.0.8/swig-3.0.8.tar.gz",
        ],
    )

    tf_http_archive(
        name = "curl",
        build_file = clean_dep("//third_party:curl.BUILD"),
        sha256 = "e9c37986337743f37fd14fe8737f246e97aec94b39d1b71e8a5973f72a9fc4f5",
        strip_prefix = "curl-7.60.0",
        system_build_file = clean_dep("//third_party/systemlibs:curl.BUILD"),
        urls = [
            "https://mirror.bazel.build/curl.haxx.se/download/curl-7.60.0.tar.gz",
            "https://curl.haxx.se/download/curl-7.60.0.tar.gz",
        ],
    )

    tf_http_archive(
        name = "grpc",
        sha256 = "50db9cf2221354485eb7c3bd55a4c27190caef7048a2a1a15fbe60a498f98b44",
        strip_prefix = "grpc-1.13.0",
        system_build_file = clean_dep("//third_party/systemlibs:grpc.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/grpc/grpc/archive/v1.13.0.tar.gz",
            "https://github.com/grpc/grpc/archive/v1.13.0.tar.gz",
        ],
    )

    tf_http_archive(
        name = "linenoise",
        build_file = clean_dep("//third_party:linenoise.BUILD"),
        sha256 = "7f51f45887a3d31b4ce4fa5965210a5e64637ceac12720cfce7954d6a2e812f7",
        strip_prefix = "linenoise-c894b9e59f02203dbe4e2be657572cf88c4230c3",
        urls = [
            "https://mirror.bazel.build/github.com/antirez/linenoise/archive/c894b9e59f02203dbe4e2be657572cf88c4230c3.tar.gz",
            "https://github.com/antirez/linenoise/archive/c894b9e59f02203dbe4e2be657572cf88c4230c3.tar.gz",
        ],
    )

    # TODO(phawkins): currently, this rule uses an unofficial LLVM mirror.
    # Switch to an official source of snapshots if/when possible.
    tf_http_archive(
        name = "llvm",
        build_file = clean_dep("//third_party/llvm:llvm.autogenerated.BUILD"),
        sha256 = "286465fc41ade5c1c44e4a6dce9681106664fcdd12264dc9be63fc22bbee3c9c",
        strip_prefix = "llvm-0478924a3727c74fd482d07eed45a8347540576e",
        urls = [
            "https://mirror.bazel.build/github.com/llvm-mirror/llvm/archive/0478924a3727c74fd482d07eed45a8347540576e.tar.gz",
            "https://github.com/llvm-mirror/llvm/archive/0478924a3727c74fd482d07eed45a8347540576e.tar.gz",
        ],
    )

    tf_http_archive(
        name = "lmdb",
        build_file = clean_dep("//third_party:lmdb.BUILD"),
        sha256 = "f3927859882eb608868c8c31586bb7eb84562a40a6bf5cc3e13b6b564641ea28",
        strip_prefix = "lmdb-LMDB_0.9.22/libraries/liblmdb",
        system_build_file = clean_dep("//third_party/systemlibs:lmdb.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/LMDB/lmdb/archive/LMDB_0.9.22.tar.gz",
            "https://github.com/LMDB/lmdb/archive/LMDB_0.9.22.tar.gz",
        ],
    )

    tf_http_archive(
        name = "jsoncpp_git",
        build_file = clean_dep("//third_party:jsoncpp.BUILD"),
        sha256 = "c49deac9e0933bcb7044f08516861a2d560988540b23de2ac1ad443b219afdb6",
        strip_prefix = "jsoncpp-1.8.4",
        system_build_file = clean_dep("//third_party/systemlibs:jsoncpp.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/open-source-parsers/jsoncpp/archive/1.8.4.tar.gz",
            "https://github.com/open-source-parsers/jsoncpp/archive/1.8.4.tar.gz",
        ],
    )

    tf_http_archive(
        name = "boringssl",
        sha256 = "1188e29000013ed6517168600fc35a010d58c5d321846d6a6dfee74e4c788b45",
        strip_prefix = "boringssl-7f634429a04abc48e2eb041c81c5235816c96514",
        system_build_file = clean_dep("//third_party/systemlibs:boringssl.BUILD"),
        urls = [
            "https://mirror.bazel.build/github.com/google/boringssl/archive/7f634429a04abc48e2eb041c81c5235816c96514.tar.gz",
            "https://github.com/google/boringssl/archive/7f634429a04abc48e2eb041c81c5235816c96514.tar.gz",
        ],
    )

    tf_http_archive(
        name = "zlib_archive",
        build_file = clean_dep("//third_party:zlib.BUILD"),
        sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
        strip_prefix = "zlib-1.2.11",
system_build_file = clean_dep("//third_party/systemlibs:zlib.BUILD"), urls = [ "https://mirror.bazel.build/zlib.net/zlib-1.2.11.tar.gz", "https://zlib.net/zlib-1.2.11.tar.gz", ], ) tf_http_archive( name = "fft2d", build_file = clean_dep("//third_party/fft2d:fft2d.BUILD"), sha256 = "52bb637c70b971958ec79c9c8752b1df5ff0218a4db4510e60826e0cb79b5296", urls = [ "https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz", "http://www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz", ], ) tf_http_archive( name = "snappy", build_file = clean_dep("//third_party:snappy.BUILD"), sha256 = "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4", strip_prefix = "snappy-1.1.7", system_build_file = clean_dep("//third_party/systemlibs:snappy.BUILD"), urls = [ "https://mirror.bazel.build/github.com/google/snappy/archive/1.1.7.tar.gz", "https://github.com/google/snappy/archive/1.1.7.tar.gz", ], ) tf_http_archive( name = "nccl_archive", build_file = clean_dep("//third_party:nccl/archive.BUILD"), sha256 = "19132b5127fa8e02d95a09795866923f04064c8f1e0770b2b42ab551408882a4", strip_prefix = "nccl-f93fe9bfd94884cec2ba711897222e0df5569a53", urls = [ "https://mirror.bazel.build/github.com/nvidia/nccl/archive/f93fe9bfd94884cec2ba711897222e0df5569a53.tar.gz", "https://github.com/nvidia/nccl/archive/f93fe9bfd94884cec2ba711897222e0df5569a53.tar.gz", ], ) tf_http_archive( name = "kafka", build_file = clean_dep("//third_party:kafka/BUILD"), patch_file = clean_dep("//third_party/kafka:config.patch"), sha256 = "cc6ebbcd0a826eec1b8ce1f625ffe71b53ef3290f8192b6cae38412a958f4fd3", strip_prefix = "librdkafka-0.11.5", urls = [ "https://mirror.bazel.build/github.com/edenhill/librdkafka/archive/v0.11.5.tar.gz", "https://github.com/edenhill/librdkafka/archive/v0.11.5.tar.gz", ], ) java_import_external( name = "junit", jar_sha256 = "59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "http://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "http://maven.ibiblio.org/maven2/junit/junit/4.12/junit-4.12.jar", ], licenses = ["reciprocal"], # Common Public License Version 1.0 testonly_ = True, deps = ["@org_hamcrest_core"], ) java_import_external( name = "org_hamcrest_core", jar_sha256 = "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", "http://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", "http://maven.ibiblio.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", ], licenses = ["notice"], # New BSD License testonly_ = True, ) java_import_external( name = "com_google_testing_compile", jar_sha256 = "edc180fdcd9f740240da1a7a45673f46f59c5578d8cd3fbc912161f74b5aebb8", jar_urls = [ "http://mirror.bazel.build/repo1.maven.org/maven2/com/google/testing/compile/compile-testing/0.11/compile-testing-0.11.jar", "http://repo1.maven.org/maven2/com/google/testing/compile/compile-testing/0.11/compile-testing-0.11.jar", ], licenses = ["notice"], # New BSD License testonly_ = True, deps = ["@com_google_guava", "@com_google_truth"], ) java_import_external( name = "com_google_truth", jar_sha256 = "032eddc69652b0a1f8d458f999b4a9534965c646b8b5de0eba48ee69407051df", jar_urls = [ "http://mirror.bazel.build/repo1.maven.org/maven2/com/google/truth/truth/0.32/truth-0.32.jar", "http://repo1.maven.org/maven2/com/google/truth/truth/0.32/truth-0.32.jar", ], licenses 
= ["notice"], # Apache 2.0 testonly_ = True, deps = ["@com_google_guava"], ) java_import_external( name = "org_checkerframework_qual", jar_sha256 = "a17501717ef7c8dda4dba73ded50c0d7cde440fd721acfeacbf19786ceac1ed6", jar_urls = [ "http://mirror.bazel.build/repo1.maven.org/maven2/org/checkerframework/checker-qual/2.4.0/checker-qual-2.4.0.jar", "http://repo1.maven.org/maven2/org/checkerframework/checker-qual/2.4.0/checker-qual-2.4.0.jar", ], licenses = ["notice"], # Apache 2.0 ) java_import_external( name = "com_squareup_javapoet", jar_sha256 = "5bb5abdfe4366c15c0da3332c57d484e238bd48260d6f9d6acf2b08fdde1efea", jar_urls = [ "http://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/javapoet/1.9.0/javapoet-1.9.0.jar", "http://repo1.maven.org/maven2/com/squareup/javapoet/1.9.0/javapoet-1.9.0.jar", ], licenses = ["notice"], # Apache 2.0 ) tf_http_archive( name = "com_google_pprof", build_file = clean_dep("//third_party:pprof.BUILD"), sha256 = "e0928ca4aa10ea1e0551e2d7ce4d1d7ea2d84b2abbdef082b0da84268791d0c4", strip_prefix = "pprof-c0fb62ec88c411cc91194465e54db2632845b650", urls = [ "https://mirror.bazel.build/github.com/google/pprof/archive/c0fb62ec88c411cc91194465e54db2632845b650.tar.gz", "https://github.com/google/pprof/archive/c0fb62ec88c411cc91194465e54db2632845b650.tar.gz", ], ) tf_http_archive( name = "cub_archive", build_file = clean_dep("//third_party:cub.BUILD"), sha256 = "6bfa06ab52a650ae7ee6963143a0bbc667d6504822cbd9670369b598f18c58c3", strip_prefix = "cub-1.8.0", urls = [ "https://mirror.bazel.build/github.com/NVlabs/cub/archive/1.8.0.zip", "https://github.com/NVlabs/cub/archive/1.8.0.zip", ], ) tf_http_archive( name = "cython", build_file = clean_dep("//third_party:cython.BUILD"), delete = ["BUILD.bazel"], sha256 = "bccc9aa050ea02595b2440188813b936eaf345e85fb9692790cecfe095cf91aa", strip_prefix = "cython-0.28.4", system_build_file = clean_dep("//third_party/systemlibs:cython.BUILD"), urls = [ "https://mirror.bazel.build/github.com/cython/cython/archive/0.28.4.tar.gz", "https://github.com/cython/cython/archive/0.28.4.tar.gz", ], ) tf_http_archive( name = "bazel_toolchains", sha256 = "07dfbe80638eb1fe681f7c07e61b34b579c6710c691e49ee90ccdc6e9e75ebbb", strip_prefix = "bazel-toolchains-9a111bd82161c1fbe8ed17a593ca1023fd941c70", urls = [ "https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/archive/9a111bd82161c1fbe8ed17a593ca1023fd941c70.tar.gz", "https://github.com/bazelbuild/bazel-toolchains/archive/9a111bd82161c1fbe8ed17a593ca1023fd941c70.tar.gz", ], ) tf_http_archive( name = "arm_neon_2_x86_sse", build_file = clean_dep("//third_party:arm_neon_2_x86_sse.BUILD"), sha256 = "c8d90aa4357f8079d427e87a6f4c493da1fa4140aee926c05902d7ec1533d9a5", strip_prefix = "ARM_NEON_2_x86_SSE-0f77d9d182265259b135dad949230ecbf1a2633d", urls = [ "https://mirror.bazel.build/github.com/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz", "https://github.com/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz", ], ) tf_http_archive( name = "double_conversion", build_file = clean_dep("//third_party:double_conversion.BUILD"), sha256 = "2f7fbffac0d98d201ad0586f686034371a6d152ca67508ab611adc2386ad30de", strip_prefix = "double-conversion-3992066a95b823efc8ccc1baf82a1cfc73f6e9b8", system_build_file = clean_dep("//third_party/systemlibs:double_conversion.BUILD"), urls = [ "https://mirror.bazel.build/github.com/google/double-conversion/archive/3992066a95b823efc8ccc1baf82a1cfc73f6e9b8.zip", 
"https://github.com/google/double-conversion/archive/3992066a95b823efc8ccc1baf82a1cfc73f6e9b8.zip", ], ) tf_http_archive( name = "tflite_mobilenet", build_file = clean_dep("//third_party:tflite_mobilenet.BUILD"), sha256 = "23f814d1c076bdf03715dfb6cab3713aa4fbdf040fd5448c43196bd2e97a4c1b", urls = [ "https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/models/tflite/mobilenet_v1_224_android_quant_2017_11_08.zip", "https://storage.googleapis.com/download.tensorflow.org/models/tflite/mobilenet_v1_224_android_quant_2017_11_08.zip", ], ) tf_http_archive( name = "tflite_mobilenet_ssd", build_file = str(Label("//third_party:tflite_mobilenet.BUILD")), sha256 = "767057f2837a46d97882734b03428e8dd640b93236052b312b2f0e45613c1cf0", urls = [ "https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/models/tflite/mobilenet_ssd_tflite_v1.zip", "https://storage.googleapis.com/download.tensorflow.org/models/tflite/mobilenet_ssd_tflite_v1.zip", ], ) tf_http_archive( name = "tflite_mobilenet_ssd_quant", build_file = str(Label("//third_party:tflite_mobilenet.BUILD")), sha256 = "a809cd290b4d6a2e8a9d5dad076e0bd695b8091974e0eed1052b480b2f21b6dc", urls = [ "https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/models/tflite/coco_ssd_mobilenet_v1_0.75_quant_2018_06_29.zip", "https://storage.googleapis.com/download.tensorflow.org/models/tflite/coco_ssd_mobilenet_v1_0.75_quant_2018_06_29.zip", ], ) tf_http_archive( name = "tflite_mobilenet_ssd_quant_protobuf", build_file = str(Label("//third_party:tflite_mobilenet.BUILD")), sha256 = "09280972c5777f1aa775ef67cb4ac5d5ed21970acd8535aeca62450ef14f0d79", strip_prefix = "ssd_mobilenet_v1_quantized_300x300_coco14_sync_2018_07_18", urls = [ "https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_quantized_300x300_coco14_sync_2018_07_18.tar.gz", "http://storage.googleapis.com/download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_quantized_300x300_coco14_sync_2018_07_18.tar.gz", ], ) tf_http_archive( name = "tflite_conv_actions_frozen", build_file = str(Label("//third_party:tflite_mobilenet.BUILD")), sha256 = "d947b38cba389b5e2d0bfc3ea6cc49c784e187b41a071387b3742d1acac7691e", urls = [ "https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/models/tflite/conv_actions_tflite.zip", "https://storage.googleapis.com/download.tensorflow.org/models/tflite/conv_actions_tflite.zip", ], ) tf_http_archive( name = "tflite_smartreply", build_file = clean_dep("//third_party:tflite_smartreply.BUILD"), sha256 = "8980151b85a87a9c1a3bb1ed4748119e4a85abd3cb5744d83da4d4bd0fbeef7c", urls = [ "https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/models/tflite/smartreply_1.0_2017_11_01.zip", "https://storage.googleapis.com/download.tensorflow.org/models/tflite/smartreply_1.0_2017_11_01.zip", ], ) tf_http_archive( name = "tflite_ovic_testdata", build_file = clean_dep("//third_party:tflite_ovic_testdata.BUILD"), sha256 = "21288dccc517acee47fa9648d4d3da28bf0fef5381911ed7b4d2ee36366ffa20", strip_prefix = "ovic", urls = [ "https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/data/ovic_2018_10_23.zip", "https://storage.googleapis.com/download.tensorflow.org/data/ovic_2018_10_23.zip", ], ) tf_http_archive( name = "build_bazel_rules_android", sha256 = "cd06d15dd8bb59926e4d65f9003bfc20f9da4b2519985c27e190cddc8b7a7806", strip_prefix = "rules_android-0.1.1", urls = [ 
"https://mirror.bazel.build/github.com/bazelbuild/rules_android/archive/v0.1.1.zip", "https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip", ], ) tf_http_archive( name = "tbb", build_file = clean_dep("//third_party/ngraph:tbb.BUILD"), sha256 = "724686f90bcda78f13b76f297d964008737ccd6399328143c1c0093e73ae6a13", strip_prefix = "tbb-tbb_2018", urls = [ "https://mirror.bazel.build/github.com/01org/tbb/archive/tbb_2018.zip", "https://github.com/01org/tbb/archive/tbb_2018.zip", ], ) tf_http_archive( name = "ngraph", build_file = clean_dep("//third_party/ngraph:ngraph.BUILD"), sha256 = "2b28f9c9f063b96825a96d56d7f7978c9a1c55c9b25175c20dd49a8a77cb0305", strip_prefix = "ngraph-0.9.1", urls = [ "https://mirror.bazel.build/github.com/NervanaSystems/ngraph/archive/v0.9.1.tar.gz", "https://github.com/NervanaSystems/ngraph/archive/v0.9.1.tar.gz", ], ) tf_http_archive( name = "nlohmann_json_lib", build_file = clean_dep("//third_party/ngraph:nlohmann_json.BUILD"), sha256 = "9f3549824af3ca7e9707a2503959886362801fb4926b869789d6929098a79e47", strip_prefix = "json-3.1.1", urls = [ "https://mirror.bazel.build/github.com/nlohmann/json/archive/v3.1.1.tar.gz", "https://github.com/nlohmann/json/archive/v3.1.1.tar.gz", ], ) tf_http_archive( name = "ngraph_tf", build_file = clean_dep("//third_party/ngraph:ngraph_tf.BUILD"), sha256 = "89accbc702e68a09775f1011a99dd16561038fd1ce59d566d64450176abaae5c", strip_prefix = "ngraph-tf-0.7.0", urls = [ "https://mirror.bazel.build/github.com/NervanaSystems/ngraph-tf/archive/v0.7.0.tar.gz", "https://github.com/NervanaSystems/ngraph-tf/archive/v0.7.0.tar.gz", ], ) ############################################################################## # BIND DEFINITIONS # # Please do not add bind() definitions unless we have no other choice. # If that ends up being the case, please leave a comment explaining # why we can't depend on the canonical build target. # gRPC wants a cares dependency but its contents is not actually # important since we have set GRPC_ARES=0 in .bazelrc native.bind( name = "cares", actual = "@grpc//third_party/nanopb:nanopb", ) # Needed by Protobuf native.bind( name = "grpc_cpp_plugin", actual = "@grpc//:grpc_cpp_plugin", ) native.bind( name = "grpc_python_plugin", actual = "@grpc//:grpc_python_plugin", ) native.bind( name = "grpc_lib", actual = "@grpc//:grpc++", ) native.bind( name = "grpc_lib_unsecure", actual = "@grpc//:grpc++_unsecure", ) # Needed by gRPC native.bind( name = "libssl", actual = "@boringssl//:ssl", ) # Needed by gRPC native.bind( name = "nanopb", actual = "@grpc//third_party/nanopb:nanopb", ) # Needed by gRPC native.bind( name = "protobuf", actual = "@protobuf_archive//:protobuf", ) # gRPC expects //external:protobuf_clib and //external:protobuf_compiler # to point to Protobuf's compiler library. native.bind( name = "protobuf_clib", actual = "@protobuf_archive//:protoc_lib", ) # Needed by gRPC native.bind( name = "protobuf_headers", actual = "@protobuf_archive//:protobuf_headers", ) # Needed by Protobuf native.bind( name = "python_headers", actual = clean_dep("//third_party/python_runtime:headers"), ) # Needed by Protobuf native.bind( name = "six", actual = "@six_archive//:six", ) # Needed by gRPC native.bind( name = "zlib", actual = "@zlib_archive//:zlib", )
44.158696
177
0.671639
4cd9ffa64893fc5ec564054f7d553fc0a6c6b6de
2,967
py
Python
rdr_service/lib_fhir/fhirclient_1_0_6/models/schedule.py
all-of-us/raw-data-repository
d28ad957557587b03ff9c63d55dd55e0508f91d8
[ "BSD-3-Clause" ]
39
2017-10-13T19:16:27.000Z
2021-09-24T16:58:21.000Z
fhirclient/models/schedule.py
NematiLab/Streaming-Sepsis-Prediction-System-for-Intensive-Care-Units
fb5ad260fb8d264d85aea9e6c895d1700eea4d11
[ "Apache-2.0" ]
312
2017-09-08T15:42:13.000Z
2022-03-23T18:21:40.000Z
rdr_service/lib_fhir/fhirclient_1_0_6/models/schedule.py
all-of-us/raw-data-repository
d28ad957557587b03ff9c63d55dd55e0508f91d8
[ "BSD-3-Clause" ]
19
2017-09-15T13:58:00.000Z
2022-02-07T18:33:20.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#  Generated from FHIR 1.0.2.7202 (http://hl7.org/fhir/StructureDefinition/Schedule) on 2016-06-23.
#  2016, SMART Health IT.


from . import domainresource

class Schedule(domainresource.DomainResource):
    """ A container for slot(s) of time that may be available for booking
    appointments.
    """

    resource_name = "Schedule"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """

        self.actor = None
        """ The resource this Schedule resource is providing availability
        information for. These are expected to usually be one of
        HealthcareService, Location, Practitioner, Device, Patient or
        RelatedPerson.
        Type `FHIRReference` referencing `Patient, Practitioner, RelatedPerson,
        Device, HealthcareService, Location` (represented as `dict` in JSON). """

        self.comment = None
        """ Comments on the availability to describe any extended information.
        Such as custom constraints on the slot(s) that may be associated.
        Type `str`. """

        self.identifier = None
        """ External Ids for this item.
        List of `Identifier` items (represented as `dict` in JSON). """

        self.planningHorizon = None
        """ The period of time that the slots that are attached to this
        Schedule resource cover (even if none exist). These cover the amount
        of time that an organization's planning horizon; the interval for
        which they are currently accepting appointments. This does not define
        a "template" for planning outside these dates.
        Type `Period` (represented as `dict` in JSON). """

        self.type = None
        """ The schedule type can be used for the categorization of healthcare
        services or other appointment types.
        List of `CodeableConcept` items (represented as `dict` in JSON). """

        super(Schedule, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        js = super(Schedule, self).elementProperties()
        js.extend([
            ("actor", "actor", fhirreference.FHIRReference, False, None, True),
            ("comment", "comment", str, False, None, False),
            ("identifier", "identifier", identifier.Identifier, True, None, False),
            ("planningHorizon", "planningHorizon", period.Period, False, None, False),
            ("type", "type", codeableconcept.CodeableConcept, True, None, False),
        ])
        return js


from . import codeableconcept
from . import fhirreference
from . import identifier
from . import period
41.208333
153
0.650826
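For orientation, a minimal usage sketch of the Schedule model above. It assumes the package is importable as fhirclient.models (one of the two repository layouts listed in this record), and the payload values are illustrative placeholders, not data from the record.

# Hypothetical illustration: build a Schedule from a plain JSON dictionary.
from fhirclient.models.schedule import Schedule

payload = {
    "resourceType": "Schedule",
    "comment": "Morning clinic slots",               # maps to Schedule.comment
    "actor": {"reference": "Practitioner/example"},  # maps to Schedule.actor (required)
}
sched = Schedule(jsondict=payload, strict=True)  # strict=True raises FHIRValidationError on bad input
print(sched.comment)  # -> "Morning clinic slots"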
e4fcd518a3262a9282cf4f308ccf0a10b286a732
9,927
py
Python
RoboticProcessAutomation/img_recog.py
bartmazur90/RoboticProcessAutomation
cc547223562a280a9bc780e919aff14a5524c949
[ "MIT" ]
null
null
null
RoboticProcessAutomation/img_recog.py
bartmazur90/RoboticProcessAutomation
cc547223562a280a9bc780e919aff14a5524c949
[ "MIT" ]
null
null
null
RoboticProcessAutomation/img_recog.py
bartmazur90/RoboticProcessAutomation
cc547223562a280a9bc780e919aff14a5524c949
[ "MIT" ]
null
null
null
from pyautogui import locateOnScreen, click, doubleClick, rightClick import time from pyscreeze import Box def click_img( target: str, target_region: tuple = (), target_confidence: float = 0.98, target_retries: int = 1, click_type: str = 'single', off_x: int = 0, off_y: int = 0, mode: str = "E", wait_before_retry: int = 1, click_before_retry: tuple = () ): ''' This function is to click provided img (visible on the screen). Args: target: (str) - path to the img. target_region: (tuple) - img region rectangle in [pixels] (x,y,width,height) target_confidence: (float) - treshold for img recognition, 0.01 - ultra low 0.99 - high target_retries: (int) - how many retries would be performed for click img click_type: (str) - supported ones: single,double,right off_x: (int) - x axis offset applied to click action, from center of the target img off_y: (int) - y axis offset applied to click action, from center of the target img mode: (str) - F -> return False E -> return Exception /default/ wait_before_retry: (int) - how long it would wait until each retry. click_before_retry: (tuple) - (x,y) to focus before retry Returns: (bool/Exception) - depends on Arg:mode ''' t_reg = None if target_region == () else target_region for _ in range(target_retries): img_rect = locateOnScreen( target, region = t_reg, confidence = target_confidence, grayscale=True ) if img_rect!= None: mouse_click(img_rect,click_type,off_x,off_y) return True if wait_before_retry >= 1: time.sleep(wait_before_retry) if click_before_retry != (): click(click_before_retry) else: print(f'{target} FAIL') if mode == "F": return False raise Exception def wait_img_appear(target: str, target_region: tuple = None, target_confidence: float = 0.98, timeout: float = 120, mode: str = "E" ): """ This function is to wait until provided img will appear on the screen. Args: target: (str) - path to the img. target_region: (tuple) - img region rectangle in [pixels] (x,y,width,height) target_confidence: (float) - treshold for img recognition, 0.01 - ultra low 0.99 - high timeout: (int) - how long it would wait for apperance [seconds] mode: (str) - F -> return False E -> return Exception /default/ Returns: (bool/Exception) - depends on arg:mode """ t_reg = None if target_region == () else target_region for _ in range(timeout): if locateOnScreen( target, region = t_reg, confidence = target_confidence, grayscale = True ) != None: return True time.sleep(0.9) else: if mode=="F": return False raise Exception def wait_img_disappear(target: str, target_region: tuple = None, target_confidence: float = 0.98, timeout: float = 120, mode: str = "E" ): """ This function is to wait until provided img will disappear from the screen. Args: target: (str) - path to the img. 
target_region: (tuple) - img region rectangle in [pixels] (x,y,width,height) target_confidence: (float) - treshold for img recognition, 0.01 - ultra low 0.99 - high timeout: (int) - how long it would wait for disapperance [seconds] mode: (str) - F -> return False E -> return Exception /default/ Returns: (bool/Exception) - depends on arg:mode """ t_reg = None if target_region == () else target_region for _ in range(timeout): if locateOnScreen( target, region = t_reg, confidence = target_confidence, grayscale = True ) == None: return True time.sleep(0.9) else: if mode=="F": return False raise Exception def click_img_2(target: str = None, target_region: tuple = None, target_confidence: float = 0.98, target_retries: int = 3, click_type: str = 'single', off_x: int = 0, off_y: int = 0, mode: str = "E", wait_before_retry: int = 1, click_before_retry: tuple = (), check: str = None, check_mode: str = "A", check_region: tuple = None, check_wait_before: float= 0.5, check_retries: int = 1, check_timeout: int = 3, check_confidence: float = 0.98, ): ''' This function is to click provided img (visible on the screen). And additionaly check for apperance/disapperane of another img. Args: target: (str) - path to the img. target_region: (tuple) - img region rectangle in [pixels] (x,y,width,height) target_confidence: (float) - treshold for img recognition, 0.01 - ultra low 0.99 - high target_retries: (int) - how many retries would be performed for click img click_type: (str) - supported ones: single,double,right off_x: (int) - x axis offset applied to click action, from center of the target img off_y: (int) - y axis offset applied to click action, from center of the target img mode: (str) - F -> return False E -> return Exception /default/ wait_before_retry: (int) - how long it would wait until each retry. click_before_retry: (tuple) - (x,y) to focus before retry check: (str) - path to img it would test for (dis)apperance check_mode: (str) - A - apperance D - disapperance check_region: (tuple) - img region rectangle in [pixels] (x,y,width,height) check_wait_before: (float) - how long it would wait until test check_retries: (int) - how many test retries would be performed each loop run check_timeout: (int) - to be passed into wait_img_a/d func check_confidence: (float) - to be passed into wait_img_a/d func Returns: (bool/Exception) - depends on Arg:mode ''' t_reg = None if target_region == () else target_region c_reg = None if check_region == () else check_region for _ in range(target_retries): t_rect = locateOnScreen(target, region = t_reg, confidence = target_confidence, grayscale=True ) if t_rect!= None: mouse_click(t_rect,click_type,off_x,off_y) if check != None: for _ in range(check_retries): time.sleep(check_wait_before) if check_mode == "A": if wait_img_appear( target = check, target_region= c_reg, target_confidence = check_confidence, timeout = check_timeout, mode = "F" ): return True if check_mode == "D": if wait_img_disappear( target = check, target_region= c_reg, target_confidence = check_confidence, timeout = check_timeout, mode = "F" ): return True else: return True if wait_before_retry >= 1: time.sleep(wait_before_retry) if click_before_retry != (): click(click_before_retry) else: print(f'{target} FAIL') if mode == "F": return False raise Exception ##################################### # 1. 
Img Recognition HELPER FUNCTIONS ##################################### def rect_center(target: Box) -> tuple: ''' This function is to calculate center of an rectangle Args: target: (Box) - pyscreeze.Box Returns: (tuple) - s/e ''' x = target.left + (target.width / 2) y = target.top + (target.height / 2) return (x, y) def mouse_click(img_rect: Box, click_type: str, off_x: int, off_y: int): ''' This function is to click on provided coordinates Args: img_rect: (Box) - pyscreeze.Box click_type: (str) - s/e off_x: (int) - s/e off_y: (int) - s/e ''' x, y = rect_center(img_rect) if click_type == 'single': click(x + off_x, y + off_y) elif click_type == 'double': doubleClick(x + off_x, y + off_y) elif click_type == 'right': rightClick(x + off_x, y + off_y) else: raise ValueError('clickType not supported')
39.86747
95
0.503173
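Because the img_recog module above is driven entirely by on-screen images, a small driver sketch may clarify the intended call pattern. The image paths and search region below are hypothetical placeholders, not values from the record.

# Hypothetical driver for the RoboticProcessAutomation helpers above.
from RoboticProcessAutomation.img_recog import click_img, wait_img_appear

# Wait up to 30 s for a dialog to appear, then click its OK button;
# mode="F" makes failures return False instead of raising.
if wait_img_appear("img/dialog_title.png", timeout=30, mode="F"):
    click_img(
        "img/ok_button.png",
        target_region=(0, 0, 800, 600),  # restrict the search to (x, y, width, height)
        target_retries=3,
        click_type="single",
        mode="F",
    )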
9d3efc8e7e3e89be17c91ecf1c0a98a6f691fe68
1,927
py
Python
postman/management/commands/postman_cleanup.py
chhell/django-postman
dbd48a7a94f4abd4748c174cf052b8da6f66800b
[ "BSD-3-Clause" ]
null
null
null
postman/management/commands/postman_cleanup.py
chhell/django-postman
dbd48a7a94f4abd4748c174cf052b8da6f66800b
[ "BSD-3-Clause" ]
null
null
null
postman/management/commands/postman_cleanup.py
chhell/django-postman
dbd48a7a94f4abd4748c174cf052b8da6f66800b
[ "BSD-3-Clause" ]
null
null
null
from __future__ import unicode_literals
from datetime import timedelta

from django.core.management.base import BaseCommand
from django.db.models import Max, Count, F, Q
from django.utils.timezone import now

from postman.models import Message

ARGUMENT_ARGS = ('-d', '--days')
ARGUMENT_KWARGS = {'default': 30}


class Command(BaseCommand):
    help = """Can be run as a cron job or directly to clean out old data from the database:
        Messages or conversations marked as deleted by both sender and recipient,
        more than a minimal number of days ago."""

    def add_arguments(self, parser):
        parser.add_argument(*ARGUMENT_ARGS, type=int,
            help='The minimal number of days a message is kept marked as deleted, '
                 'before to be considered for real deletion [default: %(default)s]',
            **ARGUMENT_KWARGS)

    # no more NoArgsCommand and handle_noargs with Dj >= 1.8
    def handle(self, *args, **options):
        verbose = int(options.get('verbosity'))
        days = options.get('days')
        date = now() - timedelta(days=days)
        if verbose >= 1:
            self.stdout.write("Erase messages and conversations marked as deleted before {0}".format(date))
        # for a conversation to be candidate, all messages must satisfy the criteria
        tpks = Message.objects.filter(thread__isnull=False).values('thread').annotate(
            cnt=Count('pk'),
            s_max=Max('sender_deleted_at'), s_cnt=Count('sender_deleted_at'),
            r_max=Max('recipient_deleted_at'), r_cnt=Count('recipient_deleted_at')
        ).order_by().filter(
            s_cnt=F('cnt'), r_cnt=F('cnt'), s_max__lte=date, r_max__lte=date
        ).values_list('thread', flat=True)
        Message.objects.filter(
            Q(thread__in=tpks) |
            Q(thread__isnull=True, sender_deleted_at__lte=date, recipient_deleted_at__lte=date)
        ).delete()
44.813953
107
0.663726
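For context, a Django management command like the one above is invoked through the project's manage.py; the project paths below are placeholders.

# Run directly, keeping messages marked as deleted for at least 60 days:
#   python manage.py postman_cleanup --days 60
# Or from a crontab entry (paths are illustrative):
#   0 4 * * * /path/to/venv/bin/python /path/to/project/manage.py postman_cleanup -d 30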
34d642ca2deae4a65a841833e24eea986e35b25a
1,745
py
Python
src/music/music_group.py
janbrrr/dndj
b0557481f301adf82cae74e5c619fff016402a36
[ "MIT" ]
8
2020-01-15T20:50:14.000Z
2021-08-09T15:36:56.000Z
src/music/music_group.py
janbrrr/dndj-discord
078ce5a0367134117f6184b2612648e9a25bcf9f
[ "MIT" ]
null
null
null
src/music/music_group.py
janbrrr/dndj-discord
078ce5a0367134117f6184b2612648e9a25bcf9f
[ "MIT" ]
2
2020-01-15T20:50:18.000Z
2021-12-18T13:09:46.000Z
from typing import Dict

from src.music.track_list import TrackList


class MusicGroup:
    def __init__(self, config: Dict):
        """
        Initializes a `MusicGroup` instance.

        A ``MusicGroup`` groups multiple `TrackList` instances.
        For more information have a look at the `TrackList` class.

        The `config` parameter is expected to be a dictionary with the following keys:
        - "name": a descriptive name for the music group
        - "directory": the directory where the files for this group are (Optional)
        - "sort": whether to sort the track lists alphabetically (Optional, default=True)
        - "track_lists": a list of configs for `TrackList` instances. See `TrackList` class for more information

        :param config: `dict`
        """
        self.name = config["name"]
        self.directory = config["directory"] if "directory" in config else None
        track_lists = [TrackList(track_list_config) for track_list_config in config["track_lists"]]
        if "sort" not in config or ("sort" in config and config["sort"]):
            track_lists = sorted(track_lists, key=lambda x: x.name)
        self.track_lists = tuple(track_lists)

    def __eq__(self, other):
        if isinstance(other, MusicGroup):
            attrs_are_the_same = self.name == other.name and self.directory == other.directory
            if not attrs_are_the_same:
                return False
            if len(self.track_lists) != len(other.track_lists):
                return False
            for my_track_list, other_track_list in zip(self.track_lists, other.track_lists):
                if my_track_list != other_track_list:
                    return False
            return True
        return False
42.560976
112
0.643553
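The docstring above fully specifies the expected config keys, so a minimal construction sketch follows. The track-list sub-config keys are assumptions for illustration only — their real schema lives in the TrackList class, which is not part of this record.

from src.music.music_group import MusicGroup

config = {
    "name": "Tavern",
    "directory": "music/tavern",  # optional
    "sort": True,                 # optional, defaults to True
    "track_lists": [
        # hypothetical TrackList config; see the TrackList class for the actual keys
        {"name": "Quiet Evening", "tracks": ["lute.mp3"]},
    ],
}
group = MusicGroup(config)
print(group.name, len(group.track_lists))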
408bfa1c02010a90d1efaa64649eb2620bd978bf
3,725
py
Python
test/scanner/string.py
chrismalcolm/jspec
053835a562be5c4f304f7982a1ccbf60f1489ca9
[ "MIT" ]
2
2022-01-11T21:48:50.000Z
2022-01-12T12:23:45.000Z
test/scanner/string.py
chrismalcolm/jspec
053835a562be5c4f304f7982a1ccbf60f1489ca9
[ "MIT" ]
1
2021-12-29T20:11:57.000Z
2021-12-29T20:11:57.000Z
test/scanner/string.py
chrismalcolm/jspec
053835a562be5c4f304f7982a1ccbf60f1489ca9
[ "MIT" ]
null
null
null
"""JSPEC Testing Module for scanning JSPEC documents for ``JSPECTestScannerString``. """ from test.scanner import JSPECTestScanner from jspec.entity import ( JSPEC, JSPECString, ) class JSPECTestScannerString(JSPECTestScanner): """Class for testing the behaviour when using the ``scan`` method for strings. A valid JSPEC string is any sequence of characters enclosed inside a pair of double quotes. """ def test_scanner_string_good(self): """Test examples of good matches. The ``scan`` method should return a matching ``JSPEC`` with a ``JSPECString`` as its element. """ test_cases = [ { "name": "Basic string", "doc": '"field"', "want": JSPEC( JSPECString("field") ) }, { "name": "Uppercase string", "doc": '"ABCD"', "want": JSPEC( JSPECString("ABCD") ) }, { "name": "Mixed case string", "doc": '"AxByCzD"', "want": JSPEC( JSPECString("AxByCzD") ) }, { "name": "Digit string", "doc": '"1234567890"', "want": JSPEC( JSPECString("1234567890") ) }, { "name": "Mixed characters", "doc": '"_1AbC$@vW;{:[(<*...>)]}"', "want": JSPEC( JSPECString("_1AbC$@vW;{:[(<*...>)]}") ) }, ] self._good_match(test_cases) def test_scanner_string_bad(self): """Test examples of bad matches. The ``scan`` method should not return a matching ``JSPEC`` with the specified ``JSPECString`` as its element. """ test_cases = [ { "name": "Misspelled", "doc": '"field"', "notwant": JSPEC( JSPECString("feld") ) }, { "name": "Uppercase to lowercase", "doc": '"ABCD"', "notwant": JSPEC( JSPECString("abcd") ) }, { "name": "Lowercase to uppercase", "doc": '"wxyz"', "notwant": JSPEC( JSPECString("WXYZ") ) }, { "name": "Digits as a number", "doc": '123', "notwant": JSPEC( JSPECString("123") ) }, ] self._bad_match(test_cases) def test_scanner_string_error(self): """Test examples of error matches. The ``scan`` method should raise an error, associated with attempting to scan for a ``JSPEC`` with a ``JSPECString`` as its element. """ test_cases = [ { "name": "Missing first double quote", "doc": 'field"', "errmsg": "Expecting JSPEC term", "errpos": 0, }, { "name": "Missing final double quote", "doc": '"field', "errmsg": "Unterminated string", "errpos": 0, }, { "name": "Missing final double quote", "doc": "'field'", "errmsg": "Expecting JSPEC term", "errpos": 0, }, ] self._error_match(test_cases)
29.8
77
0.408054
dd86ec60d4019be50ef36fdc854ea593da1853a6
1,893
py
Python
MNIST-Image-Generation-Reference/training.py
Abhishek-Aditya-bs/Anime-Face-Generation-Pytorch
1766259212141b9b04c2fcdc2aef501efb802e3e
[ "MIT" ]
null
null
null
MNIST-Image-Generation-Reference/training.py
Abhishek-Aditya-bs/Anime-Face-Generation-Pytorch
1766259212141b9b04c2fcdc2aef501efb802e3e
[ "MIT" ]
null
null
null
MNIST-Image-Generation-Reference/training.py
Abhishek-Aditya-bs/Anime-Face-Generation-Pytorch
1766259212141b9b04c2fcdc2aef501efb802e3e
[ "MIT" ]
null
null
null
import torch
import torchvision
from discriminator import D, d_optimizer
from generator import G, g_optimizer
from parameters import *
from IPython.display import Image
from torchvision.utils import save_image
from dataLoader import denorm
import os


def reset_grad():
    d_optimizer.zero_grad()
    g_optimizer.zero_grad()


def train_generator():
    # Generate fake images and calculate loss
    z = torch.randn(batch_size, latent_size).to(device)
    fake_images = G(z)
    labels = torch.ones(batch_size, 1).to(device)
    g_loss = criterion(D(fake_images), labels)

    # Backprop and optimize
    reset_grad()
    g_loss.backward()
    g_optimizer.step()
    return g_loss, fake_images


def train_discriminator(images):
    # Create the labels which are later used as input for the BCE loss
    real_labels = torch.ones(batch_size, 1).to(device)
    fake_labels = torch.zeros(batch_size, 1).to(device)

    # Loss for real images
    outputs = D(images)
    d_loss_real = criterion(outputs, real_labels)
    real_score = outputs

    # Loss for fake images
    z = torch.randn(batch_size, latent_size).to(device)
    fake_images = G(z)
    outputs = D(fake_images)
    d_loss_fake = criterion(outputs, fake_labels)
    fake_score = outputs

    # Combine losses
    d_loss = d_loss_real + d_loss_fake
    # Reset gradients
    reset_grad()
    # Compute gradients
    d_loss.backward()
    # Adjust the parameters using backprop
    d_optimizer.step()
    return d_loss, real_score, fake_score


sample_vectors = torch.randn(batch_size, latent_size).to(device)


def save_fake_images(index):
    fake_images = G(sample_vectors)
    fake_images = fake_images.reshape(fake_images.size(0), 1, 28, 28)
    fake_fname = 'fake_images-{0:0=4d}.png'.format(index)
    print('Saving', fake_fname)
    save_image(denorm(fake_images), os.path.join(sample_dir, fake_fname), nrow=10)
27.838235
82
0.71738
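The record above defines the per-step GAN updates but not the loop that calls them. The sketch below shows one conventional way to drive them; num_epochs and data_loader are assumed to come from the project's parameters and dataLoader modules, and are not shown in this record.

# Assumed driver loop (num_epochs, data_loader, batch_size and device are project-level names).
for epoch in range(num_epochs):
    for images, _ in data_loader:
        images = images.reshape(batch_size, -1).to(device)  # flatten 28x28 MNIST digits for D
        d_loss, real_score, fake_score = train_discriminator(images)
        g_loss, fake_images = train_generator()
    save_fake_images(epoch + 1)  # snapshot generator output once per epoch
    print(f"epoch {epoch + 1}: d_loss={d_loss.item():.4f}, g_loss={g_loss.item():.4f}")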
02f9641de0c68fde26218d963b79c6fac87eaa25
1,844
py
Python
core/data/vote_info.py
mofengboy/Chain-of-all-beings
b7f9750391a702b83728118f5db533ecb4d38bf0
[ "Apache-2.0" ]
2
2022-03-26T15:20:40.000Z
2022-03-26T15:24:02.000Z
core/data/vote_info.py
mofengboy/Chain-of-all-beings
b7f9750391a702b83728118f5db533ecb4d38bf0
[ "Apache-2.0" ]
null
null
null
core/data/vote_info.py
mofengboy/Chain-of-all-beings
b7f9750391a702b83728118f5db533ecb4d38bf0
[ "Apache-2.0" ]
1
2022-03-26T15:20:45.000Z
2022-03-26T15:20:45.000Z
import hashlib


class WaitVote:
    def __init__(self):
        self.electionPeriod = 0
        self.toNodeId = ""
        self.blockId = ""
        self.vote = 0
        self.simpleUserPk = ""
        self.signature = ""
        self.voteType = 1  # 1 = vote to recommend a block, 2 = vote to flag a block

    def setInfo(self, election_period, to_node_id, block_id, vote, simple_user_pk, vote_type):
        self.electionPeriod = election_period
        self.toNodeId = to_node_id
        self.blockId = block_id
        self.vote = vote
        self.simpleUserPk = simple_user_pk
        self.voteType = vote_type

    def setSignature(self, signature):
        self.signature = signature

    def getInfo(self):
        return {
            "election_period": self.electionPeriod,
            "to_node_id": self.toNodeId,
            "block_id": self.blockId,
            "vote": self.vote,
            "simple_user_pk": self.simpleUserPk,
            "vote_type": self.voteType
        }

    def getInfoOfSignature(self):
        info_of_signature = "{'election_period': " + str(
            self.electionPeriod) + ", 'to_node_id': " + self.toNodeId + ", 'block_id': " + self.blockId + ", 'vote': " + str(
            self.vote) + ", 'simple_user_pk': " + self.simpleUserPk + ", 'vote_type': " + str(self.voteType) + "}"
        return info_of_signature

    def getSignature(self):
        return self.signature

    def getMessage(self):
        return {
            "election_period": self.electionPeriod,
            "to_node_id": self.toNodeId,
            "block_id": self.blockId,
            "vote": self.vote,
            "simple_user_pk": self.simpleUserPk,
            "signature": self.signature,
            "vote_type": self.voteType
        }

    def getVoteInfoDigest(self):
        return hashlib.sha256(str(self.getMessage()).encode("utf-8")).hexdigest()
32.350877
125
0.584599
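A short sketch of the apparent call order for WaitVote, inferred from the class itself; the node id, key, and signature values below are placeholders.

from core.data.vote_info import WaitVote

vote = WaitVote()
vote.setInfo(election_period=7, to_node_id="node_1", block_id="block_abc",
             vote=100, simple_user_pk="pk_placeholder", vote_type=1)
# The caller signs getInfoOfSignature() externally; the hex string here is a stand-in.
vote.setSignature("deadbeef")
print(vote.getVoteInfoDigest())  # sha256 over str(getMessage())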
d70c65a012a99db29d3edfa87f315cbb496683fc
3,401
py
Python
nipype/pipeline/plugins/tests/test_callback.py
grlee77/nipype
73f3a733ac1b7d9b09ec32a387905a9302423b87
[ "BSD-3-Clause" ]
null
null
null
nipype/pipeline/plugins/tests/test_callback.py
grlee77/nipype
73f3a733ac1b7d9b09ec32a387905a9302423b87
[ "BSD-3-Clause" ]
null
null
null
nipype/pipeline/plugins/tests/test_callback.py
grlee77/nipype
73f3a733ac1b7d9b09ec32a387905a9302423b87
[ "BSD-3-Clause" ]
null
null
null
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Tests for workflow callbacks
"""
from tempfile import mkdtemp
from shutil import rmtree

from nipype.testing import assert_equal
import nipype.interfaces.utility as niu
import nipype.pipeline.engine as pe


def func():
    return


def bad_func():
    raise Exception


class Status:
    def __init__(self):
        self.statuses = []

    def callback(self, node, status):
        self.statuses.append((node, status))


def test_callback_normal():
    so = Status()
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=func, input_names=[], output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    wf.config['execution'] = {'crashdump_dir': wf.base_dir}
    wf.run(plugin="Linear", plugin_args={'status_callback': so.callback})
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'end'
    rmtree(wf.base_dir)


def test_callback_exception():
    so = Status()
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=bad_func, input_names=[], output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    wf.config['execution'] = {'crashdump_dir': wf.base_dir}
    try:
        wf.run(plugin="Linear", plugin_args={'status_callback': so.callback})
    except:
        pass
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'exception'
    rmtree(wf.base_dir)


def test_callback_multiproc_normal():
    so = Status()
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=func, input_names=[], output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    wf.config['execution']['crashdump_dir'] = wf.base_dir
    wf.config['execution']['poll_sleep_duration'] = 2
    wf.run(plugin='MultiProc', plugin_args={'status_callback': so.callback})
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'end'
    rmtree(wf.base_dir)


def test_callback_multiproc_exception():
    so = Status()
    wf = pe.Workflow(name='test', base_dir=mkdtemp())
    f_node = pe.Node(niu.Function(function=bad_func, input_names=[], output_names=[]),
                     name='f_node')
    wf.add_nodes([f_node])
    wf.config['execution']['crashdump_dir'] = wf.base_dir
    wf.config['execution']['poll_sleep_duration'] = 2
    try:
        wf.run(plugin='MultiProc', plugin_args={'status_callback': so.callback})
    except:
        pass
    assert_equal(len(so.statuses), 2)
    for (n, s) in so.statuses:
        yield assert_equal, n.name, 'f_node'
    yield assert_equal, so.statuses[0][1], 'start'
    yield assert_equal, so.statuses[1][1], 'exception'
    rmtree(wf.base_dir)
32.084906
77
0.620994
0b2a15e3a0cac964e9e0f97976f6bcfc56b911b5
1,447
py
Python
samples.py
NovelleP/dict_query
83a44119093df452890b0449103fce00aa85dc6b
[ "MIT" ]
2
2020-11-22T17:00:05.000Z
2020-11-22T17:16:10.000Z
samples.py
NovelleP/dict_query
83a44119093df452890b0449103fce00aa85dc6b
[ "MIT" ]
null
null
null
samples.py
NovelleP/dict_query
83a44119093df452890b0449103fce00aa85dc6b
[ "MIT" ]
null
null
null
from dict_query import DictQuery

if __name__ == '__main__':
    d = {
        'a': {
            'a1': ['1', '2'],
            'a2': {'a21': '1'}
        },
        'b': 2,
        'c': {
            'c1': 3,
            'c2': 4
        },
        'd': {
            'd1': {'flag': 'Si', 'flag2': '1', 'd12': 'as'},
            'd2': {'flag': 'a', 'flag2': '2', 'd22': 'as'},
            'd3': {'flag': 'No', 'flag2': '3', 'd32': 'as'},
            'd4': {'flag': 'Si', 'flag2': '4', 'd42': 'as'}
        }
    }
    dict_query = DictQuery(d)
    print(dict_query.get('a/a1[@>1]'))
    print(dict_query.get('a/a1'))
    print(dict_query.get('b'))
    print(dict_query.get('@/a/a2'))
    print(dict_query.get('d[@.flag=Si]'))
    print(dict_query.get('d[@.flag>=Si]/d4'))
    print(dict_query.get('d[this#flag>=Si]$d4', path_separator='$', filterpath_separator='#', currentval_name='this'))

    data = {
        'a': {
            'a1': {'flag': 'no', 'a12': '11'},
            'a2': {'flag': 'Yes', 'a12': '12'}
        }
    }
    dict_query = DictQuery(data)
    print(dict_query.get('a[@.flag=Yes]'))
23.721311
118
0.320663
42228138f3b868684cda59e1e4a21f8b1f45802d
8,111
py
Python
test/functional/wallet_listreceivedby.py
markblundeberg/bitcoin-abc
449bfe2bb4b2edc17e2c8618f04bce1bc05fe466
[ "MIT" ]
1
2019-02-18T02:18:33.000Z
2019-02-18T02:18:33.000Z
test/functional/wallet_listreceivedby.py
markblundeberg/bitcoin-abc
449bfe2bb4b2edc17e2c8618f04bce1bc05fe466
[ "MIT" ]
null
null
null
test/functional/wallet_listreceivedby.py
markblundeberg/bitcoin-abc
449bfe2bb4b2edc17e2c8618f04bce1bc05fe466
[ "MIT" ]
2
2019-05-26T05:28:14.000Z
2019-06-11T01:13:08.000Z
#!/usr/bin/env python3 # Copyright (c) 2014-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the listreceivedbyaddress RPC.""" from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_array_result, assert_equal, assert_raises_rpc_error, sync_blocks, ) class ReceivedByTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.extra_args = [['-deprecatedrpc=accounts']] * 2 def import_deterministic_coinbase_privkeys(self): assert_equal(0, len(self.nodes[1].listreceivedbyaddress( minconf=0, include_empty=True, include_watchonly=True))) super().import_deterministic_coinbase_privkeys() self.num_cb_reward_addresses = len(self.nodes[1].listreceivedbyaddress( minconf=0, include_empty=True, include_watchonly=True)) def run_test(self): # Generate block to get out of IBD self.nodes[0].generate(1) sync_blocks(self.nodes) self.log.info("listreceivedbyaddress Test") # Send from node 0 to 1 addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 0.1) self.sync_all() # Check not listed in listreceivedbyaddress because has 0 confirmations assert_array_result(self.nodes[1].listreceivedbyaddress(), {"address": addr}, {}, True) # Bury Tx under 10 block so it will be returned by listreceivedbyaddress self.nodes[1].generate(10) self.sync_all() assert_array_result(self.nodes[1].listreceivedbyaddress(), {"address": addr}, {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]}) # With min confidence < 10 assert_array_result(self.nodes[1].listreceivedbyaddress(5), {"address": addr}, {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]}) # With min confidence > 10, should not find Tx assert_array_result(self.nodes[1].listreceivedbyaddress(11), { "address": addr}, {}, True) # Empty Tx empty_addr = self.nodes[1].getnewaddress() assert_array_result(self.nodes[1].listreceivedbyaddress(0, True), {"address": empty_addr}, {"address": empty_addr, "label": "", "amount": 0, "confirmations": 0, "txids": []}) # Test Address filtering # Only on addr expected = {"address": addr, "label": "", "amount": Decimal( "0.1"), "confirmations": 10, "txids": [txid, ]} res = self.nodes[1].listreceivedbyaddress( minconf=0, include_empty=True, include_watchonly=True, address_filter=addr) assert_array_result(res, {"address": addr}, expected) assert_equal(len(res), 1) # Error on invalid address assert_raises_rpc_error(-4, "address_filter parameter was invalid", self.nodes[1].listreceivedbyaddress, minconf=0, include_empty=True, include_watchonly=True, address_filter="bamboozling") # Another address receive money res = self.nodes[1].listreceivedbyaddress(0, True, True) # Right now 2 entries assert_equal(len(res), 2 + self.num_cb_reward_addresses) other_addr = self.nodes[1].getnewaddress() txid2 = self.nodes[0].sendtoaddress(other_addr, 0.1) self.nodes[0].generate(1) self.sync_all() # Same test as above should still pass expected = {"address": addr, "label": "", "amount": Decimal( "0.1"), "confirmations": 11, "txids": [txid, ]} res = self.nodes[1].listreceivedbyaddress(0, True, True, addr) assert_array_result(res, {"address": addr}, expected) assert_equal(len(res), 1) # Same test as above but with other_addr should still pass expected = {"address": other_addr, "label": "", "amount": Decimal( "0.1"), "confirmations": 1, 
"txids": [txid2, ]} res = self.nodes[1].listreceivedbyaddress(0, True, True, other_addr) assert_array_result(res, {"address": other_addr}, expected) assert_equal(len(res), 1) # Should be two entries though without filter res = self.nodes[1].listreceivedbyaddress(0, True, True) # Became 3 entries assert_equal(len(res), 3 + self.num_cb_reward_addresses) # Not on random addr # note on node[0]! just a random addr other_addr = self.nodes[0].getnewaddress() res = self.nodes[1].listreceivedbyaddress(0, True, True, other_addr) assert_equal(len(res), 0) self.log.info("getreceivedbyaddress Test") # Send from node 0 to 1 addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 0.1) self.sync_all() # Check balance is 0 because of 0 confirmations balance = self.nodes[1].getreceivedbyaddress(addr) assert_equal(balance, Decimal("0.0")) # Check balance is 0.1 balance = self.nodes[1].getreceivedbyaddress(addr, 0) assert_equal(balance, Decimal("0.1")) # Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress self.nodes[1].generate(10) self.sync_all() balance = self.nodes[1].getreceivedbyaddress(addr) assert_equal(balance, Decimal("0.1")) # Trying to getreceivedby for an address the wallet doesn't own should return an error assert_raises_rpc_error(-4, "Address not found in wallet", self.nodes[0].getreceivedbyaddress, addr) self.log.info("listreceivedbylabel + getreceivedbylabel Test") # set pre-state addrArr = self.nodes[1].getnewaddress() label = self.nodes[1].getaccount(addrArr) received_by_label_json = [ r for r in self.nodes[1].listreceivedbylabel() if r["label"] == label][0] balance_by_label = self.nodes[1].getreceivedbylabel(label) txid = self.nodes[0].sendtoaddress(addr, 0.1) self.sync_all() # listreceivedbylabel should return received_by_label_json because of 0 confirmations assert_array_result(self.nodes[1].listreceivedbylabel(), {"label": label}, received_by_label_json) # getreceivedbyaddress should return same balance because of 0 confirmations balance = self.nodes[1].getreceivedbylabel(label) assert_equal(balance, balance_by_label) self.nodes[1].generate(10) self.sync_all() # listreceivedbylabel should return updated received list assert_array_result(self.nodes[1].listreceivedbylabel(), {"label": label}, {"label": received_by_label_json["label"], "amount": (received_by_label_json["amount"] + Decimal("0.1"))}) # getreceivedbylabel should return updated receive total balance = self.nodes[1].getreceivedbylabel(label) assert_equal(balance, balance_by_label + Decimal("0.1")) # Create a new label named "mynewlabel" that has a 0 balance self.nodes[1].getlabeladdress(label="mynewlabel", force=True) received_by_label_json = [r for r in self.nodes[1].listreceivedbylabel( 0, True) if r["label"] == "mynewlabel"][0] # Test includeempty of listreceivedbylabel assert_equal(received_by_label_json["amount"], Decimal("0.0")) # Test getreceivedbylabel for 0 amount labels balance = self.nodes[1].getreceivedbylabel("mynewlabel") assert_equal(balance, Decimal("0.0")) if __name__ == '__main__': ReceivedByTest().main()
44.812155
153
0.625694
add9d6de21c2e83581186c7fa628457ac7a252bc
106
py
Python
src/the_tale/the_tale/common/meta_relations/conf.py
Alacrate/the-tale
43b211f3a99e93964e95abc20a8ed649a205ffcf
[ "BSD-3-Clause" ]
85
2017-11-21T12:22:02.000Z
2022-03-27T23:07:17.000Z
src/the_tale/the_tale/common/meta_relations/conf.py
Alacrate/the-tale
43b211f3a99e93964e95abc20a8ed649a205ffcf
[ "BSD-3-Clause" ]
545
2017-11-04T14:15:04.000Z
2022-03-27T14:19:27.000Z
src/the_tale/the_tale/common/meta_relations/conf.py
Alacrate/the-tale
43b211f3a99e93964e95abc20a8ed649a205ffcf
[ "BSD-3-Clause" ]
45
2017-11-11T12:36:30.000Z
2022-02-25T06:10:44.000Z
import smart_imports

smart_imports.all()

settings = utils_app_settings.app_settings('META_RELATIONS')
13.25
60
0.820755
e0a40d1f9104961ba49e7e45e8fd096b3e59197b
5,116
py
Python
xfel/command_line/upload_mtz.py
BobWangRobot/cctbx_project
bb3cd05f628ce7e4340a5de7fe40813316bb050f
[ "BSD-3-Clause-LBNL" ]
null
null
null
xfel/command_line/upload_mtz.py
BobWangRobot/cctbx_project
bb3cd05f628ce7e4340a5de7fe40813316bb050f
[ "BSD-3-Clause-LBNL" ]
null
null
null
xfel/command_line/upload_mtz.py
BobWangRobot/cctbx_project
bb3cd05f628ce7e4340a5de7fe40813316bb050f
[ "BSD-3-Clause-LBNL" ]
null
null
null
from __future__ import absolute_import, division, print_function # LIBTBX_SET_DISPATCHER_NAME cctbx.xfel.upload_mtz from libtbx.phil import parse from dials.util import Sorry import os, sys import re help_message = """ Upload an .mtz file and merging log to a shared Google Drive folder. """ phil_str = """ drive { credential_file = None .type = path .help = Credential file (json format) for a Google Cloud service account shared_folder_id = None .type = str .help = Id string of the destination folder. If the folder url is \ https://drive.google.com/drive/u/0/folders/1NlJkfL6CMd1NZIl6Duy23i4G1RM9cNH- , \ then the id is 1NlJkfL6CMd1NZIl6Duy23i4G1RM9cNH- . } input { mtz_file = None .type = path .help = Location of the mtz file to upload log_file = None .type = path .help = Location of the log file to upload. If None, guess from mtz name. version = None .type = int .help = Dataset version number. If None, guess from mtz name. } """ phil_scope = parse(phil_str) def _get_root_and_version(mtz_fname): """ find and return the dataset name and version string from an mtz filename """ regex = re.compile(r'(.*)_(v\d{3})_all.mtz$') hit = regex.search(mtz_fname) assert hit is not None assert len(hit.groups()) == 2 return hit.groups() def _get_log_fname(mtz_fname): """ convert an mtz filename to the corresponding main log filename """ regex = re.compile(r'(.*)_all.mtz$') hit = regex.search(mtz_fname) assert hit is not None assert len(hit.groups()) == 1 return hit.groups()[0] + '_main.log' class pydrive2_interface: """ Wrapper for uploading versioned mtzs and logs using Pydrive2. Constructed from a service account credentials file and the Google Drive id of the top-level destination folder. """ def __init__(self, cred_file, folder_id): try: from pydrive2.auth import ServiceAccountCredentials, GoogleAuth from pydrive2.drive import GoogleDrive except ImportError: raise Sorry("Pydrive2 not found. Try:\n$ conda install pydrive2 -c conda-forge") gauth = GoogleAuth() scope = ['https://www.googleapis.com/auth/drive'] gauth.credentials = ServiceAccountCredentials.from_json_keyfile_name( cred_file, scope ) self.drive = GoogleDrive(gauth) self.top_folder_id = folder_id def _fetch_or_create_folder(self, fname, parent_id): query = { "q": "'{}' in parents and title='{}'".format(parent_id, fname), "supportsTeamDrives": "true", "includeItemsFromAllDrives": "true", "corpora": "allDrives" } hits = self.drive.ListFile(query).GetList() if hits: assert len(hits)==1 return hits[0]['id'] else: query = { "title": fname, "mimeType": "application/vnd.google-apps.folder", "parents": [{"kind": "drive#fileLink", "id": parent_id}] } f = self.drive.CreateFile(query) f.Upload() return f['id'] def _upload_detail(self, file_path, parent_id): title = os.path.split(file_path)[1] query = { "title": title, "parents": [{"kind": "drive#fileLink", "id": parent_id}] } f = self.drive.CreateFile(query) f.SetContentFile(file_path) f.Upload() def upload(self, folder_list, files): """ Upload from the given file paths to a folder defined by the hierarchy in folder_list. So if `folders` is ['a', 'b'] and `files` is [f1, f2], then inside the folder defined by self.folder_id, we create nested folder a/b/ and upload f1 and f2 to that folder. 
""" current_folder_id = self.top_folder_id for fname in folder_list: current_folder_id = self._fetch_or_create_folder(fname, current_folder_id) for file in files: self._upload_detail(file, current_folder_id) def run(args): user_phil = [] if '--help' in args or '-h' in args: print(help_message) phil_scope.show() return for arg in args: try: user_phil.append(parse(arg)) except Exception as e: raise Sorry("Unrecognized argument %s"%arg) params = phil_scope.fetch(sources=user_phil).extract() run_with_preparsed(params) def run_with_preparsed(params): assert params.drive.credential_file is not None assert params.drive.shared_folder_id is not None assert params.input.mtz_file is not None mtz_dirname, mtz_fname = os.path.split(params.input.mtz_file) mtz_path = params.input.mtz_file if params.input.version is not None: dataset_root = _get_root_and_version(mtz_fname)[0] version_str = "v{:03d}".format(params.input.version) else: dataset_root, version_str = _get_root_and_version(mtz_fname) if params.input.log_file is not None: log_path = params.input.log_file else: log_fname = _get_log_fname(mtz_fname) log_path = os.path.join(mtz_dirname, log_fname) drive = pydrive2_interface( params.drive.credential_file, params.drive.shared_folder_id ) folders = [dataset_root, version_str] files = [mtz_path, log_path] drive.upload(folders, files) if __name__=="__main__": run(sys.argv[1:])
28.903955
86
0.687647
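The two filename helpers in the record above encode the naming convention the tool expects; a quick check against the regexes makes it concrete (the dataset name is made up).

# Expected mtz naming convention, exercised on a made-up name:
root, version = _get_root_and_version("lysozyme_v003_all.mtz")
assert (root, version) == ("lysozyme", "v003")
assert _get_log_fname("lysozyme_v003_all.mtz") == "lysozyme_v003_main.log"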
f52543e6855ee73fc8b3922c1ad3a21f47e830b6
951
py
Python
kubernetes/test/test_v1_node_system_info.py
L3T/python
b6e4ae81a2afb49f668a142eb7d1c6e2571ef478
[ "Apache-2.0" ]
2
2020-06-21T08:03:18.000Z
2020-06-21T09:53:29.000Z
kubernetes/test/test_v1_node_system_info.py
L3T/python
b6e4ae81a2afb49f668a142eb7d1c6e2571ef478
[ "Apache-2.0" ]
null
null
null
kubernetes/test/test_v1_node_system_info.py
L3T/python
b6e4ae81a2afb49f668a142eb7d1c6e2571ef478
[ "Apache-2.0" ]
1
2020-06-21T08:03:17.000Z
2020-06-21T08:03:17.000Z
# coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 OpenAPI spec version: release-1.16 Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import unittest import kubernetes.client from kubernetes.client.models.v1_node_system_info import V1NodeSystemInfo # noqa: E501 from kubernetes.client.rest import ApiException class TestV1NodeSystemInfo(unittest.TestCase): """V1NodeSystemInfo unit test stubs""" def setUp(self): pass def tearDown(self): pass def testV1NodeSystemInfo(self): """Test V1NodeSystemInfo""" # FIXME: construct object with mandatory attributes with example values # model = kubernetes.client.models.v1_node_system_info.V1NodeSystemInfo() # noqa: E501 pass if __name__ == '__main__': unittest.main()
23.775
124
0.721346
78e46fcc50e4850d2094ec07289d835448802f31
22,559
py
Python
kedro/pipeline/node.py
andromeida-maritime-solutions-pvt-ltd/kedro
005cec83ca52a8cdadc9cb428ec1c4cd86682da3
[ "Apache-2.0" ]
3
2019-06-06T15:36:10.000Z
2019-06-09T22:27:55.000Z
kedro/pipeline/node.py
andromeida-maritime-solutions-pvt-ltd/kedro
005cec83ca52a8cdadc9cb428ec1c4cd86682da3
[ "Apache-2.0" ]
null
null
null
kedro/pipeline/node.py
andromeida-maritime-solutions-pvt-ltd/kedro
005cec83ca52a8cdadc9cb428ec1c4cd86682da3
[ "Apache-2.0" ]
null
null
null
# Copyright 2018-2019 QuantumBlack Visual Analytics Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND # NONINFRINGEMENT. IN NO EVENT WILL THE LICENSOR OR OTHER CONTRIBUTORS # BE LIABLE FOR ANY CLAIM, DAMAGES, OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF, OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # The QuantumBlack Visual Analytics Limited (“QuantumBlack”) name and logo # (either separately or in combination, “QuantumBlack Trademarks”) are # trademarks of QuantumBlack. The License does not grant you any right or # license to the QuantumBlack Trademarks. You may not use the QuantumBlack # Trademarks or any confusingly similar mark as a trademark for your product, # or use the QuantumBlack Trademarks in any other manner that might cause # confusion in the marketplace, including but not limited to in advertising, # on websites, or on software. # # See the License for the specific language governing permissions and # limitations under the License. """This module provides user-friendly functions for creating nodes as parts of Kedro pipelines. """ import copy import inspect import logging from collections import Counter from functools import reduce from typing import Any, Callable, Dict, Iterable, List, Set, Union class Node: """``Node`` is an auxiliary class facilitating the operations required to run user-provided functions as part of Kedro pipelines. """ # pylint: disable=W9016 def __init__( self, func: Callable, inputs: Union[None, str, List[str], Dict[str, str]], outputs: Union[None, str, List[str], Dict[str, str]], *, name: str = None, tags: Iterable[str] = None, decorators: Iterable[Callable] = None ): """Create a node in the pipeline by providing a function to be called along with variable names for inputs and/or outputs. Args: func: A function that corresponds to the node logic. The function should have at least one input or output. inputs: The name or the list of the names of variables used as inputs to the function. The number of names should match the number of arguments in the definition of the provided function. When Dict[str, str] is provided, variable names will be mapped to function argument names. outputs: The name or the list of the names of variables used as outputs to the function. The number of names should match the number of outputs returned by the provided function. When Dict[str, str] is provided, variable names will be mapped to the named outputs the function returns. name: Optional node name to be used when displaying the node in logs or any other visualisations. tags: Optional set of tags to be applied to the node. decorators: Optional list of decorators to be applied to the node. Raises: ValueError: Raised in the following cases: a) When the provided arguments do not conform to the format suggested by the type hint of the argument. b) When the node produces multiple outputs with the same name. c) An input has the same name as an output. 
""" if not callable(func): raise ValueError( _node_error_message( "first argument must be a " "function, not `{}`.".format(type(func).__name__) ) ) if inputs and not isinstance(inputs, (list, dict, str)): raise ValueError( _node_error_message( "`inputs` type must be one of [String, List, Dict, None], " "not `{}`.".format(type(inputs).__name__) ) ) if outputs and not isinstance(outputs, (list, dict, str)): raise ValueError( _node_error_message( "`outputs` type must be one of [String, List, Dict, None], " "not `{}`.".format(type(outputs).__name__) ) ) if not inputs and not outputs: raise ValueError( _node_error_message("it must have some `inputs` or `outputs`.") ) self._validate_inputs(func, inputs) self._func = func self._inputs = inputs self._outputs = outputs self._name = name self._tags = set([] if tags is None else tags) self._decorators = decorators or [] self._validate_unique_outputs() self._validate_inputs_dif_than_outputs() def tag(self, tags: Iterable[str]) -> "Node": """Create a new ``Node`` which is an exact copy of the current one, but with more tags added to it. Args: tags: The tags to be added to the new node. Returns: A copy of the current ``Node`` object with the tags added. """ return Node( self._func, self._inputs, self._outputs, name=self._name, tags=set(self._tags) | set(tags), decorators=self._decorators, ) @property def tags(self) -> Set[str]: """Return the tags assigned to the node. Returns: Return the set of all assigned tags to the node. """ return set(self._tags) @property def _logger(self): return logging.getLogger(__name__) def decorate(self, *decorators: Callable) -> "Node": """Create a new ``Node`` by applying the provided decorators to the underlying function. If no decorators are passed, it will return a new ``Node`` object, but with no changes to the function. Args: decorators: List of decorators to be applied on the node function. Decorators will be applied from right to left. Returns: A new ``Node`` object with the decorators applied to the function. 
Example: :: >>> >>> from functools import wraps >>> >>> >>> def apply_f(func: Callable) -> Callable: >>> @wraps(func) >>> def with_f(*args, **kwargs): >>> args = ["f({})".format(a) for a in args] >>> return func(*args, **kwargs) >>> return with_f >>> >>> >>> def apply_g(func: Callable) -> Callable: >>> @wraps(func) >>> def with_g(*args, **kwargs): >>> args = ["g({})".format(a) for a in args] >>> return func(*args, **kwargs) >>> return with_g >>> >>> >>> def apply_h(func: Callable) -> Callable: >>> @wraps(func) >>> def with_h(*args, **kwargs): >>> args = ["h({})".format(a) for a in args] >>> return func(*args, **kwargs) >>> return with_h >>> >>> >>> def apply_fg(func: Callable) -> Callable: >>> @wraps(func) >>> def with_fg(*args, **kwargs): >>> args = ["fg({})".format(a) for a in args] >>> return func(*args, **kwargs) >>> return with_fg >>> >>> >>> def identity(value): >>> return value >>> >>> >>> # using it as a regular python decorator >>> @apply_f >>> def decorated_identity(value): >>> return value >>> >>> >>> # wrapping the node function >>> old_node = node(apply_g(decorated_identity), 'input', 'output', >>> name='node') >>> # using the .decorate() method to apply multiple decorators >>> new_node = old_node.decorate(apply_h, apply_fg) >>> result = new_node.run(dict(input=1)) >>> >>> assert old_node.name == new_node.name >>> assert "output" in result >>> assert result['output'] == "f(g(fg(h(1))))" """ decorators = self._decorators + list(reversed(decorators)) return Node( self._func, self._inputs, self._outputs, name=self._name, tags=self.tags, decorators=decorators, ) @property def name(self) -> str: # pragma: no-cover """Node's name. Returns: Node's name if provided or the name of its function. """ return self._name if self._name else str(self) @property def inputs(self) -> List[str]: """Return node inputs as a list preserving the original order if possible. Returns: Node input names as a list. """ return self._to_list(self._inputs) @property def outputs(self) -> List[str]: """Return node outputs as a list preserving the original order if possible. Returns: Node output names as a list. """ return self._to_list(self._outputs) @staticmethod def _to_list(element: Union[None, str, List[str], Dict[str, str]]) -> List: """Make a list out of node inputs/outputs. Returns: List[str]: Node input/output names as a list to standardise. """ if element is None: return list() if isinstance(element, str): return [element] if isinstance(element, dict): return list(element.values()) return element def run(self, inputs: Dict[str, Any] = None) -> Dict[str, Any]: """Run this node using the provided inputs and return its results in a dictionary. Args: inputs: Dictionary of inputs as specified at the creation of the node. Raises: ValueError: In the following cases: a) The node function inputs are incompatible with the node input definition. Example 1: node definition input is a list of 2 DataFrames, whereas only 1 was provided or 2 different ones were provided. b) The node function outputs are incompatible with the node output definition. Example 1: node function definition is a dictionary, whereas function returns a list. Example 2: node definition output is a list of 5 strings, whereas the function returns a list of 4 objects. Exception: Any exception thrown during execution of the node. Returns: All produced node outputs are returned in a dictionary, where the keys are defined by the node outputs. 
""" self._logger.info("Running node: %s", str(self)) outputs = None if not (inputs is None or isinstance(inputs, dict)): raise ValueError( "Node.run() expects a dictionary or None, " "but got {} instead".format(type(inputs)) ) try: inputs = dict() if inputs is None else inputs if not self._inputs: outputs = self._run_no_inputs(inputs) elif isinstance(self._inputs, str): outputs = self._run_one_input(inputs) elif isinstance(self._inputs, list): outputs = self._run_with_list(inputs) elif isinstance(self._inputs, dict): outputs = self._run_with_dict(inputs) return self._outputs_to_dictionary(outputs) # purposely catch all exceptions except Exception as exc: self._logger.error("Node `%s` failed with error: \n%s", str(self), str(exc)) raise exc @property def _decorated_func(self): return reduce(lambda g, f: f(g), self._decorators, self._func) def _run_no_inputs(self, inputs: Dict[str, Any]): if inputs: raise ValueError( "Node {} expected no inputs, " "but got the following {} input(s) instead: {}".format( str(self), len(inputs), list(sorted(inputs.keys())) ) ) return self._decorated_func() def _run_one_input(self, inputs: Dict[str, Any]): if len(inputs) != 1 or self._inputs not in inputs: raise ValueError( "Node {} expected one input named '{}', " "but got the following {} input(s) instead: {}".format( str(self), self._inputs, len(inputs), list(sorted(inputs.keys())) ) ) return self._decorated_func(inputs[self._inputs]) def _run_with_list(self, inputs: Dict[str, Any]): all_available = set(self._inputs).issubset(inputs.keys()) if len(self._inputs) != len(inputs) or not all_available: # This can be split in future into two cases, one successful raise ValueError( "Node {} expected {} input(s) {}, " "but got the following {} input(s) instead: {}.".format( str(self), len(self._inputs), self._inputs, len(inputs), list(sorted(inputs.keys())), ) ) # Ensure the function gets the inputs in the correct order return self._decorated_func(*[inputs[item] for item in self._inputs]) def _run_with_dict(self, inputs: Dict[str, Any]): all_available = set(self._inputs.values()).issubset(inputs.keys()) if len(set(self._inputs.values())) != len(inputs) or not all_available: # This can be split in future into two cases, one successful raise ValueError( "Node {} expected {} input(s) {}, " "but got the following {} input(s) instead: {}.".format( str(self), len(set(self._inputs.values())), list(sorted(set(self._inputs.values()))), len(inputs), list(sorted(inputs.keys())), ) ) kwargs = {arg: inputs[alias] for arg, alias in self._inputs.items()} return self._decorated_func(**kwargs) def _outputs_to_dictionary(self, outputs): def _from_dict(): if set(self._outputs.keys()) != set(outputs.keys()): raise ValueError( "Failed to save outputs of node {}.\n" "The node's output keys {} do not " "match with the returned output's keys {}.".format( str(self), set(outputs.keys()), set(self._outputs.keys()) ) ) return {name: outputs[key] for key, name in self._outputs.items()} def _from_list(): if not isinstance(outputs, list): raise ValueError( "Failed to save outputs of node {}.\n" "The node definition contains a list of " "outputs {}, whereas the node function " "returned a `{}`.".format( str(self), self._outputs, type(outputs).__name__ ) ) if len(outputs) != len(self._outputs): raise ValueError( "Failed to save outputs of node {}.\n" "The node function returned {} output(s), " "whereas the node definition contains {} " "output(s).".format(str(self), len(outputs), len(self._outputs)) ) return dict(zip(self._outputs, outputs)) if 
isinstance(self._outputs, dict) and not isinstance(outputs, dict): raise ValueError( "Failed to save outputs of node {}.\n" "The node output is a dictionary, whereas the " "function output is not.".format(str(self)) ) if self._outputs is None: return {} if isinstance(self._outputs, str): return {self._outputs: outputs} if isinstance(self._outputs, dict): return _from_dict() return _from_list() def _validate_inputs(self, func, inputs): # inspect does not support built-in Python functions written in C. # Thus we only validate func if it is not built-in. if not inspect.isbuiltin(func): args, kwargs = self._process_inputs_for_bind(inputs) try: inspect.signature(func).bind(*args, **kwargs) except Exception as exc: func_args = inspect.signature(func).parameters.keys() raise TypeError( "Inputs of function expected {}, but got {}".format( str(list(func_args)), str(inputs) ) ) from exc @staticmethod def _process_inputs_for_bind(inputs: Union[None, str, List[str], Dict[str, str]]): # Safeguard that we do not mutate list inputs inputs = copy.copy(inputs) args = [] kwargs = {} if isinstance(inputs, str): args = [inputs] elif isinstance(inputs, list): args = inputs elif isinstance(inputs, dict): kwargs = inputs return args, kwargs def _validate_unique_outputs(self): diff = Counter(self.outputs) - Counter(set(self.outputs)) if diff: raise ValueError( "Failed to create node {} due to duplicate" " output(s) {}.\nNode outputs must be unique.".format( str(self), set(diff.keys()) ) ) def _validate_inputs_dif_than_outputs(self): common_in_out = set(self.inputs).intersection(set(self.outputs)) if common_in_out: raise ValueError( "Failed to create node {}.\n" "A node cannot have the same inputs and outputs: " "{}".format(str(self), common_in_out) ) def __str__(self): def _sorted_set_to_str(xset): return "[" + ",".join([name for name in sorted(xset)]) + "]" out_str = _sorted_set_to_str(self.outputs) if self._outputs else "None" in_str = _sorted_set_to_str(self.inputs) if self._inputs else "None" prefix = self._name + ": " if self._name else "" return prefix + "{}({}) -> {}".format(self._func.__name__, in_str, out_str) def __repr__(self): # pragma: no cover return "Node({}, {!r}, {!r}, {!r})".format( self._func.__name__, self._inputs, self._outputs, self._name ) def __eq__(self, other): # pragma: no cover keys = {"_inputs", "_outputs", "_func", "_name"} return all(self.__dict__[k] == other.__dict__[k] for k in keys) def __hash__(self): return hash((tuple(self.inputs), tuple(self.outputs), self._name)) def _node_error_message(msg) -> str: return ( "Invalid Node definition: {}\n" "Format should be: node(function, inputs, outputs)" ).format(msg) def node( # pylint: disable=W9016 func: Callable, inputs: Union[None, str, List[str], Dict[str, str]], outputs: Union[None, str, List[str], Dict[str, str]], *, name: str = None, tags: Iterable[str] = None ) -> Node: """Create a node in the pipeline by providing a function to be called along with variable names for inputs and/or outputs. Args: func: A function that corresponds to the node logic. The function should have at least one input or output. inputs: The name or the list of the names of variables used as inputs to the function. The number of names should match the number of arguments in the definition of the provided function. When Dict[str, str] is provided, variable names will be mapped to function argument names. outputs: The name or the list of the names of variables used as outputs to the function. 
The number of names should match the number of outputs returned by the provided function. When Dict[str, str] is provided, variable names will be mapped to the named outputs the function returns. name: Optional node name to be used when displaying the node in logs or any other visualisations. tags: Optional set of tags to be applied to the node. Returns: A Node object with mapped inputs, outputs and function. Example: :: >>> import pandas as pd >>> import numpy as np >>> >>> def clean_data(cars: pd.DataFrame, >>> boats: pd.DataFrame) -> Dict[str, pd.DataFrame]: >>> return dict(cars_df=cars.dropna(), boats_df=boats.dropna()) >>> >>> def halve_dataframe(data: pd.DataFrame) -> List[pd.DataFrame]: >>> return np.array_split(data, 2) >>> >>> nodes = [ >>> node(clean_data, >>> inputs=['cars2017', 'boats2017'], >>> outputs=dict(cars_df='clean_cars2017', >>> boats_df='clean_boats2017')), >>> node(halve_dataframe, >>> 'clean_cars2017', >>> ['train_cars2017', 'test_cars2017']), >>> node(halve_dataframe, >>> dict(data='clean_boats2017'), >>> ['train_boats2017', 'test_boats2017']) >>> ] """ return Node(func, inputs, outputs, name=name, tags=tags)
38.431005
88
0.553571
fa17193925619017346b2c8102c910230d5a38b1
3,012
py
Python
tests/contract/KT1FtKk4EzjwwkT31VqbKvPAG1dpmaDpct3V/test_ftkk4e.py
bantalon/pytezos
e538fb47a0879e70af3c0c074348a5edc3a94372
[ "MIT" ]
null
null
null
tests/contract/KT1FtKk4EzjwwkT31VqbKvPAG1dpmaDpct3V/test_ftkk4e.py
bantalon/pytezos
e538fb47a0879e70af3c0c074348a5edc3a94372
[ "MIT" ]
null
null
null
tests/contract/KT1FtKk4EzjwwkT31VqbKvPAG1dpmaDpct3V/test_ftkk4e.py
bantalon/pytezos
e538fb47a0879e70af3c0c074348a5edc3a94372
[ "MIT" ]
null
null
null
from unittest import TestCase
from os.path import dirname, join
import json

from pytezos.michelson.micheline import get_script_section
from pytezos.michelson.types.base import MichelsonType
from pytezos.michelson.program import MichelsonProgram
from pytezos.michelson.format import micheline_to_michelson
from pytezos.michelson.parse import michelson_to_micheline
from pytezos.michelson.forge import forge_micheline, unforge_micheline

folder = 'dexter_usdtz_xtz'


class MainnetContractTestCaseFTKK4E(TestCase):

    @classmethod
    def setUpClass(cls):
        with open(join(dirname(__file__), '__script__.json')) as f:
            script = json.loads(f.read())

        cls.program = MichelsonProgram.match(script['code'])
        cls.script = script

        with open(join(dirname(__file__), '__entrypoints__.json')) as f:
            entrypoints = json.loads(f.read())

        cls.entrypoints = entrypoints
        # cls.maxDiff = None

    def test_parameter_type_ftkk4e(self):
        type_expr = self.program.parameter.as_micheline_expr()
        self.assertEqual(
            get_script_section(self.script, 'parameter'),
            type_expr,
            'micheline -> type -> micheline')

    def test_entrypoints_ftkk4e(self):
        ep_types = self.program.parameter.list_entrypoints()
        self.assertEqual(len(self.entrypoints['entrypoints']) + 1, len(ep_types))
        for name, ep_type in ep_types.items():
            if name not in ['default', 'root']:
                expected_type = MichelsonType.match(self.entrypoints['entrypoints'][name])
                expected_type.assert_type_equal(ep_type)

    def test_storage_type_ftkk4e(self):
        type_expr = self.program.storage.as_micheline_expr()
        self.assertEqual(
            get_script_section(self.script, 'storage'),
            type_expr,
            'micheline -> type -> micheline')

    def test_storage_encoding_ftkk4e(self):
        val = self.program.storage.from_micheline_value(self.script['storage'])
        val_expr = val.to_micheline_value(mode='legacy_optimized')
        self.assertEqual(self.script['storage'], val_expr, 'micheline -> value -> micheline')

        val_ = self.program.storage.from_python_object(val.to_python_object())
        val_expr_ = val_.to_micheline_value(mode='legacy_optimized')
        self.assertEqual(self.script['storage'], val_expr_, 'value -> pyobj -> value -> micheline')

    def test_script_parsing_formatting(self):
        actual = michelson_to_micheline(micheline_to_michelson(self.script['code']))
        self.assertEqual(self.script['code'], actual)

    def test_storage_forging(self):
        expected = self.script['storage']
        actual = unforge_micheline(forge_micheline(expected))
        self.assertEqual(expected, actual)

        expected = self.program.storage.from_micheline_value(expected).to_micheline_value(mode='readable')
        actual = unforge_micheline(forge_micheline(expected))
        self.assertEqual(expected, actual)
40.702703
106
0.699867
1b2f9b27a105cee42cf5d72d7bfc44fcf6f7851a
580
py
Python
L1Trigger/TrackFindingTracklet/python/ProducerTrackBuilderChannel_cfi.py
dally96/cmssw
c37b9bfa391850cb349c71190b0bbb2d04224cc8
[ "Apache-2.0" ]
1
2022-02-24T15:01:09.000Z
2022-02-24T15:01:09.000Z
L1Trigger/TrackFindingTracklet/python/ProducerTrackBuilderChannel_cfi.py
dally96/cmssw
c37b9bfa391850cb349c71190b0bbb2d04224cc8
[ "Apache-2.0" ]
8
2022-03-03T14:24:02.000Z
2022-03-22T19:53:45.000Z
L1Trigger/TrackFindingTracklet/python/ProducerTrackBuilderChannel_cfi.py
dally96/cmssw
c37b9bfa391850cb349c71190b0bbb2d04224cc8
[ "Apache-2.0" ]
null
null
null
import FWCore.ParameterSet.Config as cms TrackBuilderChannel_params = cms.PSet ( UseDuplicateRemoval = cms.bool ( True ), # use tracklet seed type as channel id if False, binned track pt used if True NumSeedTypes = cms.int32 ( 8 ), # number of used seed types in tracklet algorithm #PtBoundaries = cms.vdouble( 1.8, 2.16, 2.7, 3.6, 5.4, 10.8 ), # pt Boundaries in GeV, last boundary is infinity PtBoundaries = cms.vdouble( 1.34 ), # pt Boundaries in GeV, last boundary is infinity )
58
150
0.617241
00c6e63eb1f557a22b2f810e5bf0f034eb74b0b1
7,349
py
Python
nomadgram/images/views.py
marrywill/nomadgram
c10c3f4cb1dfc833d2ef1e74caeee0ed420eacdd
[ "MIT" ]
2
2018-07-13T00:50:15.000Z
2019-08-30T09:58:44.000Z
nomadgram/images/views.py
marrywill/nomadgram
c10c3f4cb1dfc833d2ef1e74caeee0ed420eacdd
[ "MIT" ]
null
null
null
nomadgram/images/views.py
marrywill/nomadgram
c10c3f4cb1dfc833d2ef1e74caeee0ed420eacdd
[ "MIT" ]
2
2017-11-01T19:03:42.000Z
2019-11-15T02:10:12.000Z
from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from . import models, serializers from nomadgram.notifications import views as notification_views from nomadgram.users import models as user_models from nomadgram.users import serializers as user_serializers class Images(APIView): def get(self, request, format=None): user = request.user following_users = user.following.all() image_list = [] for following_user in following_users: user_images = following_user.images.all()[:2] for image in user_images: image_list.append(image) my_images = user.images.all()[:2] for image in my_images: image_list.append(image) sorted_list = sorted( image_list, key=lambda image: image.created_at, reverse=True) serializer = serializers.ImageSerializer( sorted_list, many=True, context={'request': request}) return Response(serializer.data) def post(self, request, format=None): user = request.user serializer = serializers.InputImageSerializer(data=request.data) if serializer.is_valid(): serializer.save(creator=user) return Response(data=serializer.data, status=status.HTTP_201_CREATED) else: print(serializer.errors) return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST) class LikeImage(APIView): def get(self, request, image_id, format=None): likes = models.Like.objects.filter(image__id=image_id) like_creators_ids = likes.values('creator_id') users = user_models.User.objects.filter(id__in=like_creators_ids) serializer = user_serializers.ListUserSerializer(users, many=True) return Response(data=serializer.data, status=status.HTTP_200_OK) def post(self, request, image_id, format=None): user = request.user try: found_image = models.Image.objects.get(id=image_id) except models.Image.DoesNotExist: return Response(status=status.HTTP_404_NOT_FOUND) try: preexisting_like = models.Like.objects.get( creator = user, image = found_image ) return Response(status=status.HTTP_304_NOT_MODIFIED) except models.Like.DoesNotExist: new_like = models.Like.objects.create( creator = user, image = found_image ) new_like.save() notification_views.create_notification(user, found_image.creator, 'like', found_image) return Response(status=status.HTTP_201_CREATED) class UnLikeImage(APIView): def delete(self, request, image_id, format=None): user = request.user try: found_image = models.Image.objects.get(id=image_id) except models.Image.DoesNotExist: return Response(status=status.HTTP_404_NOT_FOUND) try: preexisting_like = models.Like.objects.get( creator = user, image = found_image ) preexisting_like.delete() return Response(status=status.HTTP_204_NO_CONTENT) except models.Like.DoesNotExist: return Response(status=status.HTTP_304_NOT_MODIFIED) class CommentOnImage(APIView): def post(self, request, image_id, format=None): user = request.user try: found_image = models.Image.objects.get(id=image_id) except models.Image.DoesNotExist: return Response(status=status.HTTP_404_NOT_FOUND) serializer = serializers.CommentSerializer(data=request.data) if serializer.is_valid(): serializer.save(creator=user, image=found_image) notification_views.create_notification(user, found_image.creator, 'comment', found_image, serializer.data['message']) return Response(data=serializer.data, status=status.HTTP_201_CREATED) else: return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST) class Comment(APIView): def delete(self, request, comment_id, format=None): user = request.user try: comment = models.Comment.objects.get(id=comment_id, creator=user) comment.delete() 
            return Response(status=status.HTTP_204_NO_CONTENT)
        except models.Comment.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)


class Search(APIView):

    def get(self, request, format=None):
        hashtags = request.query_params.get('hashtags', None)
        if hashtags is not None:
            hashtags = hashtags.split(",")
            images = models.Image.objects.filter(tags__name__in=hashtags).distinct()
            serializer = serializers.CountImageSerializer(images, many=True)
            return Response(data=serializer.data, status=status.HTTP_200_OK)
        else:
            return Response(status=status.HTTP_400_BAD_REQUEST)


class ModerateComments(APIView):

    def delete(self, request, image_id, comment_id, format=None):
        user = request.user
        try:
            comment_to_delete = models.Comment.objects.get(id=comment_id, image__id=image_id, image__creator=user)
            comment_to_delete.delete()
        except models.Comment.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)

        return Response(status=status.HTTP_204_NO_CONTENT)


class ImageDetail(APIView):

    def find_own_image(self, image_id, user):
        try:
            image = models.Image.objects.get(id=image_id, creator=user)
            return image
        except models.Image.DoesNotExist:
            return None

    def get(self, request, image_id, format=None):
        try:
            image = models.Image.objects.get(id=image_id)
        except models.Image.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)

        serializer = serializers.ImageSerializer(image)
        return Response(data=serializer.data, status=status.HTTP_200_OK)

    def put(self, request, image_id, format=None):
        user = request.user
        image = self.find_own_image(image_id, user)
        if image is None:
            return Response(status=status.HTTP_400_BAD_REQUEST)

        serializer = serializers.InputImageSerializer(image, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save(creator=user)
            return Response(data=serializer.data, status=status.HTTP_204_NO_CONTENT)
        else:
            return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, image_id, format=None):
        user = request.user
        image = self.find_own_image(image_id, user)
        if image is None:
            return Response(status=status.HTTP_400_BAD_REQUEST)

        image.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
27.732075
129
0.638862
c9b94966c34255be522dedf6cba93b83bc2736d2
496
py
Python
reventlov/bot_plugin.py
EDyO/reventlov
2c2a399dbbe0166ddf5845ef801c542ed263e2d4
[ "MIT" ]
null
null
null
reventlov/bot_plugin.py
EDyO/reventlov
2c2a399dbbe0166ddf5845ef801c542ed263e2d4
[ "MIT" ]
20
2018-05-12T10:40:40.000Z
2018-05-27T18:52:01.000Z
reventlov/bot_plugin.py
EDyO/reventlov
2c2a399dbbe0166ddf5845ef801c542ed263e2d4
[ "MIT" ]
1
2018-05-06T00:33:46.000Z
2018-05-06T00:33:46.000Z
from telegram.ext import CommandHandler class BotPlugin(object): @property def commands(self): return [ handler for handler in self.handlers if handler.__class__ == CommandHandler ] def add_handlers(self, dispatcher): for handler in self.handlers: dispatcher.add_handler(handler) def remove_handlers(self, dispatcher): for handler in self.handlers: dispatcher.remove_handler(handler)
24.8
50
0.635081
1e4be94d5408eb2d87e2a987f7b8d46034bc2e97
1,292
py
Python
awesome/parsers/nodejs.py
jcardibo/awesome-finder
fa13fbad98319537e8db48b861f5e82e27cadb93
[ "MIT" ]
248
2017-09-11T00:17:15.000Z
2022-03-30T16:44:21.000Z
awesome/parsers/nodejs.py
jcardibo/awesome-finder
fa13fbad98319537e8db48b861f5e82e27cadb93
[ "MIT" ]
13
2017-09-13T00:07:47.000Z
2021-06-07T15:22:55.000Z
awesome/parsers/nodejs.py
jcardibo/awesome-finder
fa13fbad98319537e8db48b861f5e82e27cadb93
[ "MIT" ]
29
2017-09-12T19:44:55.000Z
2022-01-17T00:20:28.000Z
from . import AbstractAwesomeParser class AwesomeNodejsParser(AbstractAwesomeParser): AWESOME_TITLE = 'nodejs' AWESOME_README_URL = 'https://raw.githubusercontent.com/sindresorhus/awesome-nodejs/master/readme.md' def find_content(self): readme = self.read_readme() content = readme.split('\n\n## Packages')[1] lines = [] for line in content.split('\n'): lines.append(line) return lines def parse_awesome_content(self, content): awesome_blocks = [] for line in content: # Parse the header title if line.startswith('###'): plain_title = self.parse_category_title(line) awesome_blocks.append({ 'type': 'category', 'line': plain_title, }) # Parse the list item elif line.strip().startswith('- ['): plain_line, link = self.parse_link_line(line) awesome_blocks.append({ 'type': 'awesome', 'line': plain_line, 'link': link, }) # Ignore last useless parts elif line.startswith('## License'): break return awesome_blocks
34
105
0.533282
f6f92d98f5e82a724645aca67b8cc3266b0ee3e6
4,891
py
Python
osm_nbi/html_out.py
ed1000/osm_nbi_flask
d942b4d8aecea0a10b07a466d1694151636d9e7b
[ "Apache-2.0" ]
null
null
null
osm_nbi/html_out.py
ed1000/osm_nbi_flask
d942b4d8aecea0a10b07a466d1694151636d9e7b
[ "Apache-2.0" ]
null
null
null
osm_nbi/html_out.py
ed1000/osm_nbi_flask
d942b4d8aecea0a10b07a466d1694151636d9e7b
[ "Apache-2.0" ]
1
2022-03-15T15:45:57.000Z
2022-03-15T15:45:57.000Z
""" Contains html text in variables to make and html response """ import yaml from http import HTTPStatus __author__ = "Alfonso Tierno <[email protected]>" html_start = """ <!DOCTYPE html> <html> <head> <link href="/osm/static/style.css" rel="stylesheet"> <title>Welcome to OSM</title> </head> <body> <div id="osm_topmenu"> <div> <a href="https://osm.etsi.org"> <img src="/osm/static/OSM-logo.png" height="42" width="100" style="vertical-align:middle"> </a> <a>( {} )</a> <a href="/osm/vnfpkgm/v1/vnf_packages">VNFDs </a> <a href="/osm/nsd/v1/ns_descriptors">NSDs </a> <a href="/osm/nslcm/v1/ns_instances">NSs </a> <a href="/osm/user/v1">USERs </a> <a href="/osm/project/v1">PROJECTs </a> <a href="/osm/token/v1">TOKENs </a> <a href="/osm/token/v1?METHOD=DELETE">logout </a> </div> </div> """ html_body = """ <h1>{item}</h1> """ html_end = """ </body> </html> """ html_body_error = "<h2> Error <pre>{}</pre> </h2>" html_auth2 = """ <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html> <head><META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <link href="/osm/static/style.css" rel="stylesheet"> <title>OSM Login</title> </head> <body> <div id="osm_header"> <div> <a href="https://osm.etsi.org"> <h1><img src="/osm/static/OSM-logo.png" style="vertical-align:middle"></h1> </a> </div> </div> <div id="osm_error_message"> <h1>{error}</h1> </div> <div class="gerritBody" id="osm_body"> <h1>Sign in to OSM</h1> <form action="/osm/token/v1" id="login_form" method="POST"> <table style="border: 0;"> <tr><th>Username</th><td><input id="f_user" name="username" size="25" tabindex="1" type="text"></td></tr> <tr><th>Password</th><td><input id="f_pass" name="password" size="25" tabindex="2" type="password"></td></tr> <tr><td><input tabindex="3" type="submit" value="Sign In"></td></tr> </table> </form> <div style="clear: both; margin-top: 15px; padding-top: 2px; margin-bottom: 15px;"> <div id="osm_footer"> <div></div> </div> </div> </div> <script src="/osm/static/login.js"> </script> </body> </html> """ html_upload_body = """ <form action="/osm{}" method="post" enctype="multipart/form-data"> <h3> <table style="border: 0;"> <tr> <td> Upload {} descriptor (tar.gz) file: <input type="file" name="descriptor_file"/> </td> <td> <input type="submit" value="Upload"/> </td> </tr> </table> </h3> </form> """ def format(data, request, response, session): """ Format a nice html response, depending on the data :param data: :param request: cherrypy request :param response: cherrypy response :return: string with teh html response """ response.headers["Content-Type"] = 'text/html' if response.status == HTTPStatus.UNAUTHORIZED.value: if response.headers.get("WWW-Authenticate") and request.config.get("auth.allow_basic_authentication"): response.headers["WWW-Authenticate"] = "Basic" + response.headers["WWW-Authenticate"][6:] return else: return html_auth2.format(error=data) body = html_body.format(item=request.path_info) if response.status and response.status > 202: body += html_body_error.format(yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False)) elif isinstance(data, (list, tuple)): if request.path_info == "/vnfpkgm/v1/vnf_packages": body += html_upload_body.format("VNFD", request.path_info) elif request.path_info == "/nsd/v1/ns_descriptors": body += html_upload_body.format("NSD", request.path_info) for k in data: data_id = k.pop("_id", None) body += '<p> <a href="/osm/{url}/{id}">{id}</a>: {t} </p>'.format(url=request.path_info, id=data_id, t=k) elif 
isinstance(data, dict): if "Location" in response.headers: body += '<a href="{}"> show </a>'.format(response.headers["Location"]) else: body += '<a href="/osm/{}?METHOD=DELETE"> <img src="/osm/static/delete.png" height="25" width="25"> </a>'.format(request.path_info) body += "<pre>" + yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False) + "</pre>" else: body = str(data) user_text = " " if session: if session.get("username"): user_text += "user: {}".format(session.get("username")) if session.get("project_id"): user_text += ", project: {}".format(session.get("project_id")) return html_start.format(user_text) + body + html_end #yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False) # tags=False, # encoding='utf-8', allow_unicode=True)
35.442029
143
0.611736
b5b21d3eb048ac0efa766614a0dcbdc3ce293264
6,068
py
Python
pointcloud/run/zand/deep.py
stpsomad/thesis
e7ef1d1ab2118e95cfda7e98a0e952c8d1c393d0
[ "0BSD" ]
1
2019-03-18T05:50:41.000Z
2019-03-18T05:50:41.000Z
pointcloud/run/zand/deep.py
stpsomad/thesis
e7ef1d1ab2118e95cfda7e98a0e952c8d1c393d0
[ "0BSD" ]
null
null
null
pointcloud/run/zand/deep.py
stpsomad/thesis
e7ef1d1ab2118e95cfda7e98a0e952c8d1c393d0
[ "0BSD" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Mon Apr 04 14:38:34 2016 @author: Stella Psomadaki """ from pointcloud.AbstractBulkLoader import BulkLoader from pointcloud.AbstractQuerier import Querier import time from tabulate import tabulate import pointcloud.oracleTools as ora import os ########################### dataset = 'zandmotor' integrations = ['dxyt', 'dxyzt'] scaling = '10000' repeatQueries = 6 parallels = [0, 8] fresh_reloads = [True, False] maxRanges = [200, 1000000] ########################### if dataset == 'zandmotor': bench = 3 elif dataset == 'coastline': bench = 4 path = os.getcwd() benchmark = ['mini', 'medium', 'full'] hloading = ['approach', 'preparation', 'loading', 'closing', 'size[MB]', 'points'] hquery = ["id", "prep.", 'insert', 'ranges', 'Levels', 'fetching', "decoding", 'storing', "Appr.pts", "Fin.pts", "FinFilt", "time", 'extra%', 'total'] fh = open('integrated_{0}.txt'.format(time.strftime("%d%m%Y")), 'a') fh.write('Benchmark executed on \n') fh.write(time.strftime("%d/%m/%Y")) fh.write('\n') fh.write( """CASE: Integrated approach (deep) with scale of 10,000 Different approaches examined are: * z as an aatribute and as part of the morton key * parallel execution of 8 and no parallel * fresh reload of the datasets and not * querying with max_Ranges 200 (for comparison) and 1,000,000 The queries are repeated 6 times --START--\n\n\n""") for fresh_reload in fresh_reloads: for parallel in parallels: for integr in integrations: loadings = [] queries = [] for i in range(1,bench + 1): #================================================================ # Loading Phase #================================================================ configuration = path + '/ini/' + dataset + '/' + integr + '_' + scaling + "_{0}_{1}".format(parallel, fresh_reload) + '_part' + str(i) + '.ini' bulk = BulkLoader(configuration) connection = bulk.getConnection() cursor = connection.cursor() if i == 1: cursor.execute('SELECT table_name FROM all_tables WHERE table_name = :1',[bulk.iotTableName.upper(),]) length = len(cursor.fetchall()) if length: cursor.execute("DROP TABLE " + bulk.iotTableName + " PURGE") cursor.execute('SELECT table_name FROM all_tables WHERE table_name = :1',[bulk.metaTable.upper(),]) length = len(cursor.fetchall()) if length: cursor.execute("DROP TABLE " + bulk.metaTable + " PURGE") loading = [] loading.append(benchmark[i - 1]) start = time.time() bulk.preparation() loading.append(round(time.time() - start, 2)) start = time.time() bulk.loading() loading.append(round(time.time() - start, 2)) start = time.time() bulk.closing() loading.append(round(time.time() - start, 2)) size, points = bulk.statistics() loading.append(round(size,2)) loading.append(int(points)) loadings.append(loading) #================================================================ # Querying Phase #================================================================ querier = Querier(configuration) connection = querier.getConnection() cursor = connection.cursor() cursor.execute('SELECT table_name FROM all_tables WHERE table_name = :1',[querier.queriesTable.upper(),]) length = len(cursor.fetchall()) if not length: os.system('python -m pointcloud.queryTab {0}'.format(configuration)) for maxRange in maxRanges: querier.maxRanges = maxRange sublist = [] for num in querier.ids: for j in range(repeatQueries): start = time.time() lst = querier.query(num) lst.append(round(time.time() - start, 2)) lst.append(round((lst[7] - lst[8])/float(lst[8])*100,2)) lst.append(round(lst[1]+lst[4]+lst[5]+lst[6]+lst[9],2)) lst.insert(0, num) 
sublist.append(lst) ora.dropTable(cursor, querier.queryTable + '_' + str(num)) ora.dropTable(cursor, querier.rangeTable + str(num)) queries.append(sublist) print 'maximum ranges: {0}\n'.format(maxRange) print tabulate(sublist, hquery, tablefmt="plain") print print tabulate(loadings, hloading, tablefmt="plain") for i in queries: print print tabulate(i, hquery, tablefmt="plain") fh.write('integration: {0}\nreload:{1}\nparallel:{2}\n\n'.format(integr, fresh_reload, parallel)) fh.write('\n---LOADING---\n') fh.write(tabulate(loadings, hloading, tablefmt="plain")) fh.write('\n') fh.write('\n---QUERYING---\n') for i in queries: fh.write(tabulate(i, hquery, tablefmt="plain")) fh.write('\n') fh.write('\n') fh.close()
41.848276
160
0.464733
32a2ea40b4a6f3956a720c6884e9d44e9ddecb65
192
py
Python
normal_distribution.py
rein-chihaya/dise_and_normal_distribution
b0f642c1900da66f3d57b3ab0f6556a3d41455b1
[ "MIT" ]
null
null
null
normal_distribution.py
rein-chihaya/dise_and_normal_distribution
b0f642c1900da66f3d57b3ab0f6556a3d41455b1
[ "MIT" ]
null
null
null
normal_distribution.py
rein-chihaya/dise_and_normal_distribution
b0f642c1900da66f3d57b3ab0f6556a3d41455b1
[ "MIT" ]
null
null
null
#coding: UTF-8
from scipy import stats
from numpy.random import *
import matplotlib.pyplot as plt

NUM = 10000

# Specify the range of values to plot
r = randn(NUM)

# Variable for the horizontal axis; counts for the vertical axis.
plt.hist(r, bins=100)

# Render the plot
plt.show()
12.8
31
0.71875
bd5e92687989b5c1fbcc842489791723ead234ea
1,026
py
Python
whoahqa/views/__init__.py
onaio/who-adolescent-hqa
108a7e60b025d0723247f5f02eab2c4d41f5a02a
[ "Apache-2.0" ]
null
null
null
whoahqa/views/__init__.py
onaio/who-adolescent-hqa
108a7e60b025d0723247f5f02eab2c4d41f5a02a
[ "Apache-2.0" ]
2
2018-01-09T08:58:11.000Z
2019-01-18T09:20:14.000Z
whoahqa/views/__init__.py
onaio/who-adolescent-hqa
108a7e60b025d0723247f5f02eab2c4d41f5a02a
[ "Apache-2.0" ]
null
null
null
from whoahqa.views.auth import oauth_authorize, oauth_callback # noqa from whoahqa.views.clinics import ClinicViews # noqa from whoahqa.views.default_views import default # noqa from whoahqa.views.default_views import set_locale # noqa from whoahqa.views.request_methods import get_request_user, can_list_clinics # noqa from whoahqa.views.request_methods import can_view_clinics # noqa from whoahqa.views.request_methods import is_super_user # noqa from whoahqa.views.request_methods import can_access_clinics # noqa from whoahqa.views.request_methods import can_view_municipality # noqa from whoahqa.views.request_methods import can_create_period # noqa from whoahqa.views.request_methods import can_view_state # noqa from whoahqa.views.request_methods import can_list_state # noqa from whoahqa.views.submissions import SubmissionViews # noqa from whoahqa.views.users import UserViews # noqa from whoahqa.views.municipalities import MunicipalityViews # noqa from whoahqa.views.states import StateViews # noqa
60.352941
84
0.840156
87835a70373a4e9a0e558de357614f8b141e9a9f
2,615
py
Python
pytorch3d/renderer/mesh/renderer.py
martinruenz/pytorch3d
7f1e63aed1252ba8145d4a66ce2272331d60cdae
[ "BSD-3-Clause" ]
3
2022-03-09T08:12:54.000Z
2022-03-10T01:57:03.000Z
pytorch3d/renderer/mesh/renderer.py
martinruenz/pytorch3d
7f1e63aed1252ba8145d4a66ce2272331d60cdae
[ "BSD-3-Clause" ]
null
null
null
pytorch3d/renderer/mesh/renderer.py
martinruenz/pytorch3d
7f1e63aed1252ba8145d4a66ce2272331d60cdae
[ "BSD-3-Clause" ]
1
2020-11-27T11:52:45.000Z
2020-11-27T11:52:45.000Z
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.


import torch
import torch.nn as nn

from .rasterizer import Fragments
from .utils import _clip_barycentric_coordinates, _interpolate_zbuf


# A renderer class should be initialized with a
# function for rasterization and a function for shading.
# The rasterizer should:
#     - transform inputs from world -> screen space
#     - rasterize inputs
#     - return fragments
# The shader can take fragments as input along with any other properties of
# the scene and generate images.

# E.g. rasterize inputs and then shade
#
# fragments = self.rasterize(meshes)
# images = self.shader(fragments, meshes)
# return images


class MeshRenderer(nn.Module):
    """
    A class for rendering a batch of heterogeneous meshes. The class should
    be initialized with a rasterizer and shader class which each have a forward
    function.
    """

    def __init__(self, rasterizer, shader):
        super().__init__()
        self.rasterizer = rasterizer
        self.shader = shader

    def forward(self, meshes_world, **kwargs) -> torch.Tensor:
        """
        Render a batch of images from a batch of meshes by rasterizing and then
        shading.

        NOTE: If the blur radius for rasterization is > 0.0, some pixels can
        have one or more barycentric coordinates lying outside the range [0, 1].
        For a pixel with out of bounds barycentric coordinates with respect to a
        face f, clipping is required before interpolating the texture uv
        coordinates and z buffer so that the colors and depths are limited to
        the range for the corresponding face.
        """
        fragments = self.rasterizer(meshes_world, **kwargs)
        raster_settings = kwargs.get("raster_settings", self.rasterizer.raster_settings)
        if raster_settings.blur_radius > 0.0:
            # TODO: potentially move barycentric clipping to the rasterizer
            # if no downstream function requires unclipped values.
            # This will avoid unnecessary re-interpolation of the z buffer.
            clipped_bary_coords = _clip_barycentric_coordinates(fragments.bary_coords)
            clipped_zbuf = _interpolate_zbuf(
                fragments.pix_to_face, clipped_bary_coords, meshes_world
            )
            fragments = Fragments(
                bary_coords=clipped_bary_coords,
                zbuf=clipped_zbuf,
                dists=fragments.dists,
                pix_to_face=fragments.pix_to_face,
            )
        images = self.shader(fragments, meshes_world, **kwargs)
        return images
37.357143
88
0.68413
66dfd417be656f6e291d02b002a4836fb1b00eb3
2,982
py
Python
torchflare/experiments/scheduler_utilities.py
earlbabson/torchflare
15db06d313a53a3ec4640869335ba87730562b28
[ "Apache-2.0" ]
1
2021-04-28T19:57:57.000Z
2021-04-28T19:57:57.000Z
torchflare/experiments/scheduler_utilities.py
earlbabson/torchflare
15db06d313a53a3ec4640869335ba87730562b28
[ "Apache-2.0" ]
null
null
null
torchflare/experiments/scheduler_utilities.py
earlbabson/torchflare
15db06d313a53a3ec4640869335ba87730562b28
[ "Apache-2.0" ]
null
null
null
"""Implements scheduler utilities.""" import torch import transformers # Structure of dictionary is as follows: # 'key' -> scheduler , 'step_on_batch' -> whether to step after batch. # Set True is step is after batch. scheduler_step = { "LambdaLR": False, "MultiplicativeLR": False, "StepLR": False, "MultiStepLR": False, "ExponentialLR": False, "CosineAnnealingLR": True, "ReduceLROnPlateau": False, "CyclicLR": True, "OneCycleLR": True, "CosineAnnealingWarmRestarts": True, "get_constant_schedule": True, "get_constant_schedule_with_warmup": True, "get_cosine_schedule_with_warmup": True, "get_cosine_with_hard_restarts_schedule_with_warmup": True, "get_linear_schedule_with_warmup": True, "get_polynomial_decay_schedule_with_warmup": True, } def get_scheduler(scheduler): """Method to get scheduler from pytorch/transformers. Args: scheduler: The scheduler to be used. Returns: scheduler. Raises: ValueError: If scheduler is not found raises value error. """ if isinstance(scheduler, str): try: if scheduler.startswith("get_"): sch = getattr(transformers, scheduler.lower()) else: dir_sch = dir(torch.optim.lr_scheduler) opts = [o.lower() for o in dir_sch] str_idx = opts.index(scheduler.lower()) sch = getattr(torch.optim.lr_scheduler, dir_sch[str_idx]) return sch except ValueError: raise ValueError( "Invalid scheduler string input, must match schedulers available in pytorch or transformers" ) elif hasattr(scheduler, "step"): return scheduler else: raise ValueError("Invalid scheduler input") class LRScheduler: """Class around standard scheduler to decide when to step.""" def __init__(self, scheduler, **kwargs): """Constructor method. Args: scheduler : The scheduler. **kwargs: named arguments for a scheduler. """ self.scheduler = get_scheduler(scheduler)(**kwargs) self.step_on_batch = scheduler_step[scheduler] self.exp = None def set_experiment(self, exp): # noqa self.exp = exp def _scheduler_step(self): if isinstance(self.scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau): val = self.exp.exp_logs.get(self.exp.val_key + self.exp.main_metic) self.scheduler.step(val) else: self.scheduler.step() def step(self, current_state): """Method to perform the scheduler step. Args: current_state: The current state of experiment. """ if self.step_on_batch and "batch" in current_state.value: self._scheduler_step() elif self.step_on_batch is False and "epoch" in current_state.value: self._scheduler_step()
28.4
108
0.634474
4fafb83c6bf1e718418a2af65d4ba403297e76bc
7,190
py
Python
platform.py
Mar10us/chubby75
36dd1c9e1f347aff6c2f8342f2ef24e135012cf7
[ "CC0-1.0" ]
2
2019-10-07T06:10:28.000Z
2020-06-17T21:41:54.000Z
platform.py
Mar10us/chubby75
36dd1c9e1f347aff6c2f8342f2ef24e135012cf7
[ "CC0-1.0" ]
null
null
null
platform.py
Mar10us/chubby75
36dd1c9e1f347aff6c2f8342f2ef24e135012cf7
[ "CC0-1.0" ]
null
null
null
from litex.build.generic_platform import * from litex.build.xilinx import XilinxPlatform _io = [ # clock ("clk25", 0, Pins("M9"), IOStandard("LVCMOS33")), # led ("user_led", 0, Pins("F7"), IOStandard("LVCMOS33")), # serial ("serial", 0, Subsignal("tx", Pins("H5")), Subsignal("rx", Pins("G6")), IOStandard("LVCMOS33") ), # ethernet ("eth_clocks", 0, Subsignal("tx", Pins("D1")), Subsignal("rx", Pins("F1")), IOStandard("LVCMOS33") ), ("eth", 0, Subsignal("rx_ctl", Pins("H1")), Subsignal("rx_data", Pins("F2 F4 G1 G3")), Subsignal("tx_ctl", Pins("E4")), Subsignal("tx_data", Pins("E3 E2 E1 F3")), IOStandard("LVCMOS33") ), ("eth_clocks", 1, Subsignal("tx", Pins("J1")), Subsignal("rx", Pins("K3")), IOStandard("LVCMOS33") ), ("eth", 1, Subsignal("rx_ctl", Pins("M3")), Subsignal("rx_data", Pins("L1 L3 M1 M2")), Subsignal("tx_ctl", Pins("H2")), Subsignal("tx_data", Pins("J3 K1 K2 H3")), IOStandard("LVCMOS33") ), # sdram ("sdram_clock", 0, Pins("K11"), IOStandard("LVCMOS33"), Misc("SLEW=FAST")), ("sdram_clock", 1, Pins("K12"), IOStandard("LVCMOS33"), Misc("SLEW=FAST")), ("sdram", 0, Subsignal("a", Pins("L16 M14 M16 K14 J12 J13 J11 H13 H11 G12")), Subsignal("dq", Pins( "C15 C16 D14 E15 E16 F14 F16 G14", "G11 E12 H14 G16 F15 D16 B16 B15", "N16 P16 P15 R15 R16 R14 T14 R12", "T12 T13 T15 M13 N14 M15 L12 L13")), Subsignal("we_n", Pins("H16")), Subsignal("ras_n", Pins("J14")), Subsignal("cas_n", Pins("H15")), Subsignal("cs_n", Pins("J16")), Subsignal("ba", Pins("K16 K15")), IOStandard("LVCMOS33"), Misc("SLEW=FAST") ), # Direction pin for buffers U600 to U607. 1 is input, 0 is output. ("bufdir", 0, Pins("F13"), IOStandard("LVCMOS33")), ] _connectors = [ # Lower connector on board. Pin 1 marked with silkscreen layer, pins then # alternating through the two rows of the connector. ("J600", { # Buffered through U610, shared with J601 4: "J6", # Buffered through U608, shared with J601 6: "A11", # Buffered through U600 7: "P4", 8: "R1", 9: "M4", 10: "L5", 11: "M5", 12: "K6", 13: "T4", 14: "P5", # Buffered through U604 15: "P6", 16: "M7", 17: "N6", 18: "M6", 19: "L7", 20: "L8", 21: "P7", 22: "N8", # Buffered through U601 23: "M12", 24: "N11", 25: "M11", 26: "M10", 27: "L10", 28: "N9", 29: "P11", 30: "T11", # Buffered through U605 31: "R9", 32: "T9", 33: "T8", 34: "R7", 35: "T7", 36: "T6", 37: "R5", 38: "T5", # Buffered through U608, shared with J601 39: "A12", 40: "B12", 41: "A13", 42: "C13", 43: "A14", 44: "B14", 45: "C11", # Shared with J601 47: "E13", }), # Upper connector on board. Same numbering as J600. 
("J601", { # Buffered through U610, shared with J601 4: "J6", # Buffered through U609, shared with J601 6: "A11", # Buffered through U603 7: "D3", 8: "C3", 9: "B3", 10: "D5", 11: "A4", 12: "B2", 13: "A2", 14: "A3", # Buffered through U607 15: "A5", 16: "A6", 17: "A7", 18: "A8", 19: "B8", 20: "A9", 21: "A10", 22: "B10", # Buffered through U602 23: "E11", 24: "D12", 25: "D11", 26: "E10", 27: "D9", 28: "F9", 29: "D8", 30: "E8", # Buffered through U606 31: "E7", 32: "D6", 33: "E6", 34: "C9", 35: "C8", 36: "C7", 37: "C6", 38: "B6", # Buffered through U609, shared with J600 39: "A12", 40: "B12", 41: "A13", 42: "C13", 43: "A14", 44: "B14", 45: "C11", # Shared with J600 47: "E13", }) ] # Extension for HUB75e 'hat' (marked "Huidu Hub75E-10 Support 1/32 ") hub75e = [ ("hub75_control", 0, # bank select (a, b, c, d, e) Subsignal("bank", Pins("J601:42 J601:41 J601:40 J601:39 J600:6")), Subsignal("oe", Pins("J600:45")), Subsignal("stb", Pins("J601:43")), Subsignal("clk", Pins("J601:44")), IOStandard("LVCMOS33"), ), # J1 ("hub75_chain", 0, Subsignal("r", Pins("J601:38 J601:35")), Subsignal("g", Pins("J601:37 J601:34")), Subsignal("b", Pins("J601:36 J601:33")), IOStandard("LVCMOS33"), ), # J2 ("hub75_chain", 1, Subsignal("r", Pins("J601:32 J601:29")), Subsignal("g", Pins("J601:31 J601:28")), Subsignal("b", Pins("J601:30 J601:27")), IOStandard("LVCMOS33"), ), # J3 ("hub75_chain", 2, Subsignal("r", Pins("J601:26 J601:23")), Subsignal("g", Pins("J601:25 J601:22")), Subsignal("b", Pins("J601:24 J601:21")), IOStandard("LVCMOS33"), ), # J4 ("hub75_chain", 3, Subsignal("r", Pins("J601:20 J601:17")), Subsignal("g", Pins("J601:19 J601:16")), Subsignal("b", Pins("J601:18 J601:15")), IOStandard("LVCMOS33"), ), # J5 ("hub75_chain", 4, Subsignal("r", Pins("J601:14 J601:11")), Subsignal("g", Pins("J601:13 J601:10")), Subsignal("b", Pins("J601:12 J601:9")), IOStandard("LVCMOS33"), ), # J6 ("hub75_chain", 5, Subsignal("r", Pins("J600:38 J600:35")), Subsignal("g", Pins("J600:37 J600:34")), Subsignal("b", Pins("J600:36 J600:33")), IOStandard("LVCMOS33"), ), # J7 ("hub75_chain", 6, Subsignal("r", Pins("J600:32 J600:29")), Subsignal("g", Pins("J600:31 J600:28")), Subsignal("b", Pins("J600:30 J600:27")), IOStandard("LVCMOS33"), ), # J8 ("hub75_chain", 7, Subsignal("r", Pins("J600:26 J600:23")), Subsignal("g", Pins("J600:25 J600:22")), Subsignal("b", Pins("J600:24 J600:21")), IOStandard("LVCMOS33"), ), # J9 ("hub75_chain", 8, Subsignal("r", Pins("J600:20 J600:17")), Subsignal("g", Pins("J600:19 J600:16")), Subsignal("b", Pins("J600:18 J600:15")), IOStandard("LVCMOS33"), ), # J10 ("hub75_chain", 9, Subsignal("r", Pins("J600:14 J600:11")), Subsignal("g", Pins("J600:13 J600:10")), Subsignal("b", Pins("J600:12 J600:9")), IOStandard("LVCMOS33"), ), ] class Platform(XilinxPlatform): default_clk_name = "clk25" default_clk_period = 40.00 def __init__(self): XilinxPlatform.__init__(self, "xc6slx16-2-ftg256", _io, _connectors)
25.770609
79
0.474826
8634c7a3a4ae82687c7244611cced79086aa95cd
71
py
Python
tgbot/__main__.py
dannofx/tgbot
2c6a69f11359121c533fa4867bb38a6fbd63601b
[ "BSD-3-Clause" ]
4
2015-09-16T19:08:55.000Z
2021-02-08T16:11:10.000Z
tgbot/__main__.py
dannofx/tgbot
2c6a69f11359121c533fa4867bb38a6fbd63601b
[ "BSD-3-Clause" ]
null
null
null
tgbot/__main__.py
dannofx/tgbot
2c6a69f11359121c533fa4867bb38a6fbd63601b
[ "BSD-3-Clause" ]
2
2015-09-16T19:16:54.000Z
2015-09-16T20:40:02.000Z
if __name__ == '__main__': from tgbot.tgbot import main main()
17.75
32
0.647887
26ff2e4a404504cc38268da688d1e9c50b785022
436
py
Python
TRANSFORM/Resources/Scripts/jModelica/HeatAndMassTransfer/Examples/ExamplesFrom_NellisAndKlein/Example_1_8_1_PipeInARoof/part_a_UseExtendedSurface.py
greenwoodms/TRANSFORM-Library
dc152d4f0298d3f18385f2ea33645d87d7812915
[ "Apache-2.0" ]
29
2018-04-24T17:06:19.000Z
2021-11-21T05:17:28.000Z
TRANSFORM/Resources/Scripts/jModelica/HeatAndMassTransfer/Examples/ExamplesFrom_NellisAndKlein/Example_1_8_1_PipeInARoof/part_a_UseExtendedSurface.py
greenwoodms/TRANSFORM-Library
dc152d4f0298d3f18385f2ea33645d87d7812915
[ "Apache-2.0" ]
13
2018-04-05T08:34:27.000Z
2021-10-04T14:24:41.000Z
TRANSFORM/Resources/Scripts/jModelica/HeatAndMassTransfer/Examples/ExamplesFrom_NellisAndKlein/Example_1_8_1_PipeInARoof/part_a_UseExtendedSurface.py
greenwoodms/TRANSFORM-Library
dc152d4f0298d3f18385f2ea33645d87d7812915
[ "Apache-2.0" ]
17
2018-08-06T22:18:01.000Z
2022-01-29T21:38:17.000Z
from pymodelica import compile_fmu from pyfmi import load_fmu libPath = r'C:\Users\vmg\Documents\Modelica\TRANSFORM-Library/TRANSFORM' modelName = 'TRANSFORM.HeatAndMassTransfer.Examples.ExamplesFrom_NellisAndKlein.Example_1_8_1_PipeInARoof.part_a_UseExtendedSurface' fmu = compile_fmu(modelName,libPath,target='cs') model = load_fmu(fmu) opts = model.simulate_options() opts['time_limit'] = 60 results=model.simulate(options=opts)
31.142857
132
0.827982