prompt: large_string, lengths 72–9.34k
completion: large_string, lengths 0–7.61k
<|file_name|>winprocess.py<|end_file_name|><|fim▁begin|>""" Windows Process Control winprocess.run launches a child process and returns the exit code. Optionally, it can: redirect stdin, stdout & stderr to files run the command as another user limit the process's running time control the process window (location, size, window state, desktop) Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 extensions. This code is free for any purpose, with no warranty of any kind. -- John B. Dell'Aquila <[email protected]> """ import win32api, win32process, win32security import win32event, win32con, msvcrt, win32gui def logonUser(loginString): """ Login as specified user and return handle. loginString: 'Domain\nUser\nPassword'; for local login use . or empty string as domain e.g. '.\nadministrator\nsecret_password' """ domain, user, passwd = loginString.split('\n') return win32security.LogonUser( user, domain, passwd, win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT ) class Process: """ A Windows process. """ def __init__(self, cmd, login=None, hStdin=None, hStdout=None, hStderr=None, show=1, xy=None, xySize=None, desktop=None): """ Create a Windows process. cmd: command to run login: run as user 'Domain\nUser\nPassword' hStdin, hStdout, hStderr: handles for process I/O; default is caller's stdin, stdout & stderr show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) xy: window offset (x, y) of upper left corner in pixels xySize: window size (width, height) in pixels desktop: lpDesktop - name of desktop e.g. 'winsta0\\default' None = inherit current desktop '' = create new desktop if necessary User calling login requires additional privileges: Act as part of the operating system [not needed on Windows XP] Increase quotas Replace a process level token Login string must EITHER be an administrator's account (ordinary user can't access current desktop - see Microsoft Q165194) OR use desktop='' to run another desktop invisibly (may be very slow to startup & finalize). """ si = win32process.STARTUPINFO() si.dwFlags = (win32con.STARTF_USESTDHANDLES ^ win32con.STARTF_USESHOWWINDOW) if hStdin is None: si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) else: si.hStdInput = hStdin if hStdout is None: si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) else: si.hStdOutput = hStdout if hStderr is None: si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) else: si.hStdError = hStderr si.wShowWindow = show if xy is not None: si.dwX, si.dwY = xy si.dwFlags ^= win32con.STARTF_USEPOSITION if xySize is not None: si.dwXSize, si.dwYSize = xySize si.dwFlags ^= win32con.STARTF_USESIZE if desktop is not None: si.lpDesktop = desktop procArgs = (None, # appName cmd, # commandLine None, # processAttributes None, # threadAttributes 1, # bInheritHandles win32process.CREATE_NEW_CONSOLE, # dwCreationFlags None, # newEnvironment None, # currentDirectory si) # startupinfo if login is not None: hUser = logonUser(login) win32security.ImpersonateLoggedOnUser(hUser) procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) win32security.RevertToSelf() else: procHandles = win32process.CreateProcess(*procArgs) self.hProcess, self.hThread, self.PId, self.TId = procHandles def <|fim_middle|>(self, mSec=None): """ Wait for process to finish or for specified number of milliseconds to elapse. """ if mSec is None: mSec = win32event.INFINITE return win32event.WaitForSingleObject(self.hProcess, mSec) def kill(self, gracePeriod=5000): """ Kill process. 
Try for an orderly shutdown via WM_CLOSE. If still running after gracePeriod (5 sec. default), terminate. """ win32gui.EnumWindows(self.__close__, 0) if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: win32process.TerminateProcess(self.hProcess, 0) win32api.Sleep(100) # wait for resources to be released def __close__(self, hwnd, dummy): """ EnumWindows callback - sends WM_CLOSE to any window owned by this process. """ TId, PId = win32process.GetWindowThreadProcessId(hwnd) if PId == self.PId: win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) def exitCode(self): """ Return process exit code. """ return win32process.GetExitCodeProcess(self.hProcess) def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): """ Run cmd as a child process and return exit code. mSec: terminate cmd after specified number of milliseconds stdin, stdout, stderr: file objects for child I/O (use hStdin etc. to attach handles instead of files); default is caller's stdin, stdout & stderr; kw: see Process.__init__ for more keyword options """ if stdin is not None: kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno()) if stdout is not None: kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno()) if stderr is not None: kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno()) child = Process(cmd, **kw) if child.wait(mSec) != win32event.WAIT_OBJECT_0: child.kill() raise WindowsError, 'process timeout exceeded' return child.exitCode() if __name__ == '__main__': # Pipe commands to a shell and display the output in notepad print 'Testing winprocess.py...' import tempfile timeoutSeconds = 15 cmdString = """\ REM Test of winprocess.py piping commands to a shell.\r REM This window will close in %d seconds.\r vol\r net user\r _this_is_a_test_of_stderr_\r """ % timeoutSeconds cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile() cmd.write(cmdString) cmd.seek(0) print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd, stdout=out, stderr=out) cmd.close() print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name, show=win32con.SW_MAXIMIZE, mSec=timeoutSeconds*1000) out.close() <|fim▁end|>
wait
<|file_name|>winprocess.py<|end_file_name|><|fim▁begin|>""" Windows Process Control winprocess.run launches a child process and returns the exit code. Optionally, it can: redirect stdin, stdout & stderr to files run the command as another user limit the process's running time control the process window (location, size, window state, desktop) Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 extensions. This code is free for any purpose, with no warranty of any kind. -- John B. Dell'Aquila <[email protected]> """ import win32api, win32process, win32security import win32event, win32con, msvcrt, win32gui def logonUser(loginString): """ Login as specified user and return handle. loginString: 'Domain\nUser\nPassword'; for local login use . or empty string as domain e.g. '.\nadministrator\nsecret_password' """ domain, user, passwd = loginString.split('\n') return win32security.LogonUser( user, domain, passwd, win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT ) class Process: """ A Windows process. """ def __init__(self, cmd, login=None, hStdin=None, hStdout=None, hStderr=None, show=1, xy=None, xySize=None, desktop=None): """ Create a Windows process. cmd: command to run login: run as user 'Domain\nUser\nPassword' hStdin, hStdout, hStderr: handles for process I/O; default is caller's stdin, stdout & stderr show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) xy: window offset (x, y) of upper left corner in pixels xySize: window size (width, height) in pixels desktop: lpDesktop - name of desktop e.g. 'winsta0\\default' None = inherit current desktop '' = create new desktop if necessary User calling login requires additional privileges: Act as part of the operating system [not needed on Windows XP] Increase quotas Replace a process level token Login string must EITHER be an administrator's account (ordinary user can't access current desktop - see Microsoft Q165194) OR use desktop='' to run another desktop invisibly (may be very slow to startup & finalize). """ si = win32process.STARTUPINFO() si.dwFlags = (win32con.STARTF_USESTDHANDLES ^ win32con.STARTF_USESHOWWINDOW) if hStdin is None: si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) else: si.hStdInput = hStdin if hStdout is None: si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) else: si.hStdOutput = hStdout if hStderr is None: si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) else: si.hStdError = hStderr si.wShowWindow = show if xy is not None: si.dwX, si.dwY = xy si.dwFlags ^= win32con.STARTF_USEPOSITION if xySize is not None: si.dwXSize, si.dwYSize = xySize si.dwFlags ^= win32con.STARTF_USESIZE if desktop is not None: si.lpDesktop = desktop procArgs = (None, # appName cmd, # commandLine None, # processAttributes None, # threadAttributes 1, # bInheritHandles win32process.CREATE_NEW_CONSOLE, # dwCreationFlags None, # newEnvironment None, # currentDirectory si) # startupinfo if login is not None: hUser = logonUser(login) win32security.ImpersonateLoggedOnUser(hUser) procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) win32security.RevertToSelf() else: procHandles = win32process.CreateProcess(*procArgs) self.hProcess, self.hThread, self.PId, self.TId = procHandles def wait(self, mSec=None): """ Wait for process to finish or for specified number of milliseconds to elapse. """ if mSec is None: mSec = win32event.INFINITE return win32event.WaitForSingleObject(self.hProcess, mSec) def <|fim_middle|>(self, gracePeriod=5000): """ Kill process. 
Try for an orderly shutdown via WM_CLOSE. If still running after gracePeriod (5 sec. default), terminate. """ win32gui.EnumWindows(self.__close__, 0) if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: win32process.TerminateProcess(self.hProcess, 0) win32api.Sleep(100) # wait for resources to be released def __close__(self, hwnd, dummy): """ EnumWindows callback - sends WM_CLOSE to any window owned by this process. """ TId, PId = win32process.GetWindowThreadProcessId(hwnd) if PId == self.PId: win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) def exitCode(self): """ Return process exit code. """ return win32process.GetExitCodeProcess(self.hProcess) def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): """ Run cmd as a child process and return exit code. mSec: terminate cmd after specified number of milliseconds stdin, stdout, stderr: file objects for child I/O (use hStdin etc. to attach handles instead of files); default is caller's stdin, stdout & stderr; kw: see Process.__init__ for more keyword options """ if stdin is not None: kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno()) if stdout is not None: kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno()) if stderr is not None: kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno()) child = Process(cmd, **kw) if child.wait(mSec) != win32event.WAIT_OBJECT_0: child.kill() raise WindowsError, 'process timeout exceeded' return child.exitCode() if __name__ == '__main__': # Pipe commands to a shell and display the output in notepad print 'Testing winprocess.py...' import tempfile timeoutSeconds = 15 cmdString = """\ REM Test of winprocess.py piping commands to a shell.\r REM This window will close in %d seconds.\r vol\r net user\r _this_is_a_test_of_stderr_\r """ % timeoutSeconds cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile() cmd.write(cmdString) cmd.seek(0) print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd, stdout=out, stderr=out) cmd.close() print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name, show=win32con.SW_MAXIMIZE, mSec=timeoutSeconds*1000) out.close() <|fim▁end|>
kill
<|file_name|>winprocess.py<|end_file_name|><|fim▁begin|>""" Windows Process Control winprocess.run launches a child process and returns the exit code. Optionally, it can: redirect stdin, stdout & stderr to files run the command as another user limit the process's running time control the process window (location, size, window state, desktop) Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 extensions. This code is free for any purpose, with no warranty of any kind. -- John B. Dell'Aquila <[email protected]> """ import win32api, win32process, win32security import win32event, win32con, msvcrt, win32gui def logonUser(loginString): """ Login as specified user and return handle. loginString: 'Domain\nUser\nPassword'; for local login use . or empty string as domain e.g. '.\nadministrator\nsecret_password' """ domain, user, passwd = loginString.split('\n') return win32security.LogonUser( user, domain, passwd, win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT ) class Process: """ A Windows process. """ def __init__(self, cmd, login=None, hStdin=None, hStdout=None, hStderr=None, show=1, xy=None, xySize=None, desktop=None): """ Create a Windows process. cmd: command to run login: run as user 'Domain\nUser\nPassword' hStdin, hStdout, hStderr: handles for process I/O; default is caller's stdin, stdout & stderr show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) xy: window offset (x, y) of upper left corner in pixels xySize: window size (width, height) in pixels desktop: lpDesktop - name of desktop e.g. 'winsta0\\default' None = inherit current desktop '' = create new desktop if necessary User calling login requires additional privileges: Act as part of the operating system [not needed on Windows XP] Increase quotas Replace a process level token Login string must EITHER be an administrator's account (ordinary user can't access current desktop - see Microsoft Q165194) OR use desktop='' to run another desktop invisibly (may be very slow to startup & finalize). """ si = win32process.STARTUPINFO() si.dwFlags = (win32con.STARTF_USESTDHANDLES ^ win32con.STARTF_USESHOWWINDOW) if hStdin is None: si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) else: si.hStdInput = hStdin if hStdout is None: si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) else: si.hStdOutput = hStdout if hStderr is None: si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) else: si.hStdError = hStderr si.wShowWindow = show if xy is not None: si.dwX, si.dwY = xy si.dwFlags ^= win32con.STARTF_USEPOSITION if xySize is not None: si.dwXSize, si.dwYSize = xySize si.dwFlags ^= win32con.STARTF_USESIZE if desktop is not None: si.lpDesktop = desktop procArgs = (None, # appName cmd, # commandLine None, # processAttributes None, # threadAttributes 1, # bInheritHandles win32process.CREATE_NEW_CONSOLE, # dwCreationFlags None, # newEnvironment None, # currentDirectory si) # startupinfo if login is not None: hUser = logonUser(login) win32security.ImpersonateLoggedOnUser(hUser) procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) win32security.RevertToSelf() else: procHandles = win32process.CreateProcess(*procArgs) self.hProcess, self.hThread, self.PId, self.TId = procHandles def wait(self, mSec=None): """ Wait for process to finish or for specified number of milliseconds to elapse. """ if mSec is None: mSec = win32event.INFINITE return win32event.WaitForSingleObject(self.hProcess, mSec) def kill(self, gracePeriod=5000): """ Kill process. 
Try for an orderly shutdown via WM_CLOSE. If still running after gracePeriod (5 sec. default), terminate. """ win32gui.EnumWindows(self.__close__, 0) if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: win32process.TerminateProcess(self.hProcess, 0) win32api.Sleep(100) # wait for resources to be released def <|fim_middle|>(self, hwnd, dummy): """ EnumWindows callback - sends WM_CLOSE to any window owned by this process. """ TId, PId = win32process.GetWindowThreadProcessId(hwnd) if PId == self.PId: win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) def exitCode(self): """ Return process exit code. """ return win32process.GetExitCodeProcess(self.hProcess) def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): """ Run cmd as a child process and return exit code. mSec: terminate cmd after specified number of milliseconds stdin, stdout, stderr: file objects for child I/O (use hStdin etc. to attach handles instead of files); default is caller's stdin, stdout & stderr; kw: see Process.__init__ for more keyword options """ if stdin is not None: kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno()) if stdout is not None: kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno()) if stderr is not None: kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno()) child = Process(cmd, **kw) if child.wait(mSec) != win32event.WAIT_OBJECT_0: child.kill() raise WindowsError, 'process timeout exceeded' return child.exitCode() if __name__ == '__main__': # Pipe commands to a shell and display the output in notepad print 'Testing winprocess.py...' import tempfile timeoutSeconds = 15 cmdString = """\ REM Test of winprocess.py piping commands to a shell.\r REM This window will close in %d seconds.\r vol\r net user\r _this_is_a_test_of_stderr_\r """ % timeoutSeconds cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile() cmd.write(cmdString) cmd.seek(0) print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd, stdout=out, stderr=out) cmd.close() print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name, show=win32con.SW_MAXIMIZE, mSec=timeoutSeconds*1000) out.close() <|fim▁end|>
__close__
<|file_name|>winprocess.py<|end_file_name|><|fim▁begin|>""" Windows Process Control winprocess.run launches a child process and returns the exit code. Optionally, it can: redirect stdin, stdout & stderr to files run the command as another user limit the process's running time control the process window (location, size, window state, desktop) Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 extensions. This code is free for any purpose, with no warranty of any kind. -- John B. Dell'Aquila <[email protected]> """ import win32api, win32process, win32security import win32event, win32con, msvcrt, win32gui def logonUser(loginString): """ Login as specified user and return handle. loginString: 'Domain\nUser\nPassword'; for local login use . or empty string as domain e.g. '.\nadministrator\nsecret_password' """ domain, user, passwd = loginString.split('\n') return win32security.LogonUser( user, domain, passwd, win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT ) class Process: """ A Windows process. """ def __init__(self, cmd, login=None, hStdin=None, hStdout=None, hStderr=None, show=1, xy=None, xySize=None, desktop=None): """ Create a Windows process. cmd: command to run login: run as user 'Domain\nUser\nPassword' hStdin, hStdout, hStderr: handles for process I/O; default is caller's stdin, stdout & stderr show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) xy: window offset (x, y) of upper left corner in pixels xySize: window size (width, height) in pixels desktop: lpDesktop - name of desktop e.g. 'winsta0\\default' None = inherit current desktop '' = create new desktop if necessary User calling login requires additional privileges: Act as part of the operating system [not needed on Windows XP] Increase quotas Replace a process level token Login string must EITHER be an administrator's account (ordinary user can't access current desktop - see Microsoft Q165194) OR use desktop='' to run another desktop invisibly (may be very slow to startup & finalize). """ si = win32process.STARTUPINFO() si.dwFlags = (win32con.STARTF_USESTDHANDLES ^ win32con.STARTF_USESHOWWINDOW) if hStdin is None: si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) else: si.hStdInput = hStdin if hStdout is None: si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) else: si.hStdOutput = hStdout if hStderr is None: si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) else: si.hStdError = hStderr si.wShowWindow = show if xy is not None: si.dwX, si.dwY = xy si.dwFlags ^= win32con.STARTF_USEPOSITION if xySize is not None: si.dwXSize, si.dwYSize = xySize si.dwFlags ^= win32con.STARTF_USESIZE if desktop is not None: si.lpDesktop = desktop procArgs = (None, # appName cmd, # commandLine None, # processAttributes None, # threadAttributes 1, # bInheritHandles win32process.CREATE_NEW_CONSOLE, # dwCreationFlags None, # newEnvironment None, # currentDirectory si) # startupinfo if login is not None: hUser = logonUser(login) win32security.ImpersonateLoggedOnUser(hUser) procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) win32security.RevertToSelf() else: procHandles = win32process.CreateProcess(*procArgs) self.hProcess, self.hThread, self.PId, self.TId = procHandles def wait(self, mSec=None): """ Wait for process to finish or for specified number of milliseconds to elapse. """ if mSec is None: mSec = win32event.INFINITE return win32event.WaitForSingleObject(self.hProcess, mSec) def kill(self, gracePeriod=5000): """ Kill process. 
Try for an orderly shutdown via WM_CLOSE. If still running after gracePeriod (5 sec. default), terminate. """ win32gui.EnumWindows(self.__close__, 0) if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: win32process.TerminateProcess(self.hProcess, 0) win32api.Sleep(100) # wait for resources to be released def __close__(self, hwnd, dummy): """ EnumWindows callback - sends WM_CLOSE to any window owned by this process. """ TId, PId = win32process.GetWindowThreadProcessId(hwnd) if PId == self.PId: win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) def <|fim_middle|>(self): """ Return process exit code. """ return win32process.GetExitCodeProcess(self.hProcess) def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): """ Run cmd as a child process and return exit code. mSec: terminate cmd after specified number of milliseconds stdin, stdout, stderr: file objects for child I/O (use hStdin etc. to attach handles instead of files); default is caller's stdin, stdout & stderr; kw: see Process.__init__ for more keyword options """ if stdin is not None: kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno()) if stdout is not None: kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno()) if stderr is not None: kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno()) child = Process(cmd, **kw) if child.wait(mSec) != win32event.WAIT_OBJECT_0: child.kill() raise WindowsError, 'process timeout exceeded' return child.exitCode() if __name__ == '__main__': # Pipe commands to a shell and display the output in notepad print 'Testing winprocess.py...' import tempfile timeoutSeconds = 15 cmdString = """\ REM Test of winprocess.py piping commands to a shell.\r REM This window will close in %d seconds.\r vol\r net user\r _this_is_a_test_of_stderr_\r """ % timeoutSeconds cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile() cmd.write(cmdString) cmd.seek(0) print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd, stdout=out, stderr=out) cmd.close() print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name, show=win32con.SW_MAXIMIZE, mSec=timeoutSeconds*1000) out.close() <|fim▁end|>
exitCode
<|file_name|>winprocess.py<|end_file_name|><|fim▁begin|>""" Windows Process Control winprocess.run launches a child process and returns the exit code. Optionally, it can: redirect stdin, stdout & stderr to files run the command as another user limit the process's running time control the process window (location, size, window state, desktop) Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 extensions. This code is free for any purpose, with no warranty of any kind. -- John B. Dell'Aquila <[email protected]> """ import win32api, win32process, win32security import win32event, win32con, msvcrt, win32gui def logonUser(loginString): """ Login as specified user and return handle. loginString: 'Domain\nUser\nPassword'; for local login use . or empty string as domain e.g. '.\nadministrator\nsecret_password' """ domain, user, passwd = loginString.split('\n') return win32security.LogonUser( user, domain, passwd, win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT ) class Process: """ A Windows process. """ def __init__(self, cmd, login=None, hStdin=None, hStdout=None, hStderr=None, show=1, xy=None, xySize=None, desktop=None): """ Create a Windows process. cmd: command to run login: run as user 'Domain\nUser\nPassword' hStdin, hStdout, hStderr: handles for process I/O; default is caller's stdin, stdout & stderr show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) xy: window offset (x, y) of upper left corner in pixels xySize: window size (width, height) in pixels desktop: lpDesktop - name of desktop e.g. 'winsta0\\default' None = inherit current desktop '' = create new desktop if necessary User calling login requires additional privileges: Act as part of the operating system [not needed on Windows XP] Increase quotas Replace a process level token Login string must EITHER be an administrator's account (ordinary user can't access current desktop - see Microsoft Q165194) OR use desktop='' to run another desktop invisibly (may be very slow to startup & finalize). """ si = win32process.STARTUPINFO() si.dwFlags = (win32con.STARTF_USESTDHANDLES ^ win32con.STARTF_USESHOWWINDOW) if hStdin is None: si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) else: si.hStdInput = hStdin if hStdout is None: si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) else: si.hStdOutput = hStdout if hStderr is None: si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) else: si.hStdError = hStderr si.wShowWindow = show if xy is not None: si.dwX, si.dwY = xy si.dwFlags ^= win32con.STARTF_USEPOSITION if xySize is not None: si.dwXSize, si.dwYSize = xySize si.dwFlags ^= win32con.STARTF_USESIZE if desktop is not None: si.lpDesktop = desktop procArgs = (None, # appName cmd, # commandLine None, # processAttributes None, # threadAttributes 1, # bInheritHandles win32process.CREATE_NEW_CONSOLE, # dwCreationFlags None, # newEnvironment None, # currentDirectory si) # startupinfo if login is not None: hUser = logonUser(login) win32security.ImpersonateLoggedOnUser(hUser) procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) win32security.RevertToSelf() else: procHandles = win32process.CreateProcess(*procArgs) self.hProcess, self.hThread, self.PId, self.TId = procHandles def wait(self, mSec=None): """ Wait for process to finish or for specified number of milliseconds to elapse. """ if mSec is None: mSec = win32event.INFINITE return win32event.WaitForSingleObject(self.hProcess, mSec) def kill(self, gracePeriod=5000): """ Kill process. 
Try for an orderly shutdown via WM_CLOSE. If still running after gracePeriod (5 sec. default), terminate. """ win32gui.EnumWindows(self.__close__, 0) if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: win32process.TerminateProcess(self.hProcess, 0) win32api.Sleep(100) # wait for resources to be released def __close__(self, hwnd, dummy): """ EnumWindows callback - sends WM_CLOSE to any window owned by this process. """ TId, PId = win32process.GetWindowThreadProcessId(hwnd) if PId == self.PId: win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) def exitCode(self): """ Return process exit code. """ return win32process.GetExitCodeProcess(self.hProcess) def <|fim_middle|>(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): """ Run cmd as a child process and return exit code. mSec: terminate cmd after specified number of milliseconds stdin, stdout, stderr: file objects for child I/O (use hStdin etc. to attach handles instead of files); default is caller's stdin, stdout & stderr; kw: see Process.__init__ for more keyword options """ if stdin is not None: kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno()) if stdout is not None: kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno()) if stderr is not None: kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno()) child = Process(cmd, **kw) if child.wait(mSec) != win32event.WAIT_OBJECT_0: child.kill() raise WindowsError, 'process timeout exceeded' return child.exitCode() if __name__ == '__main__': # Pipe commands to a shell and display the output in notepad print 'Testing winprocess.py...' import tempfile timeoutSeconds = 15 cmdString = """\ REM Test of winprocess.py piping commands to a shell.\r REM This window will close in %d seconds.\r vol\r net user\r _this_is_a_test_of_stderr_\r """ % timeoutSeconds cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile() cmd.write(cmdString) cmd.seek(0) print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd, stdout=out, stderr=out) cmd.close() print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name, show=win32con.SW_MAXIMIZE, mSec=timeoutSeconds*1000) out.close() <|fim▁end|>
run
<|file_name|>hash_util.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*- """ Desc: django util. Note: --------------------------------------- # 2016/04/30 kangtian created """ from hashlib import md5 def gen_md5(content_str):<|fim▁hole|><|fim▁end|>
m = md5() m.update(content_str) return m.hexdigest()
<|file_name|>hash_util.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*- """ Desc: django util. Note: --------------------------------------- # 2016/04/30 kangtian created """ from hashlib import md5 def gen_md5(content_str): <|fim_middle|> <|fim▁end|>
m = md5() m.update(content_str) return m.hexdigest()
<|file_name|>hash_util.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*- """ Desc: django util. Note: --------------------------------------- # 2016/04/30 kangtian created """ from hashlib import md5 def <|fim_middle|>(content_str): m = md5() m.update(content_str) return m.hexdigest() <|fim▁end|>
gen_md5
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by<|fim▁hole|># # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text)<|fim▁end|>
# the Free Software Foundation, either version 3 of the License, or # (at your option) any later version.
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): <|fim_middle|> def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
""" Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text))
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): <|fim_middle|> def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
""" Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text)
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): <|fim_middle|> def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
""" Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): <|fim_middle|> def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
""" Describe this matcher """ description.append('Control with label {0}'.format(self.text))
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): <|fim_middle|> def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
""" Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text))
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): <|fim_middle|> <|fim▁end|>
""" Check if Widget has label with given text """ return LabelMatcher(text)
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): <|fim_middle|> else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
self.text = text
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: <|fim_middle|> def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
self.text = wrap_matcher(text)
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): <|fim_middle|> return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
return True
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def <|fim_middle|>(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
__init__
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def <|fim_middle|>(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
_matches
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def <|fim_middle|>(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
describe_to
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def <|fim_middle|>(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def has_label(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
describe_mismatch
<|file_name|>label.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2012 Tuukka Turto # # This file is part of satin-python. # # pyherc is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyherc is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satin-python. If not, see <http://www.gnu.org/licenses/>. """ Module for testing labels """ from hamcrest.core.base_matcher import BaseMatcher from hamcrest.core.helpers.wrap_matcher import wrap_matcher from .enumerators import all_widgets class LabelMatcher(BaseMatcher): """ Check if Widget has label with given text """ def __init__(self, text): """ Default constructor """ super(LabelMatcher, self).__init__() if hasattr(text, 'matches'): self.text = text else: self.text = wrap_matcher(text) def _matches(self, item): """ Check if matcher matches item :param item: object to match against :returns: True if matching, otherwise False :rtype: Boolean """ widgets = all_widgets(item) for widget in widgets: if hasattr(widget, 'text') and self.text.matches(widget.text()): return True return False def describe_to(self, description): """ Describe this matcher """ description.append('Control with label {0}'.format(self.text)) def describe_mismatch(self, item, mismatch_description): """ Describe this mismatch """ mismatch_description.append( 'QLabel with text {0} was not found'.format(self.text)) def <|fim_middle|>(text): """ Check if Widget has label with given text """ return LabelMatcher(text) <|fim▁end|>
has_label
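
A hedged usage sketch for the label matcher assembled in the two label.py rows above; the `satin.label` import path, the `dialog` fixture, and the test runner are assumptions, not part of the dataset rows.

from hamcrest import assert_that, is_not
from satin.label import has_label   # assumed module path for the label.py file above

def test_dialog_title_is_shown(dialog):
    # passes when any child widget exposes text() equal to 'Character'
    assert_that(dialog, has_label('Character'))
    assert_that(dialog, is_not(has_label('Goblin')))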
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar><|fim▁hole|># it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except 
Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name)<|fim▁end|>
#
# This file is part of lai-client.
#
# lai-client is free software: you can redistribute it and/or modify
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): <|fim_middle|> <|fim▁end|>
def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in 
row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name)
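
A minimal, hypothetical driver for the DBMongo class filled in above; the import path, the Document constructor arguments, and the field names are assumptions, and it presumes a local mongod on the default port.

from lai import Document
from lai.db.mongo import DBMongo   # import path assumed

db = DBMongo('lai_example')
db.connect()
# Document fields below are assumed from the 'data.content'/'data.description' queries above
doc = db.save(Document(data={'content': 'buy milk', 'description': 'errand'}))
print(db.get(doc.id).data)
for hit in db.search('milk'):
    print(hit.id, hit.data)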
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): <|fim_middle|> def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = 
self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        self.name = name
        self.host = host
        self.port = port
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): <|fim_middle|> def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return 
self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            self.connection = pymongo.Connection(self.host, self.port)
            self.db = self.connection[self.name]
        except AutoReconnect:
            raise DatabaseException("It's not possible connect to the database")
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): <|fim_middle|> def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def 
save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            query = {'_id': 'last_id'}
            update = {'$inc': {'id': 1}}
            fn = self.db.internal.find_and_modify
            row = fn(query, update, upsert=True, new=True)
        except Exception as e:
            raise DatabaseException(e)
        return row['id']
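
The completion above implements an atomic id counter with the legacy Collection.find_and_modify; on pymongo 3 and later the same pattern would look roughly like this (database and collection names assumed).

from pymongo import MongoClient, ReturnDocument

client = MongoClient('127.0.0.1', 27017)
internal = client['lai']['internal']
row = internal.find_one_and_update({'_id': 'last_id'},
                                   {'$inc': {'id': 1}},
                                   upsert=True,
                                   return_document=ReturnDocument.AFTER)
next_id = row['id']   # monotonically increasing integer id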
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): <|fim_middle|> def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} 
document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}},
                            {'data.description': {'$regex': regex, '$options': 'im'}}]}
            fields = {'_id': 0}
            cur = self.db.docs.find(spec, fields)
        except Exception as e:
            raise DatabaseException(e)
        return [Document(**row) for row in cur]
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): <|fim_middle|> def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': 
{process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            if pk == 'id':
                id = int(id)
            if deleted:
                spec = {pk: id}
            else:
                spec = {pk: id, 'data': {'$exists': 1}}
            fields = {'_id': 0}
            row = self.db.docs.find_one(spec, fields)
        except Exception as e:
            raise DatabaseException(e)
        if row:
            return Document(**row)
        raise NotFoundError('%s %s not found' % (pk, id))
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): <|fim_middle|> def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def 
save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            spec = {'data': {'$exists': 1}}
            fields = {'_id': 0}
            sort = [('tid', 1)]
            cur = self.db.docs.find(spec, fields, sort=sort)
        except Exception as e:
            raise DatabaseException(e)
        return [Document(**row) for row in cur]
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): <|fim_middle|> def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': 
doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        if doc.id:
            return self.update(doc)
        else:
            return self.insert(doc)
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): <|fim_middle|> def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise 
DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        doc.id = self.get_next_id()
        doc.synced = synced
        try:
            self.db.docs.insert(doc)
        except Exception as e:
            raise DatabaseException(e)
        return doc
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): <|fim_middle|> def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except 
Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        if process is None:
            pk = 'id'
            id = doc.id
            doc.synced = False
            set = doc
        elif process == UPDATE_PROCESS:
            if self.db.docs.find({'sid': doc.sid}).count() == 0:
                return self.insert(doc, synced=True)
            pk = 'sid'
            id = doc.sid
            doc.synced = not doc.merged()  # must be committed if it was merged
            doc.merged(False)
            set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced}
        elif process == COMMIT_PROCESS:
            pk = 'id'
            id = doc.id
            doc.synced = True
            set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced}
        else:
            raise DatabaseException('Incorrect update process')
        try:
            rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True)
            assert rs['n'] == 1
        except Exception as e:
            raise DatabaseException(e)
        return doc
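
For orientation, the three branches in the update body above correspond to three call sites; a rough, assumed sketch of how they might be exercised (the db and document objects are hypothetical).

from lai.database import UPDATE_PROCESS, COMMIT_PROCESS

def sync_one(db, local_doc, remote_doc):
    """Illustrative only: walks the three update paths shown above."""
    db.update(local_doc)                                     # plain local edit: clears the synced flag
    merged = db.update(remote_doc, process=UPDATE_PROCESS)   # apply a document pulled from the server
    return db.update(merged, process=COMMIT_PROCESS)         # record sid/tid after a successful commit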
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): <|fim_middle|> def save_last_sync(self, ids, process): try: spec = {'_id': 
'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        if doc.id is None:
            raise DatabaseException('Document does not have id')
        if doc.sid is None:
            try:
                rs = self.db.docs.remove({'id': doc.id}, safe=True)
                assert rs['n'] == 1
            except Exception as e:
                raise DatabaseException(e)
            return None
        doc.data = None
        return self.update(doc)
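This fill distinguishes a hard delete from a soft delete: a document that never reached the server (no sid) is simply removed, while a synced one keeps its row, has its data blanked and is re-saved unsynced so the deletion propagates on the next push. A hedged standalone sketch of that decision with plain pymongo follows; delete_doc, the dict documents and the lai_demo database are assumptions for illustration, not lai-client code.

# Hard vs. soft delete, mirroring the completion above (sketch, modern pymongo).
from pymongo import MongoClient

def delete_doc(docs, doc):
    if doc.get('id') is None:
        raise ValueError('Document does not have an id')
    if doc.get('sid') is None:
        # Never synced: nobody else knows about it, so drop the row outright.
        docs.delete_one({'id': doc['id']})
        return None
    # Already on the server: keep the row, blank its payload, flag it for sync.
    docs.update_one({'id': doc['id']}, {'$set': {'data': None, 'synced': False}})
    return doc

if __name__ == '__main__':
    docs = MongoClient()['lai_demo'].docs
    docs.insert_one({'id': 2, 'sid': 'abc', 'data': 'note', 'synced': True})
    delete_doc(docs, {'id': 2, 'sid': 'abc'})
    print(docs.find_one({'id': 2}, {'_id': 0}))    # data is None, synced is False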
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): <|fim_middle|> def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            spec = {'_id': 'last_sync'}
            document = {'$set': {process: ids}}
            self.db.internal.update(spec, document, upsert=True)
        except Exception as e:
            raise DatabaseException(e)
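save_last_sync keeps all sync bookkeeping in a single document of the internal collection, keyed by _id='last_sync', with one field per process name; the upsert creates it on first use. A small sketch of the same upsert with modern pymongo, where the collection and field names are assumptions carried over from the snippet:

# One bookkeeping document per database, one field per sync process (sketch).
from pymongo import MongoClient

def save_last_sync(internal, ids, process):
    internal.update_one({'_id': 'last_sync'},
                        {'$set': {process: ids}},
                        upsert=True)                # create the document on first use

if __name__ == '__main__':
    internal = MongoClient()['lai_demo'].internal
    save_last_sync(internal, [1, 2, 3], 'update')
    save_last_sync(internal, [4], 'commit')
    print(internal.find_one({'_id': 'last_sync'}))  # both fields on the same document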
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): <|fim_middle|> def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            spec = {'synced': False}
            fields = {'_id': 0}
            cur = self.db.docs.find(spec, fields)
        except Exception as e:
            raise DatabaseException(e)
        return list(cur)
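get_docs_to_commit is a straight query for rows whose synced flag is False, with _id projected away so the raw dicts can later be turned into Document objects. A minimal hedged equivalent with modern pymongo (names are assumptions):

# Rows still waiting to be pushed to the server (sketch).
from pymongo import MongoClient

def docs_to_commit(docs):
    return list(docs.find({'synced': False}, {'_id': 0}))

if __name__ == '__main__':
    docs = MongoClient()['lai_demo'].docs
    docs.insert_one({'id': 3, 'synced': False, 'data': 'pending'})
    print(docs_to_commit(docs))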
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): <|fim_middle|> def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        try:
            spec = {'tid': {'$gt': 0}}
            sort = [('tid', -1)]
            row = self.db.docs.find_one(spec, sort=sort)
        except Exception as e:
            raise DatabaseException(e)
        if row:
            return row['tid']
        return 0
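get_last_tid finds the highest transaction id the client has seen by sorting tid descending and taking the first row, defaulting to 0 for an empty database. The same idea as a standalone sketch; find_one with a sort argument is current pymongo, and the rest of the names are assumptions:

# Highest transaction id seen so far, or 0 when nothing has been synced (sketch).
from pymongo import MongoClient

def last_tid(docs):
    row = docs.find_one({'tid': {'$gt': 0}}, sort=[('tid', -1)])
    return row['tid'] if row else 0

if __name__ == '__main__':
    docs = MongoClient()['lai_demo'].docs
    docs.insert_one({'id': 4, 'tid': 7})
    docs.insert_one({'id': 5, 'tid': 12})
    print(last_tid(docs))    # 12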
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): <|fim_middle|> def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
        docs = {'updated'  : [],
                'committed': [],
                'to_commit': []}
        row = self.db.internal.find_one({'_id': 'last_sync'})
        if row and 'update' in row:
            for id in row['update']:
                docs['updated'].append(self.get(id, deleted=True))
        if row and 'commit' in row:
            for id in row['commit']:
                docs['committed'].append(self.get(id, deleted=True))
        to_commit = self.get_docs_to_commit()
        for row in to_commit:
            doc = Document(**row)
            docs['to_commit'].append(doc)
        return docs
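status() builds three buckets from two sources: the updated and committed lists come from the last_sync bookkeeping document, while to_commit is the live query for unsynced rows. Below is a compact sketch of the same aggregation using $in lookups instead of per-id get() calls; this is an assumed simplification, not the lai-client behaviour, since it skips the Document wrapper entirely.

# Status buckets from the bookkeeping document plus the unsynced query (sketch).
from pymongo import MongoClient

def status(db):
    row = db.internal.find_one({'_id': 'last_sync'}) or {}
    return {
        'updated':   list(db.docs.find({'id': {'$in': row.get('update', [])}})),
        'committed': list(db.docs.find({'id': {'$in': row.get('commit', [])}})),
        'to_commit': list(db.docs.find({'synced': False}, {'_id': 0})),
    }

if __name__ == '__main__':
    db = MongoClient()['lai_demo']
    print({name: len(rows) for name, rows in status(db).items()})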
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): <|fim_middle|> <|fim▁end|>
return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name)
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': <|fim_middle|> if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
id = int(id)
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: <|fim_middle|> else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
spec = {pk: id}
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: <|fim_middle|> fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = 
self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
spec = {pk: id, 'data': {'$exists': 1}}
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: <|fim_middle|> raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is 
None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
return Document(**row)
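The get() fills above hinge on one filter decision: an active document must still carry a data field ({'$exists': 1}), while a deleted one is matched by its key alone, which is how status() can still resolve the ids of removed documents. A hedged standalone version of that lookup follows; LookupError stands in for lai's NotFoundError, and the remaining names are illustrative assumptions.

# Key lookup with and without the "still has data" requirement (sketch).
from pymongo import MongoClient

def get_doc(docs, id, pk='id', deleted=False):
    if pk == 'id':
        id = int(id)
    spec = {pk: id} if deleted else {pk: id, 'data': {'$exists': 1}}
    row = docs.find_one(spec, {'_id': 0})
    if row is None:
        raise LookupError('%s %s not found' % (pk, id))
    return row

if __name__ == '__main__':
    docs = MongoClient()['lai_demo'].docs
    docs.insert_one({'id': 6, 'sid': 's6'})         # no data field at all
    print(get_doc(docs, 6, deleted=True))            # found by key alone
    # get_doc(docs, 6) would raise LookupError: the data field is missing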
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: <|fim_middle|> else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is 
None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
return self.update(doc)
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: <|fim_middle|> def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is 
None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
return self.insert(doc)
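save() is a simple dispatcher: a document that already carries a local id goes through update(), anything else is handed to insert(), which first draws a fresh id from the atomic counter kept in the internal collection. The sketch below combines both steps with modern pymongo (find_one_and_update replaces the older find_and_modify); the helper names and the lai_demo database are illustrative assumptions, not the lai-client API.

# save() dispatch plus the $inc-based id counter behind insert() (sketch).
from pymongo import MongoClient, ReturnDocument

def next_id(internal):
    row = internal.find_one_and_update({'_id': 'last_id'},
                                       {'$inc': {'id': 1}},
                                       upsert=True,
                                       return_document=ReturnDocument.AFTER)
    return row['id']

def save(db, doc):
    if doc.get('id'):
        # Existing document: overwrite it and mark it as needing a push.
        db.docs.update_one({'id': doc['id']}, {'$set': dict(doc, synced=False)})
    else:
        # New document: allocate the next local id, then insert it unsynced.
        doc['id'] = next_id(db.internal)
        doc['synced'] = False
        db.docs.insert_one(doc)
    return doc

if __name__ == '__main__':
    db = MongoClient()['lai_demo']
    print(save(db, {'data': 'first note'})['id'])   # 1 on a fresh database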
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: <|fim_middle|> elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if doc.sid is None: try: rs = 
self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
pk = 'id' id = doc.id doc.synced = False set = doc
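
# --- Usage sketch (illustrative only, not part of lai-client) --------------
# A minimal example of driving the DBMongo backend above. It assumes a
# mongod instance is reachable on localhost:27017 and that lai's Document
# can be built from keyword fields such as 'data' (the same keys the
# queries in this file use); both are assumptions made for illustration.

if __name__ == '__main__':
    db = DBMongo('lai_example')
    db.connect()

    # insert() assigns a local id via the 'last_id' counter document.
    doc = db.save(Document(data={'content': 'remember to buy milk',
                                 'description': 'shopping note'}))
    print('saved with local id', doc.id)

    # Case-insensitive regex search over data.content / data.description.
    for hit in db.search('milk'):
        print('match:', hit.data)

    # Documents that have not been synced to the server yet.
    print('pending commit:', len(db.get_docs_to_commit()))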
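
# --- Internal bookkeeping (illustrative) ------------------------------------
# The 'internal' collection holds two well-known documents, reconstructed
# here from the queries above; the field values shown are invented examples.
#
#   {'_id': 'last_id',   'id': 42}                          # counter used by get_next_id()
#   {'_id': 'last_sync', 'update': [3, 5], 'commit': [7]}   # doc ids touched by the last sync,
#                                                           # read back by status()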
if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
search
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def <|fim_middle|>(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have 
id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
get
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def <|fim_middle|>(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') 
if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
getall
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def <|fim_middle|>(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have 
id') if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
save
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def <|fim_middle|>(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') 
if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
insert
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def <|fim_middle|>(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') 
if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
update
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def <|fim_middle|>(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') 
if doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
delete
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def <|fim_middle|>(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
save_last_sync
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def <|fim_middle|>(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
get_docs_to_commit
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def <|fim_middle|>(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def status(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
get_last_tid
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Author: Leo Vidarte <http://nerdlabs.com.ar> # # This file is part of lai-client. # # lai-client is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 # as published by the Free Software Foundation. # # lai-client is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with lai-client. If not, see <http://www.gnu.org/licenses/>. import pymongo from pymongo.errors import AutoReconnect from lai.db.base import DBBase from lai.database import UPDATE_PROCESS, COMMIT_PROCESS from lai.database import DatabaseException, NotFoundError from lai import Document class DBMongo(DBBase): def __init__(self, name, host='127.0.0.1', port=27017): self.name = name self.host = host self.port = port def connect(self): try: self.connection = pymongo.Connection(self.host, self.port) self.db = self.connection[self.name] except AutoReconnect: raise DatabaseException("It's not possible connect to the database") def get_next_id(self): try: query = {'_id': 'last_id'} update = {'$inc': {'id': 1}} fn = self.db.internal.find_and_modify row = fn(query, update, upsert=True, new=True) except Exception as e: raise DatabaseException(e) return row['id'] def search(self, regex): try: spec = {'$or': [{'data.content' : {'$regex': regex, '$options': 'im'}}, {'data.description': {'$regex': regex, '$options': 'im'}}]} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def get(self, id, pk='id', deleted=False): try: if pk == 'id': id = int(id) if deleted: spec = {pk: id} else: spec = {pk: id, 'data': {'$exists': 1}} fields = {'_id': 0} row = self.db.docs.find_one(spec, fields) except Exception as e: raise DatabaseException(e) if row: return Document(**row) raise NotFoundError('%s %s not found' % (pk, id)) def getall(self): try: spec = {'data': {'$exists': 1}} fields = {'_id': 0} sort = [('tid', 1)] cur = self.db.docs.find(spec, fields, sort=sort) except Exception as e: raise DatabaseException(e) return [Document(**row) for row in cur] def save(self, doc): if doc.id: return self.update(doc) else: return self.insert(doc) def insert(self, doc, synced=False): doc.id = self.get_next_id() doc.synced = synced try: self.db.docs.insert(doc) except Exception as e: raise DatabaseException(e) return doc def update(self, doc, process=None): if process is None: pk = 'id' id = doc.id doc.synced = False set = doc elif process == UPDATE_PROCESS: if self.db.docs.find({'sid': doc.sid}).count() == 0: return self.insert(doc, synced=True) pk = 'sid' id = doc.sid doc.synced = not doc.merged() # must be commited if was merged doc.merged(False) set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user, 'public': doc.public, 'synced': doc.synced} elif process == COMMIT_PROCESS: pk = 'id' id = doc.id doc.synced = True set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced} else: raise DatabaseException('Incorrect update process') try: rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return doc def delete(self, doc): if doc.id is None: raise DatabaseException('Document does not have id') if 
doc.sid is None: try: rs = self.db.docs.remove({'id': doc.id}, safe=True) assert rs['n'] == 1 except Exception as e: raise DatabaseException(e) return None doc.data = None return self.update(doc) def save_last_sync(self, ids, process): try: spec = {'_id': 'last_sync'} document = {'$set': {process: ids}} self.db.internal.update(spec, document, upsert=True) except Exception as e: raise DatabaseException(e) def get_docs_to_commit(self): try: spec = {'synced': False} fields = {'_id': 0} cur = self.db.docs.find(spec, fields) except Exception as e: raise DatabaseException(e) return list(cur) def get_last_tid(self): try: spec = {'tid': {'$gt': 0}} sort = [('tid', -1)] row = self.db.docs.find_one(spec, sort=sort) except Exception as e: raise DatabaseException(e) if row: return row['tid'] return 0 def <|fim_middle|>(self): docs = {'updated' : [], 'committed': [], 'to_commit': []} row = self.db.internal.find_one({'_id': 'last_sync'}) if row and 'update' in row: for id in row['update']: docs['updated'].append(self.get(id, deleted=True)) if row and 'commit' in row: for id in row['commit']: docs['committed'].append(self.get(id, deleted=True)) to_commit = self.get_docs_to_commit() for row in to_commit: doc = Document(**row) docs['to_commit'].append(doc) return docs def __str__(self): return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name) <|fim▁end|>
status
<|file_name|>mongo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Author: Leo Vidarte <http://nerdlabs.com.ar>
#
# This file is part of lai-client.
#
# lai-client is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3
# as published by the Free Software Foundation.
#
# lai-client is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with lai-client. If not, see <http://www.gnu.org/licenses/>.

import pymongo
from pymongo.errors import AutoReconnect

from lai.db.base import DBBase
from lai.database import UPDATE_PROCESS, COMMIT_PROCESS
from lai.database import DatabaseException, NotFoundError
from lai import Document


class DBMongo(DBBase):

    def __init__(self, name, host='127.0.0.1', port=27017):
        self.name = name
        self.host = host
        self.port = port

    def connect(self):
        try:
            self.connection = pymongo.Connection(self.host, self.port)
            self.db = self.connection[self.name]
        except AutoReconnect:
            raise DatabaseException("It's not possible connect to the database")

    def get_next_id(self):
        try:
            query = {'_id': 'last_id'}
            update = {'$inc': {'id': 1}}
            fn = self.db.internal.find_and_modify
            row = fn(query, update, upsert=True, new=True)
        except Exception as e:
            raise DatabaseException(e)
        return row['id']

    def search(self, regex):
        try:
            spec = {'$or': [{'data.content'    : {'$regex': regex, '$options': 'im'}},
                            {'data.description': {'$regex': regex, '$options': 'im'}}]}
            fields = {'_id': 0}
            cur = self.db.docs.find(spec, fields)
        except Exception as e:
            raise DatabaseException(e)
        return [Document(**row) for row in cur]

    def get(self, id, pk='id', deleted=False):
        try:
            if pk == 'id':
                id = int(id)
            if deleted:
                spec = {pk: id}
            else:
                spec = {pk: id, 'data': {'$exists': 1}}
            fields = {'_id': 0}
            row = self.db.docs.find_one(spec, fields)
        except Exception as e:
            raise DatabaseException(e)
        if row:
            return Document(**row)
        raise NotFoundError('%s %s not found' % (pk, id))

    def getall(self):
        try:
            spec = {'data': {'$exists': 1}}
            fields = {'_id': 0}
            sort = [('tid', 1)]
            cur = self.db.docs.find(spec, fields, sort=sort)
        except Exception as e:
            raise DatabaseException(e)
        return [Document(**row) for row in cur]

    def save(self, doc):
        if doc.id:
            return self.update(doc)
        else:
            return self.insert(doc)

    def insert(self, doc, synced=False):
        doc.id = self.get_next_id()
        doc.synced = synced
        try:
            self.db.docs.insert(doc)
        except Exception as e:
            raise DatabaseException(e)
        return doc

    def update(self, doc, process=None):
        if process is None:
            pk = 'id'
            id = doc.id
            doc.synced = False
            set = doc
        elif process == UPDATE_PROCESS:
            if self.db.docs.find({'sid': doc.sid}).count() == 0:
                return self.insert(doc, synced=True)
            pk = 'sid'
            id = doc.sid
            doc.synced = not doc.merged() # must be commited if was merged
            doc.merged(False)
            set = {'tid': doc.tid, 'data': doc.data, 'user': doc.user,
                   'public': doc.public, 'synced': doc.synced}
        elif process == COMMIT_PROCESS:
            pk = 'id'
            id = doc.id
            doc.synced = True
            set = {'sid': doc.sid, 'tid': doc.tid, 'synced': doc.synced}
        else:
            raise DatabaseException('Incorrect update process')
        try:
            rs = self.db.docs.update({pk: id}, {'$set': set}, safe=True)
            assert rs['n'] == 1
        except Exception as e:
            raise DatabaseException(e)
        return doc

    def delete(self, doc):
        if doc.id is None:
            raise DatabaseException('Document does not have id')
        if doc.sid is None:
            try:
                rs = self.db.docs.remove({'id': doc.id}, safe=True)
                assert rs['n'] == 1
            except Exception as e:
                raise DatabaseException(e)
            return None
        doc.data = None
        return self.update(doc)

    def save_last_sync(self, ids, process):
        try:
            spec = {'_id': 'last_sync'}
            document = {'$set': {process: ids}}
            self.db.internal.update(spec, document, upsert=True)
        except Exception as e:
            raise DatabaseException(e)

    def get_docs_to_commit(self):
        try:
            spec = {'synced': False}
            fields = {'_id': 0}
            cur = self.db.docs.find(spec, fields)
        except Exception as e:
            raise DatabaseException(e)
        return list(cur)

    def get_last_tid(self):
        try:
            spec = {'tid': {'$gt': 0}}
            sort = [('tid', -1)]
            row = self.db.docs.find_one(spec, sort=sort)
        except Exception as e:
            raise DatabaseException(e)
        if row:
            return row['tid']
        return 0

    def status(self):
        docs = {'updated'  : [],
                'committed': [],
                'to_commit': []}
        row = self.db.internal.find_one({'_id': 'last_sync'})
        if row and 'update' in row:
            for id in row['update']:
                docs['updated'].append(self.get(id, deleted=True))
        if row and 'commit' in row:
            for id in row['commit']:
                docs['committed'].append(self.get(id, deleted=True))
        to_commit = self.get_docs_to_commit()
        for row in to_commit:
            doc = Document(**row)
            docs['to_commit'].append(doc)
        return docs

    def <|fim_middle|>(self):
        return "%s://%s:%s/%s" % ('mongo', self.host, self.port, self.name)
<|fim▁end|>
__str__
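Each row above and below pairs a fill-in-the-middle prompt with its completion: the prompt wraps a source file between <|fim▁begin|> and <|fim▁end|>, marks the held-out span with <|fim_middle|> (some rows use <|fim▁hole|> instead), and the completion supplies that span. The snippet below is a minimal illustrative sketch, not a dataset row, of how such a pair can be spliced back into a full file; the reassemble helper and the tiny synthetic prompt at the end are hypothetical.

# Illustrative sketch (not a dataset row): splice a completion back into its FIM prompt.
FIM_BEGIN = "<|fim▁begin|>"                        # everything before this token is the file-name header
HOLE_MARKERS = ("<|fim_middle|>", "<|fim▁hole|>")  # both marker styles appear in these rows
FIM_END = "<|fim▁end|>"

def reassemble(prompt, completion):
    """Drop the header, remove the end token, and fill the hole with the completion."""
    body = prompt.split(FIM_BEGIN, 1)[-1].replace(FIM_END, "")
    for marker in HOLE_MARKERS:
        if marker in body:
            prefix, _, suffix = body.partition(marker)
            return prefix + completion + suffix
    return body  # no hole marker: nothing to fill

# Tiny synthetic pair, purely to exercise the helper:
demo_prompt = FIM_BEGIN + "def greet(name):\n    return " + HOLE_MARKERS[0] + "\n" + FIM_END
print(reassemble(demo_prompt, "'hello, ' + name"))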
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self):<|fim▁hole|> def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, ))<|fim▁end|>
return self.fc
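The remaining rows are all drawn from the same gr-gsm hierarchical block, clock_offset_corrector, with different spans held out. For orientation, a minimal usage sketch follows; it assumes GNU Radio and gr-gsm are installed and that the block's module is importable, and the null source/sink and top-block name are placeholders rather than anything taken from these rows.

# Minimal usage sketch (assumes GNU Radio + gr-gsm; source, sink and names are placeholders).
from gnuradio import gr, blocks
from clock_offset_corrector import clock_offset_corrector  # the hier block defined in these rows

class demo_top_block(gr.top_block):
    def __init__(self):
        gr.top_block.__init__(self, "clock offset demo")
        samp_rate = 1625000.0 / 6.0 * 4.0   # same default rate as the block
        # Replace the null source with a real SDR or file source in practice.
        self.src = blocks.null_source(gr.sizeof_gr_complex)
        self.corr = clock_offset_corrector(fc=936.6e6, ppm=0, samp_rate_in=samp_rate)
        self.sink = blocks.null_sink(gr.sizeof_gr_complex)
        self.connect(self.src, self.corr, self.sink)

if __name__ == '__main__':
    tb = demo_top_block()
    tb.start()
    tb.stop()
    tb.wait()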
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): <|fim_middle|> <|fim▁end|>
def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, ))
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): <|fim_middle|> def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
        gr.hier_block2.__init__(
            self, "Clock offset corrector",
            gr.io_signature(1, 1, gr.sizeof_gr_complex*1),
            gr.io_signature(1, 1, gr.sizeof_gr_complex*1),
        )

        ##################################################
        # Parameters
        ##################################################
        self.fc = fc
        self.ppm = ppm
        self.samp_rate_in = samp_rate_in

        ##################################################
        # Variables
        ##################################################
        self.samp_rate_out = samp_rate_out = samp_rate_in

        ##################################################
        # Blocks
        ##################################################
        self.ppm_in = None;self.message_port_register_hier_out("ppm_in")
        self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out)
        self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm)
        self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out)
        self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, ))
        self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, ))
        self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, ))

        ##################################################
        # Connections
        ##################################################
        self.connect((self, 0), (self.fractional_resampler_xx_0, 0))
        self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0))
        self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1))
        self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0))
        self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1))
        self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0))
        self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0))
        self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0))

        ##################################################
        # Asynch Message Connections
        ##################################################
        self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg")
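The wiring restored above applies two corrections driven by one ppm value: the controlled rotator removes a frequency offset of roughly fc*ppm/1e6 Hz, and the fractional resampler stretches the stream by a ratio of about 1 + ppm*1e-6. A small back-of-the-envelope sketch of that arithmetic follows; the ppm value in it is chosen purely for illustration.

# Back-of-the-envelope check of the correction computed by the block graph above (example numbers only).
import math

fc = 936.6e6                        # Hz, default centre frequency in these rows
samp_rate = 1625000.0 / 6.0 * 4.0   # default sample rate (samp_rate_in == samp_rate_out here)
ppm = 10.0                          # hypothetical clock error in parts-per-million

freq_offset_hz = fc * ppm / 1e6                               # offset the rotator removes
phase_inc = ppm * (fc / samp_rate * (2 * math.pi) / 1e6)      # rotator phase increment per sample
resamp_ratio = samp_rate / samp_rate + ppm * 1e-6 * samp_rate / samp_rate  # add_const + multiply path

print("offset: %.1f Hz, phase inc: %.3e rad/sample, resample ratio: %.8f"
      % (freq_offset_hz, phase_inc, resamp_ratio))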
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): <|fim_middle|> def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
return self.fc
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): <|fim_middle|> def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out 
self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
        self.fc = fc
        self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, ))
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): <|fim_middle|> def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
return self.ppm
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): <|fim_middle|> def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = 
samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
        self.ppm = ppm
        self.gsm_controlled_const_source_f_0.set_constant(self.ppm)
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): <|fim_middle|> def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
return self.samp_rate_in
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): <|fim_middle|> def get_samp_rate_out(self): return self.samp_rate_out def set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) 
self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
        self.samp_rate_in = samp_rate_in
        self.set_samp_rate_out(self.samp_rate_in)
        self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out)
        self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, ))
        self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, ))
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): <|fim_middle|> def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
return self.samp_rate_out
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): <|fim_middle|> <|fim▁end|>
        self.samp_rate_out = samp_rate_out
        self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, ))
        self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out)
        self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, ))
        self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out)
        self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, ))
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def <|fim_middle|>(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
__init__
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def <|fim_middle|>(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
get_fc
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def <|fim_middle|>(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
set_fc
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def <|fim_middle|>(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
get_ppm
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def <|fim_middle|>(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
set_ppm
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def <|fim_middle|>(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
get_samp_rate_in
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def <|fim_middle|>(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
set_samp_rate_in
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def <|fim_middle|>(self): return self.samp_rate_out def 
set_samp_rate_out(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
get_samp_rate_out
<|file_name|>clock_offset_corrector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## # Gnuradio Python Flow Graph # Title: Clock offset corrector # Author: Piotr Krysik # Generated: Wed Nov 19 08:38:40 2014 ################################################## from gnuradio import blocks from gnuradio import filter from gnuradio import gr from gnuradio.filter import firdes import grgsm import math class clock_offset_corrector(gr.hier_block2): def __init__(self, fc=936.6e6, ppm=0, samp_rate_in=1625000.0/6.0*4.0): gr.hier_block2.__init__( self, "Clock offset corrector", gr.io_signature(1, 1, gr.sizeof_gr_complex*1), gr.io_signature(1, 1, gr.sizeof_gr_complex*1), ) ################################################## # Parameters ################################################## self.fc = fc self.ppm = ppm self.samp_rate_in = samp_rate_in ################################################## # Variables ################################################## self.samp_rate_out = samp_rate_out = samp_rate_in ################################################## # Blocks ################################################## self.ppm_in = None;self.message_port_register_hier_out("ppm_in") self.gsm_controlled_rotator_cc_0 = grgsm.controlled_rotator_cc(0,samp_rate_out) self.gsm_controlled_const_source_f_0 = grgsm.controlled_const_source_f(ppm) self.fractional_resampler_xx_0 = filter.fractional_resampler_cc(0, samp_rate_in/samp_rate_out) self.blocks_multiply_const_vxx_0_0 = blocks.multiply_const_vff((1.0e-6*samp_rate_in/samp_rate_out, )) self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vff((fc/samp_rate_out*(2*math.pi)/1e6, )) self.blocks_add_const_vxx_0 = blocks.add_const_vff((samp_rate_in/samp_rate_out, )) ################################################## # Connections ################################################## self.connect((self, 0), (self.fractional_resampler_xx_0, 0)) self.connect((self.fractional_resampler_xx_0, 0), (self.gsm_controlled_rotator_cc_0, 0)) self.connect((self.blocks_add_const_vxx_0, 0), (self.fractional_resampler_xx_0, 1)) self.connect((self.blocks_multiply_const_vxx_0_0, 0), (self.blocks_add_const_vxx_0, 0)) self.connect((self.blocks_multiply_const_vxx_0, 0), (self.gsm_controlled_rotator_cc_0, 1)) self.connect((self.gsm_controlled_rotator_cc_0, 0), (self, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0_0, 0)) self.connect((self.gsm_controlled_const_source_f_0, 0), (self.blocks_multiply_const_vxx_0, 0)) ################################################## # Asynch Message Connections ################################################## self.msg_connect(self, "ppm_in", self.gsm_controlled_const_source_f_0, "constant_msg") def get_fc(self): return self.fc def set_fc(self, fc): self.fc = fc self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) def get_ppm(self): return self.ppm def set_ppm(self, ppm): self.ppm = ppm self.gsm_controlled_const_source_f_0.set_constant(self.ppm) def get_samp_rate_in(self): return self.samp_rate_in def set_samp_rate_in(self, samp_rate_in): self.samp_rate_in = samp_rate_in self.set_samp_rate_out(self.samp_rate_in) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) def get_samp_rate_out(self): return self.samp_rate_out def 
<|fim_middle|>(self, samp_rate_out): self.samp_rate_out = samp_rate_out self.blocks_multiply_const_vxx_0.set_k((self.fc/self.samp_rate_out*(2*math.pi)/1e6, )) self.fractional_resampler_xx_0.set_resamp_ratio(self.samp_rate_in/self.samp_rate_out) self.blocks_multiply_const_vxx_0_0.set_k((1.0e-6*self.samp_rate_in/self.samp_rate_out, )) self.gsm_controlled_rotator_cc_0.set_samp_rate(self.samp_rate_out) self.blocks_add_const_vxx_0.set_k((self.samp_rate_in/self.samp_rate_out, )) <|fim▁end|>
set_samp_rate_out
<|file_name|>shared_ith_shirt_s09.py<|end_file_name|><|fim▁begin|>#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Tangible() result.template = "object/tangible/wearables/ithorian/shared_ith_shirt_s09.iff"<|fim▁hole|> result.stfName("wearables_name","ith_shirt_s09") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result<|fim▁end|>
result.attribute_template_id = 11
<|file_name|>shared_ith_shirt_s09.py<|end_file_name|><|fim▁begin|>#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): <|fim_middle|> <|fim▁end|>
result = Tangible() result.template = "object/tangible/wearables/ithorian/shared_ith_shirt_s09.iff" result.attribute_template_id = 11 result.stfName("wearables_name","ith_shirt_s09") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
<|file_name|>shared_ith_shirt_s09.py<|end_file_name|><|fim▁begin|>#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def <|fim_middle|>(kernel): result = Tangible() result.template = "object/tangible/wearables/ithorian/shared_ith_shirt_s09.iff" result.attribute_template_id = 11 result.stfName("wearables_name","ith_shirt_s09") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result<|fim▁end|>
create
<|file_name|>malware_bazaar_search.py<|end_file_name|><|fim▁begin|>#python imports import sys import os import time import datetime import subprocess import json import requests from termcolor import colored #third-party imports #No third-party imports #programmer generated imports from logger import logger from fileio import fileio ''' ***BEGIN DESCRIPTION*** Type: Search - Description: Searches for any available data on a target against the Abuse.ch Malware Bazaar database. ***END DESCRIPTION*** ''' def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' reputation_dump = '' reputation_output_data = '' malwarebazaar = '' if (POE.logging == True): newlogentry = 'Module: malware_bazaar_search' LOG.WriteStrongLog(POE.logdir, POE.targetfilename, newlogentry) if (POE.SHA256 == ''): print (colored('\r\n[x] Unable to execute Malware Bazaar Search - hash value must be SHA256.', 'red', attrs=['bold'])) newlogentry = 'Unable to execute Malware Bazaar Search - hash value must be SHA256' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) return -1 global json query_status = '' first_seen = '' last_seen = '' signature = '' sig_count = 0 output = POE.logdir + 'MalwareBazaarSearch.json' FI = fileio() print (colored('\r\n[*] Running abuse.ch Malware Bazaar Search against: ' + POE.target, 'white', attrs=['bold'])) malwarebazaar = "https://mb-api.abuse.ch/api/v1/" #API URL data = { #Our header params 'query': 'get_info', 'hash': POE.SHA256, } response_dump = requests.post(malwarebazaar, data=data, timeout=15) # Give us the results as JSON if (POE.debug == True): print (response_dump) try: FI.WriteLogFile(output, response_dump.content.decode("utf-8", "ignore")) print (colored('[*] Malware Bazaar data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold'])) if ((POE.logging == True) and (POE.nolinksummary == False)): newlogentry = 'Malware Bazaar data has been generated to file here: <a href=\"' + output + '\"> Malware Bazaar Host Output </a>' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except: print (colored('[x] Unable to write Malware Bazaar data to file', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'Unable to write Malware Bazaar data to file' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) POE.csv_line += 'N/A,' return -1 try: #Open the file we just downloaded print ('[-] Reading Malware Bazaar file: ' + output.strip()) with open(output.strip(), 'rb') as read_file: data = json.load(read_file, cls=None) read_file.close() # Check what kind of results we have query_status = data["query_status"] print ('[*] query_status: ' + query_status) if (query_status == 'ok'): with open(output.strip(), 'r') as read_file: for string in read_file: if (POE.debug == True): print ('[DEBUG] string: ' + string.strip()) if ('first_seen' in string): first_seen = string.strip() if ('last_seen' in string): last_seen = string.strip() if (('signature' in string) and (sig_count == 0)): signature = string.strip() sig_count += 1 print ('[*] Sample ' + first_seen.replace(',','')) print ('[*] Sample ' + last_seen.replace(',','')) print ('[*] Sample ' + signature.replace(',','')) if (POE.logging == True): newlogentry = 'Sample ' + first_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + last_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + signature.replace(',','') LOG.WriteSubLog(POE.logdir, 
POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'hash_not_found'): print (colored('[-] The hash value has not been found...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'no_results'): print (colored('[-] No results available for host...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Something weird happened... else: print (colored('[x] An error has occurred...', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'An error has occurred...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except Exception as e: print (colored('[x] Error: ' + str(e) + ' Terminating...', 'red', attrs=['bold'])) read_file.close() return -1 #Clean up before returning read_file.close()<|fim▁hole|><|fim▁end|>
return 0
<|file_name|>malware_bazaar_search.py<|end_file_name|><|fim▁begin|>#python imports import sys import os import time import datetime import subprocess import json import requests from termcolor import colored #third-party imports #No third-party imports #programmer generated imports from logger import logger from fileio import fileio ''' ***BEGIN DESCRIPTION*** Type: Search - Description: Searches for any available data on a target against the Abuse.ch Malware Bazaar database. ***END DESCRIPTION*** ''' def POE(POE): <|fim_middle|> <|fim▁end|>
if (POE.logging == True): LOG = logger() newlogentry = '' reputation_dump = '' reputation_output_data = '' malwarebazaar = '' if (POE.logging == True): newlogentry = 'Module: malware_bazaar_search' LOG.WriteStrongLog(POE.logdir, POE.targetfilename, newlogentry) if (POE.SHA256 == ''): print (colored('\r\n[x] Unable to execute Malware Bazaar Search - hash value must be SHA256.', 'red', attrs=['bold'])) newlogentry = 'Unable to execute Malware Bazaar Search - hash value must be SHA256' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) return -1 global json query_status = '' first_seen = '' last_seen = '' signature = '' sig_count = 0 output = POE.logdir + 'MalwareBazaarSearch.json' FI = fileio() print (colored('\r\n[*] Running abuse.ch Malware Bazaar Search against: ' + POE.target, 'white', attrs=['bold'])) malwarebazaar = "https://mb-api.abuse.ch/api/v1/" #API URL data = { #Our header params 'query': 'get_info', 'hash': POE.SHA256, } response_dump = requests.post(malwarebazaar, data=data, timeout=15) # Give us the results as JSON if (POE.debug == True): print (response_dump) try: FI.WriteLogFile(output, response_dump.content.decode("utf-8", "ignore")) print (colored('[*] Malware Bazaar data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold'])) if ((POE.logging == True) and (POE.nolinksummary == False)): newlogentry = 'Malware Bazaar data has been generated to file here: <a href=\"' + output + '\"> Malware Bazaar Host Output </a>' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except: print (colored('[x] Unable to write Malware Bazaar data to file', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'Unable to write Malware Bazaar data to file' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) POE.csv_line += 'N/A,' return -1 try: #Open the file we just downloaded print ('[-] Reading Malware Bazaar file: ' + output.strip()) with open(output.strip(), 'rb') as read_file: data = json.load(read_file, cls=None) read_file.close() # Check what kind of results we have query_status = data["query_status"] print ('[*] query_status: ' + query_status) if (query_status == 'ok'): with open(output.strip(), 'r') as read_file: for string in read_file: if (POE.debug == True): print ('[DEBUG] string: ' + string.strip()) if ('first_seen' in string): first_seen = string.strip() if ('last_seen' in string): last_seen = string.strip() if (('signature' in string) and (sig_count == 0)): signature = string.strip() sig_count += 1 print ('[*] Sample ' + first_seen.replace(',','')) print ('[*] Sample ' + last_seen.replace(',','')) print ('[*] Sample ' + signature.replace(',','')) if (POE.logging == True): newlogentry = 'Sample ' + first_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + last_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + signature.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'hash_not_found'): print (colored('[-] The hash value has not been found...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... 
elif (query_status == 'no_results'): print (colored('[-] No results available for host...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Something weird happened... else: print (colored('[x] An error has occurred...', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'An error has occurred...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except Exception as e: print (colored('[x] Error: ' + str(e) + ' Terminating...', 'red', attrs=['bold'])) read_file.close() return -1 #Clean up before returning read_file.close() return 0
<|file_name|>malware_bazaar_search.py<|end_file_name|><|fim▁begin|>#python imports import sys import os import time import datetime import subprocess import json import requests from termcolor import colored #third-party imports #No third-party imports #programmer generated imports from logger import logger from fileio import fileio ''' ***BEGIN DESCRIPTION*** Type: Search - Description: Searches for any available data on a target against the Abuse.ch Malware Bazaar database. ***END DESCRIPTION*** ''' def POE(POE): if (POE.logging == True): <|fim_middle|> newlogentry = '' reputation_dump = '' reputation_output_data = '' malwarebazaar = '' if (POE.logging == True): newlogentry = 'Module: malware_bazaar_search' LOG.WriteStrongLog(POE.logdir, POE.targetfilename, newlogentry) if (POE.SHA256 == ''): print (colored('\r\n[x] Unable to execute Malware Bazaar Search - hash value must be SHA256.', 'red', attrs=['bold'])) newlogentry = 'Unable to execute Malware Bazaar Search - hash value must be SHA256' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) return -1 global json query_status = '' first_seen = '' last_seen = '' signature = '' sig_count = 0 output = POE.logdir + 'MalwareBazaarSearch.json' FI = fileio() print (colored('\r\n[*] Running abuse.ch Malware Bazaar Search against: ' + POE.target, 'white', attrs=['bold'])) malwarebazaar = "https://mb-api.abuse.ch/api/v1/" #API URL data = { #Our header params 'query': 'get_info', 'hash': POE.SHA256, } response_dump = requests.post(malwarebazaar, data=data, timeout=15) # Give us the results as JSON if (POE.debug == True): print (response_dump) try: FI.WriteLogFile(output, response_dump.content.decode("utf-8", "ignore")) print (colored('[*] Malware Bazaar data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold'])) if ((POE.logging == True) and (POE.nolinksummary == False)): newlogentry = 'Malware Bazaar data has been generated to file here: <a href=\"' + output + '\"> Malware Bazaar Host Output </a>' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except: print (colored('[x] Unable to write Malware Bazaar data to file', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'Unable to write Malware Bazaar data to file' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) POE.csv_line += 'N/A,' return -1 try: #Open the file we just downloaded print ('[-] Reading Malware Bazaar file: ' + output.strip()) with open(output.strip(), 'rb') as read_file: data = json.load(read_file, cls=None) read_file.close() # Check what kind of results we have query_status = data["query_status"] print ('[*] query_status: ' + query_status) if (query_status == 'ok'): with open(output.strip(), 'r') as read_file: for string in read_file: if (POE.debug == True): print ('[DEBUG] string: ' + string.strip()) if ('first_seen' in string): first_seen = string.strip() if ('last_seen' in string): last_seen = string.strip() if (('signature' in string) and (sig_count == 0)): signature = string.strip() sig_count += 1 print ('[*] Sample ' + first_seen.replace(',','')) print ('[*] Sample ' + last_seen.replace(',','')) print ('[*] Sample ' + signature.replace(',','')) if (POE.logging == True): newlogentry = 'Sample ' + first_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + last_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + signature.replace(',','') LOG.WriteSubLog(POE.logdir, 
POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'hash_not_found'): print (colored('[-] The hash value has not been found...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'no_results'): print (colored('[-] No results available for host...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Something weird happened... else: print (colored('[x] An error has occurred...', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'An error has occurred...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except Exception as e: print (colored('[x] Error: ' + str(e) + ' Terminating...', 'red', attrs=['bold'])) read_file.close() return -1 #Clean up before returning read_file.close() return 0 <|fim▁end|>
LOG = logger()
<|file_name|>malware_bazaar_search.py<|end_file_name|><|fim▁begin|>#python imports import sys import os import time import datetime import subprocess import json import requests from termcolor import colored #third-party imports #No third-party imports #programmer generated imports from logger import logger from fileio import fileio ''' ***BEGIN DESCRIPTION*** Type: Search - Description: Searches for any available data on a target against the Abuse.ch Malware Bazaar database. ***END DESCRIPTION*** ''' def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' reputation_dump = '' reputation_output_data = '' malwarebazaar = '' if (POE.logging == True): <|fim_middle|> if (POE.SHA256 == ''): print (colored('\r\n[x] Unable to execute Malware Bazaar Search - hash value must be SHA256.', 'red', attrs=['bold'])) newlogentry = 'Unable to execute Malware Bazaar Search - hash value must be SHA256' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) return -1 global json query_status = '' first_seen = '' last_seen = '' signature = '' sig_count = 0 output = POE.logdir + 'MalwareBazaarSearch.json' FI = fileio() print (colored('\r\n[*] Running abuse.ch Malware Bazaar Search against: ' + POE.target, 'white', attrs=['bold'])) malwarebazaar = "https://mb-api.abuse.ch/api/v1/" #API URL data = { #Our header params 'query': 'get_info', 'hash': POE.SHA256, } response_dump = requests.post(malwarebazaar, data=data, timeout=15) # Give us the results as JSON if (POE.debug == True): print (response_dump) try: FI.WriteLogFile(output, response_dump.content.decode("utf-8", "ignore")) print (colored('[*] Malware Bazaar data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold'])) if ((POE.logging == True) and (POE.nolinksummary == False)): newlogentry = 'Malware Bazaar data has been generated to file here: <a href=\"' + output + '\"> Malware Bazaar Host Output </a>' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except: print (colored('[x] Unable to write Malware Bazaar data to file', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'Unable to write Malware Bazaar data to file' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) POE.csv_line += 'N/A,' return -1 try: #Open the file we just downloaded print ('[-] Reading Malware Bazaar file: ' + output.strip()) with open(output.strip(), 'rb') as read_file: data = json.load(read_file, cls=None) read_file.close() # Check what kind of results we have query_status = data["query_status"] print ('[*] query_status: ' + query_status) if (query_status == 'ok'): with open(output.strip(), 'r') as read_file: for string in read_file: if (POE.debug == True): print ('[DEBUG] string: ' + string.strip()) if ('first_seen' in string): first_seen = string.strip() if ('last_seen' in string): last_seen = string.strip() if (('signature' in string) and (sig_count == 0)): signature = string.strip() sig_count += 1 print ('[*] Sample ' + first_seen.replace(',','')) print ('[*] Sample ' + last_seen.replace(',','')) print ('[*] Sample ' + signature.replace(',','')) if (POE.logging == True): newlogentry = 'Sample ' + first_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + last_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + signature.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... 
elif (query_status == 'hash_not_found'): print (colored('[-] The hash value has not been found...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'no_results'): print (colored('[-] No results available for host...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Something weird happened... else: print (colored('[x] An error has occurred...', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'An error has occurred...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except Exception as e: print (colored('[x] Error: ' + str(e) + ' Terminating...', 'red', attrs=['bold'])) read_file.close() return -1 #Clean up before returning read_file.close() return 0 <|fim▁end|>
newlogentry = 'Module: malware_bazaar_search' LOG.WriteStrongLog(POE.logdir, POE.targetfilename, newlogentry)
<|file_name|>malware_bazaar_search.py<|end_file_name|><|fim▁begin|>#python imports import sys import os import time import datetime import subprocess import json import requests from termcolor import colored #third-party imports #No third-party imports #programmer generated imports from logger import logger from fileio import fileio ''' ***BEGIN DESCRIPTION*** Type: Search - Description: Searches for any available data on a target against the Abuse.ch Malware Bazaar database. ***END DESCRIPTION*** ''' def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' reputation_dump = '' reputation_output_data = '' malwarebazaar = '' if (POE.logging == True): newlogentry = 'Module: malware_bazaar_search' LOG.WriteStrongLog(POE.logdir, POE.targetfilename, newlogentry) if (POE.SHA256 == ''): <|fim_middle|> global json query_status = '' first_seen = '' last_seen = '' signature = '' sig_count = 0 output = POE.logdir + 'MalwareBazaarSearch.json' FI = fileio() print (colored('\r\n[*] Running abuse.ch Malware Bazaar Search against: ' + POE.target, 'white', attrs=['bold'])) malwarebazaar = "https://mb-api.abuse.ch/api/v1/" #API URL data = { #Our header params 'query': 'get_info', 'hash': POE.SHA256, } response_dump = requests.post(malwarebazaar, data=data, timeout=15) # Give us the results as JSON if (POE.debug == True): print (response_dump) try: FI.WriteLogFile(output, response_dump.content.decode("utf-8", "ignore")) print (colored('[*] Malware Bazaar data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold'])) if ((POE.logging == True) and (POE.nolinksummary == False)): newlogentry = 'Malware Bazaar data has been generated to file here: <a href=\"' + output + '\"> Malware Bazaar Host Output </a>' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except: print (colored('[x] Unable to write Malware Bazaar data to file', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'Unable to write Malware Bazaar data to file' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) POE.csv_line += 'N/A,' return -1 try: #Open the file we just downloaded print ('[-] Reading Malware Bazaar file: ' + output.strip()) with open(output.strip(), 'rb') as read_file: data = json.load(read_file, cls=None) read_file.close() # Check what kind of results we have query_status = data["query_status"] print ('[*] query_status: ' + query_status) if (query_status == 'ok'): with open(output.strip(), 'r') as read_file: for string in read_file: if (POE.debug == True): print ('[DEBUG] string: ' + string.strip()) if ('first_seen' in string): first_seen = string.strip() if ('last_seen' in string): last_seen = string.strip() if (('signature' in string) and (sig_count == 0)): signature = string.strip() sig_count += 1 print ('[*] Sample ' + first_seen.replace(',','')) print ('[*] Sample ' + last_seen.replace(',','')) print ('[*] Sample ' + signature.replace(',','')) if (POE.logging == True): newlogentry = 'Sample ' + first_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + last_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + signature.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'hash_not_found'): print (colored('[-] The hash value has not been found...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' 
LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'no_results'): print (colored('[-] No results available for host...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Something weird happened... else: print (colored('[x] An error has occurred...', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'An error has occurred...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except Exception as e: print (colored('[x] Error: ' + str(e) + ' Terminating...', 'red', attrs=['bold'])) read_file.close() return -1 #Clean up before returning read_file.close() return 0 <|fim▁end|>
print (colored('\r\n[x] Unable to execute Malware Bazaar Search - hash value must be SHA256.', 'red', attrs=['bold'])) newlogentry = 'Unable to execute Malware Bazaar Search - hash value must be SHA256' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) return -1
<|file_name|>malware_bazaar_search.py<|end_file_name|><|fim▁begin|>#python imports import sys import os import time import datetime import subprocess import json import requests from termcolor import colored #third-party imports #No third-party imports #programmer generated imports from logger import logger from fileio import fileio ''' ***BEGIN DESCRIPTION*** Type: Search - Description: Searches for any available data on a target against the Abuse.ch Malware Bazaar database. ***END DESCRIPTION*** ''' def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' reputation_dump = '' reputation_output_data = '' malwarebazaar = '' if (POE.logging == True): newlogentry = 'Module: malware_bazaar_search' LOG.WriteStrongLog(POE.logdir, POE.targetfilename, newlogentry) if (POE.SHA256 == ''): print (colored('\r\n[x] Unable to execute Malware Bazaar Search - hash value must be SHA256.', 'red', attrs=['bold'])) newlogentry = 'Unable to execute Malware Bazaar Search - hash value must be SHA256' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) return -1 global json query_status = '' first_seen = '' last_seen = '' signature = '' sig_count = 0 output = POE.logdir + 'MalwareBazaarSearch.json' FI = fileio() print (colored('\r\n[*] Running abuse.ch Malware Bazaar Search against: ' + POE.target, 'white', attrs=['bold'])) malwarebazaar = "https://mb-api.abuse.ch/api/v1/" #API URL data = { #Our header params 'query': 'get_info', 'hash': POE.SHA256, } response_dump = requests.post(malwarebazaar, data=data, timeout=15) # Give us the results as JSON if (POE.debug == True): <|fim_middle|> try: FI.WriteLogFile(output, response_dump.content.decode("utf-8", "ignore")) print (colored('[*] Malware Bazaar data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold'])) if ((POE.logging == True) and (POE.nolinksummary == False)): newlogentry = 'Malware Bazaar data has been generated to file here: <a href=\"' + output + '\"> Malware Bazaar Host Output </a>' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except: print (colored('[x] Unable to write Malware Bazaar data to file', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'Unable to write Malware Bazaar data to file' LOG.WriteStrongSubLog(POE.logdir, POE.targetfilename, newlogentry) POE.csv_line += 'N/A,' return -1 try: #Open the file we just downloaded print ('[-] Reading Malware Bazaar file: ' + output.strip()) with open(output.strip(), 'rb') as read_file: data = json.load(read_file, cls=None) read_file.close() # Check what kind of results we have query_status = data["query_status"] print ('[*] query_status: ' + query_status) if (query_status == 'ok'): with open(output.strip(), 'r') as read_file: for string in read_file: if (POE.debug == True): print ('[DEBUG] string: ' + string.strip()) if ('first_seen' in string): first_seen = string.strip() if ('last_seen' in string): last_seen = string.strip() if (('signature' in string) and (sig_count == 0)): signature = string.strip() sig_count += 1 print ('[*] Sample ' + first_seen.replace(',','')) print ('[*] Sample ' + last_seen.replace(',','')) print ('[*] Sample ' + signature.replace(',','')) if (POE.logging == True): newlogentry = 'Sample ' + first_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + last_seen.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) newlogentry = 'Sample ' + signature.replace(',','') LOG.WriteSubLog(POE.logdir, POE.targetfilename, 
newlogentry) #Can't find anything on this one... elif (query_status == 'hash_not_found'): print (colored('[-] The hash value has not been found...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Can't find anything on this one... elif (query_status == 'no_results'): print (colored('[-] No results available for host...', 'yellow', attrs=['bold'])) if (POE.logging == True): newlogentry = 'No results available for host...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) #Something weird happened... else: print (colored('[x] An error has occurred...', 'red', attrs=['bold'])) if (POE.logging == True): newlogentry = 'An error has occurred...' LOG.WriteSubLog(POE.logdir, POE.targetfilename, newlogentry) except Exception as e: print (colored('[x] Error: ' + str(e) + ' Terminating...', 'red', attrs=['bold'])) read_file.close() return -1 #Clean up before returning read_file.close() return 0 <|fim▁end|>
print (response_dump)