Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/co2meter/co2meter.py", line 95, in __init__
with self.co2hid():
File "/usr/lib/python2.7/contextlib.py", line 17, in __enter__
return self.gen.next()
File "/usr/local/lib/python2.7/dist-packages/co2meter/co2meter.py", line 144, in co2hid
self.hid_open(send_magic_table=send_magic_table)
File "/usr/local/lib/python2.7/dist-packages/co2meter/co2meter.py", line 114, in hid_open
self._h.open(self._info['vendor_id'], self._info['product_id'])
File "hid.pyx", line 66, in hid.device.open
IOError: open failed
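The IOError above is what cython-hidapi raises when it cannot open the device node, most often a udev/permissions issue rather than a co2meter bug. Below is a minimal debugging sketch using the same hid module; the Holtek 0x04d9/0xa052 IDs are an assumption for the common USB CO2 monitor, not taken from the log.

# Hedged sketch: enumerate HID devices and try a direct open with cython-hidapi,
# mirroring the open() call that fails in the traceback above.
from __future__ import print_function  # the traceback shows Python 2.7
import hid

VENDOR_ID, PRODUCT_ID = 0x04D9, 0xA052  # assumed IDs; adjust for your device

for info in hid.enumerate():
    print(hex(info['vendor_id']), hex(info['product_id']), info['path'])

dev = hid.device()
dev.open(VENDOR_ID, PRODUCT_ID)  # raises IOError('open failed') without device access
print('opened:', dev.get_product_string())
dev.close()

If the enumerate loop lists the device but open() still fails, the cause is usually missing udev rules or insufficient privileges.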
|
ERROR: An error occurred during the fetch of repository 'SlackTextViewController':
Not a regular file: /private/var/tmp/_bazel_noppe/593d550ebb7d3789479ed80d0a5c34d7/external/rules_pods/bin/RepoTools
|
for synchronization.
*
* @param[in] pipeline The Audio Pipeline Handle
*
* @return
* - ESP_OK on success
* - ESP_FAIL when any errors
*/
esp_err_t audio_pipeline_stop(audio_pipeline_handle_t pipeline);
|
org.powermock.api.mockito.ClassNotPreparedException:
[Ljava.lang.Object;@7a9c84a5
The class net.gazeplay.commons.utils.games.BackgroundMusicManager not prepared for test.
at org.powermock.api.mockito.expectation.reporter.MockitoPowerMockReporter.classNotPrepared(MockitoPowerMockReporter.java:29)
at org.powermock.api.mockito.internal.mockcreation.MockTypeValidatorFactory$DefaultMockTypeValidator.validate(MockTypeValidatorFactory.java:37)
at org.powermock.api.mockito.internal.mockcreation.AbstractMockCreator.validateType(AbstractMockCreator.java:10)
at org.powermock.api.mockito.internal.mockcreation.DefaultMockCreator.createMock(DefaultMockCreator.java:49)
at org.powermock.api.mockito.internal.mockcreation.DefaultMockCreator.mock(DefaultMockCreator.java:40)
at org.powermock.api.mockito.PowerMockito.mockStatic(PowerMockito.java:62)
at net.gazeplay.ui.scenes.configuration.ConfigurationContextTest.setup(ConfigurationContextTest.java:31)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:567)
at org.junit.platform.commons.util.ReflectionUtils.invokeMethod(ReflectionUtils.java:628)
at org.junit.jupiter.engine.execution.ExecutableInvoker.invoke(ExecutableInvoker.java:117)
at org.junit.jupiter.engine.descriptor.ClassTestDescriptor.invokeMethodInExtensionContext(ClassTestDescriptor.java:439)
at org.junit.jupiter.engine.descriptor.ClassTestDescriptor.lambda$synthesizeBeforeEachMethodAdapter$15(ClassTestDescriptor.java:427)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.lambda$invokeBeforeEachMethods$3(TestMethodTestDescriptor.java:149)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.invokeBeforeMethodsOrCallbacksUntilExceptionOccurs(TestMethodTestDescriptor.java:169)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.invokeBeforeEachMethods(TestMethodTestDescriptor.java:148)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.execute(TestMethodTestDescriptor.java:123)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.execute(TestMethodTestDescriptor.java:68)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:135)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:125)
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:135)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:123)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:122)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:80)
at java.base/java.util.ArrayList.forEach(ArrayList.java:1540)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:139)
[Truncated]
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:122)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:80)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.submit(SameThreadHierarchicalTestExecutorService.java:32)
at org.junit.platform.engine.support.hierarchical.HierarchicalTestExecutor.execute(HierarchicalTestExecutor.java:57)
at org.junit.platform.engine.support.hierarchical.HierarchicalTestEngine.execute(HierarchicalTestEngine.java:51)
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:229)
at org.junit.platform.launcher.core.DefaultLauncher.lambda$execute$6(DefaultLauncher.java:197)
at org.junit.platform.launcher.core.DefaultLauncher.withInterceptedStreams(DefaultLauncher.java:211)
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:191)
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:128)
at com.intellij.junit5.JUnit5IdeaTestRunner.startRunnerWithArgs(JUnit5IdeaTestRunner.java:69)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
|
It is an error if a nested package's root URI is inside the package URI of another package. It is an error if two different packages have the same directory as their root URI.
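A minimal sketch illustrating the two error conditions just described, using hypothetical package names and URIs (not taken from the spec):

# 'root' is a package's root URI and 'pkg' its package URI (root plus the
# lib/-style subpath). The two checks correspond to the two sentences above.
packages = {
    'foo':    {'root': 'file:///ws/foo/',       'pkg': 'file:///ws/foo/lib/'},
    'nested': {'root': 'file:///ws/foo/lib/x/', 'pkg': 'file:///ws/foo/lib/x/lib/'},  # root inside foo's package URI
    'bar':    {'root': 'file:///ws/foo/',       'pkg': 'file:///ws/foo/lib/'},        # same root URI as foo
}

def check(pkgs):
    errors = []
    items = list(pkgs.items())
    for i, (name, p) in enumerate(items):
        for other, q in items:
            if other != name and p['root'] != q['root'] and p['root'].startswith(q['pkg']):
                errors.append('%s: root URI lies inside the package URI of %s' % (name, other))
        for other, q in items[i + 1:]:
            if p['root'] == q['root']:
                errors.append('%s and %s have the same root URI' % (name, other))
    return errors

print('\n'.join(check(packages)))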
|
koku_server | [2020-01-20 14:25:03,256] ERROR None Internal Server Error: /api/cost-management/v1/providers/
koku_server | Traceback (most recent call last):
koku_server | File "/opt/app-root/lib/python3.6/site-packages/django/core/handlers/exception.py", line 34, in inner
koku_server | response = get_response(request)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/django/core/handlers/base.py", line 115, in _get_response
koku_server | response = self.process_exception_by_middleware(e, request)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/django/core/handlers/base.py", line 113, in _get_response
koku_server | response = wrapped_callback(request, *callback_args, **callback_kwargs)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/django/views/decorators/csrf.py", line 54, in wrapped_view
koku_server | return view_func(*args, **kwargs)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/rest_framework/viewsets.py", line 114, in view
koku_server | return self.dispatch(request, *args, **kwargs)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/rest_framework/views.py", line 505, in dispatch
koku_server | response = self.handle_exception(exc)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/rest_framework/views.py", line 465, in handle_exception
koku_server | self.raise_uncaught_exception(exc)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/rest_framework/views.py", line 476, in raise_uncaught_exception
koku_server | raise exc
koku_server | File "/opt/app-root/lib/python3.6/site-packages/rest_framework/views.py", line 502, in dispatch
koku_server | response = handler(request, *args, **kwargs)
koku_server | File "/opt/app-root/lib/python3.6/site-packages/django/views/decorators/cache.py", line 44, in _wrapped_view_func
koku_server | response = view_func(request, *args, **kwargs)
koku_server | File "/koku/koku/api/provider/view.py", line 132, in create
koku_server | request.data['type'] = request.data.get('type', '').lower()
koku_server | File "/opt/app-root/lib/python3.6/site-packages/django/http/request.py", line 459, in __setitem__
koku_server | self._assert_mutable()
koku_server | File "/opt/app-root/lib/python3.6/site-packages/django/http/request.py", line 456, in _assert_mutable
koku_server | raise AttributeError("This QueryDict instance is immutable")
koku_server | AttributeError: This QueryDict instance is immutable
koku_server | [2020-01-20 14:25:03,261] ERROR None "POST /api/cost-management/v1/providers/ HTTP/1.1" 500 122437
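The AttributeError is standard Django behavior: for form-encoded requests, DRF's request.data is an immutable QueryDict, so the assignment at view.py:132 is rejected. A minimal sketch of the usual workaround, mutating a copy instead; the view class below is a placeholder, not koku's actual code.

# Hedged sketch: QueryDict.copy() returns a mutable copy, and plain dicts
# (JSON payloads) also support copy(), so this works for either payload type.
from rest_framework import viewsets
from rest_framework.response import Response

class ProviderViewSet(viewsets.ViewSet):  # placeholder name
    def create(self, request, *args, **kwargs):
        data = request.data.copy()
        data['type'] = data.get('type', '').lower()  # safe: we mutate the copy
        # ... pass `data` (not request.data) to the serializer ...
        return Response(data, status=201)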
|
// GetConfig creates a *rest.Config for talking to a Kubernetes API server.
// If --kubeconfig is set, will use the kubeconfig file at that location. Otherwise will assume running
// in cluster and use the cluster provided kubeconfig.
//...
// Config precedence
//
// * --kubeconfig flag pointing at a file
//
// * KUBECONFIG environment variable pointing at a file
//
// * In-cluster config if running in cluster
//
// * $HOME/.kube/config if exists
func GetConfig() (*rest.Config, error) {
|
W0203 16:41:48.647690 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:44:23.738961 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:44:54.755625 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:45:25.774451 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:45:56.789435 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:46:27.804130 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:46:58.814932 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:47:29.829825 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:48:00.850847 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
W0203 16:48:31.870668 11151 reflector.go:326] k8s.io/client-go/informers/factory.go:135: watch of *v1.Secret ended with: an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
|
***********************************
END TEST unittest
************************************
10:00:35 # xtrace_enable
10:00:35 # '[' 0 -eq 1 ']'
10:00:35 # timing_enter cleanup
10:00:35 # xtrace_disable
10:00:35 # '[' no '!=' yes ']'
10:00:35 # PREV_BASH_OPTS=ehxBET
10:00:35 # [[ ehxBET == *\x* ]]
10:00:35 # XTRACE_DISABLED=yes
10:00:35 # set +x
10:00:35 # xtrace_enable
10:00:35 # autotest_cleanup
10:00:35 # /var/jenkins/workspace/Other/unittest_autotests/spdk/scripts/setup.sh reset
0000:00:09.0 (8086 5845): uio_pci_generic -> nvme
10:00:35 # /var/jenkins/workspace/Other/unittest_autotests/spdk/scripts/setup.sh cleanup
ls: cannot access '/dev/shm/*': No such file or directory
Clean
10:00:35 # uname -s
10:00:35 # '[' Linux = Linux ']'
10:00:35 # grep -q '#define SPDK_CONFIG_IGB_UIO_DRIVER 1' /var/jenkins/workspace/Other/unittest_autotests/spdk/include/spdk/config.h
10:00:35 # modprobe -r uio_pci_generic
modprobe: FATAL: Module uio_pci_generic is in use.
10:00:35 # trap - ERR
10:00:35 # print_backtrace
10:00:35 # [[ ehxBET =~ e ]]
10:00:35 # args=("${BASH_ARGV[@]}")
10:00:35 # local args
10:00:35 # xtrace_disable
10:00:35 # '[' no '!=' yes ']'
10:00:35 # PREV_BASH_OPTS=ehxBET
10:00:35 # [[ ehxBET == *\x* ]]
10:00:35 # XTRACE_DISABLED=yes
10:00:35 # set +x
========== Backtrace start: ==========
|
TypeError: Cannot read property 'x' of undefined
File "webpack:///./src/code/views/graph-view.tsx", line 320, in handleNodeMoved
const leftDiff = left - theNode.x;
File "webpack:///./src/code/views/node-view.tsx", line 447, in handleMove
this.props.onMove({
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 697, in _trigger
callback.apply( this.element[ 0 ], [ event ].concat( data ) ) === false ||
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 10264, in _trigger
return $.Widget.prototype._trigger.call( this, type, event, ui );
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 144, in [anonymous]
returnValue = value.apply( this, arguments );
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 9774, in _mouseDrag
if ( this._trigger( "drag", event, ui ) === false ) {
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 144, in [anonymous]
returnValue = value.apply( this, arguments );
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 9427, in _mouseMove
this._mouseDrag( event );
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 144, in [anonymous]
returnValue = value.apply( this, arguments );
File "webpack:///./node_modules/jquery-ui-dist/jquery-ui.js", line 9378, in _mouseDown
return that._mouseMove( event );
|
(function() {
var script = document.createElement('script');
script.onload = function () {
var script = document.createElement('script');
script.onload = function () {
var server = "https://janus.conf.meetecho.com/janus";
var janus = null;
var echotest = null;
var opaqueId = "echotest-"+Janus.randomString(12);
Janus.init({debug: "all", callback: function() {
janus = new Janus(
{
server: server,
success: function() {
janus.attach(
{
plugin: "janus.plugin.echotest",
opaqueId: opaqueId,
success: function(pluginHandle) {
echotest = pluginHandle;
Janus.log("Plugin attached! (" + echotest.getPlugin() + ", id=" + echotest.getId() + ")");
// Negotiate WebRTC
var body = { "audio": true, "video": true };
echotest.send({"message": body});
Janus.debug("Trying a createOffer too (audio/video sendrecv)");
echotest.createOffer(
{
media: { data: true }, // Let's negotiate data channels as well
success: function(jsep) {
Janus.debug("Got SDP!");
Janus.debug(jsep);
echotest.send({"message": body, "jsep": jsep});
setInterval(function() {
echotest.data({
text: 'Sending msg via Datachannel...',
error: function(reason) { console.log(reason) },
success: function() {},
});
}, 5000);
},
error: function(error) {
Janus.error("WebRTC error:", error);
}
});
},
error: function(error) {
console.error(" -- Error attaching plugin...", error);
},
onmessage: function(msg, jsep) {
Janus.debug(" ::: Got a message :::");
Janus.debug(msg);
if(jsep !== undefined && jsep !== null) {
Janus.debug("Handling SDP as well...");
Janus.debug(jsep);
echotest.handleRemoteJsep({jsep: jsep});
}
var result = msg["result"];
if(result !== null && result !== undefined) {
if(result === "done") {
// The plugin closed the echo test
return;
}
}
},
onlocalstream: function(stream) {
Janus.debug(" ::: Got a local stream :::");
Janus.debug(stream);
},
onremotestream: function(stream) {
Janus.debug(" ::: Got a remote stream :::");
Janus.debug(stream);
},
ondataopen: function(data) {
Janus.log("The DataChannel is available!");
},
[Truncated]
Janus.debug("We got data from the DataChannel! " + data);
},
oncleanup: function() {
Janus.log(" ::: Got a cleanup notification :::");
}
});
},
error: function(error) {
Janus.error(error);
}
});
}});
};
script.src = 'https://janus.conf.meetecho.com/janus.js';
document.head.appendChild(script);
};
script.src = 'https://cdnjs.cloudflare.com/ajax/libs/webrtc-adapter/6.4.0/adapter.min.js';
document.head.appendChild(script);
})();
|
diff
}
+
+ // gofail: var defragBeforeRename struct{}
err = os.Rename(tdbp, dbp)
if err != nil {
if b.lg != nil {
|
02:55:35.245247928: Error File below / or /root opened for writing (user=root command=ysqlsh -U user1 -h yb-tserver-0 -d yugabyte parent=bash file=/root/.psql_history program=ysqlsh container_id=ac8028a3c3bc image=yugabytedb/yugabyte) k8s.ns=yb-demo k8s.pod=yb-tserver-0 container=ac8028a3c3bc k8s.ns=yb-demo k8s.pod=yb-tserver-0 container=ac8028a3c3bc
|
# YugabyteDB ports
- macro: yugabytedb_ysql_port
condition: fd.sport=5433
- macro: yugabytedb_ycql_port
condition: fd.sport=9042
- macro: yugabytedb_yedis
condition: fd.sport=6379
- macro: yugabytedb_port
condition: yugabytedb_ysql_port or yugabytedb_ycql_port or yugabytedb_yedis
# - rule: YugabyteDB unexpected network inbound traffic
# desc: inbound network traffic to YugabyteDB on a port other than the standard ports
# condition: user.name = yugabyte and inbound and not yugabytedb_port
# output: "Inbound network traffic to YugabyteDB on unexpected port (connection=%fd.name)"
# priority: WARNING
# - rule: YugabyteDB unexpected network outbound traffic
# desc: outbound network traffic from YugabyteDB on a port other than the standard ports
# condition: user.name = yugabyte and outbound and not yugabytedb_port
# output: "Outbound network traffic from YugabyteDB on unexpected port (connection=%fd.name)"
# priority: WARNING
|
.
Stale issues rot after an additional 30d of inactivity and eventually close.
If this issue is safe to close now please do so with
|
User should be logged in
##[error]Process completed with exit code 1.
Run snapcraft whoami
snapcraft whoami
shell: /bin/bash -e {0}
/ not root-owned 501:50
##[error]Process completed with exit code 1.
|
--oidc-issuer value OIDC issuer (default: "https://localhost:9130") [$REVA_OIDC_ISSUER]
--oidc-insecure OIDC allow insecure communication (default: true) [$REVA_OIDC_INSECURE]
--oidc-id-claim value OIDC id claim (default: "sub") [$REVA_OIDC_ID_CLAIM]
|
[INFO] --- maven-dependency-plugin:3.1.1:analyze-only (analyze-dependencies) @ gateway-service-livy ---
[WARNING] Used undeclared dependencies found:
[WARNING] org.apache.httpcomponents:httpclient:jar:4.5.11:compile
[WARNING] org.apache.httpcomponents:httpcore:jar:4.4.13:compile
|
(removes all files not under version control) and rebuild numpy.
Note: this error has many possible causes, so please don't comment on
an existing issue about this - open a new one instead.
Original error was: DLL load failed: The specified module could not be found.
|
diff
return self._mark_task_instance_state(dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, State.FAILED)
- future, past, State.FAILED)
+ future, past, include_dag=True, State.FAILED)
|
to enable (assuming default hostPath mounts)"
time="2020-03-16T21:59:54Z" level=info msg="Starting the h.ec2Provider.startEc2DescribeBatchProcessing "
time="2020-03-16T22:00:08Z" level=info msg="STS response" accesskeyid=ASIA4XXXXXXXXXXXX accountid=XXXXXXXXXXXX arn="arn:aws:sts::XXXXXXXXXXXX:assumed-role/foo/foo" client="127.0.0.1:64070" method=POST path=/authenticate session=foo userid=AROA4XTGQXXXXXXXXXXXX
time="2020-03-16T22:00:08Z" level=warning msg="access denied" arn="arn:aws:iam::XXXXXXXXXXXX:role/foo" client="127.0.0.1:64070" error="ARN is not mapped" method=POST path=/authenticate
|
to enable (assuming default hostPath mounts)"
time="2020-03-16T22:07:34Z" level=info msg="starting aws iam authenticator controller"
time="2020-03-16T22:07:34Z" level=info msg="waiting for informer caches to sync"
time="2020-03-16T22:07:34Z" level=info msg="Starting the h.ec2Provider.startEc2DescribeBatchProcessing "
time="2020-03-16T22:09:08Z" level=info msg="STS response" accesskeyid=ASIA4XXXXXXXXXXXX accountid=XXXXXXXXXXXX arn="arn:aws:sts::XXXXXXXXXXXX:assumed-role/foo/foo" client="127.0.0.1:64070" method=POST path=/authenticate session=foo userid=AROA4XTGQXXXXXXXXXXXX
time="2020-03-16T22:09:08Z" level=warning msg="access denied" arn="arn:aws:iam::XXXXXXXXXXXX:role/foo" client="127.0.0.1:64070" error="ARN is not mapped" method=POST path=/authenticate
|
$ scripts/rpc.py bdev_nvme_attach_controller -b nvme0 -a 00:04.0 -t pcie
$ scripts/rpc.py bdev_ocssd_create -c nvme0 -b nvme0n1
nvme0n1
$ scripts/rpc.py bdev_ftl_create -b ftl0 -d nvme0n1
Timeout while waiting for response:
|
/root/spdk/scripts/setup.sh
/root/spdk/app/spdk_tgt/spdk_tgt
0000:00:04.0 (1d1d 1f1f): nvme -> uio_pci_generic
Warning: printing stderr to console terminal without -q option specified.
Suggest using --silence-noticelog to disable logging to stderr and
monitor syslog, or redirect stderr to a file.
(Delaying for 10 seconds...)
Starting SPDK v20.01.1-pre git sha1 cc02904e8 / DPDK 19.11.0 initialization...
[ DPDK EAL parameters: spdk_tgt --no-shconf -c 0x1 --log-level=lib.eal:6 --log-level=lib.cryptodev:5 --log-level=user1:6 --iova-mode=pa --base-virtaddr=0x2000000000]
app.c: 642:spdk_app_start: *NOTICE*: Total cores available: 1
reactor.c: 316:_spdk_reactor_run: *NOTICE*: Reactor started on core 0
[Truncated]
NVMe Controller at 0000:00:04.0 [1d1d:1f1f]
=====================================================
Controller Capabilities/Features
================================
Vendor ID: 1d1d
Subsystem Vendor ID: 1af4
Serial Number: deadbeef
Model Number: QEMU NVMe Ctrl
Firmware Version: 1.0
scripts/rpc.py bdev_nvme_attach_controller -b nvme0 -a 00:0a.0 -t pcie
scripts/rpc.py bdev_ocssd_create -c nvme0 -b nvme0n1
nvme0n1
scripts/rpc.py bdev_ftl_create -b ftl0 -d nvme0n1
|
Missing attribute: either ms.prod or ms.service is required. Use ms.prod for on-premise products, or ms.service for cloud services.
|
2020/04/03 15:34:04 Unexpected error running "go build": exit status 2
# github.com/tektoncd/experimental/webhooks-extension/pkg/endpoints
pkg/endpoints/webhook.go:934:39: cannot use client (type "github.com/tektoncd/experimental/webhooks-extension/vendor/k8s.io/client-go/kubernetes/typed/certificates/v1beta1".CertificateSigningRequestInterface) as type context.Context in argument to csr.WaitForCertificate:
"github.com/tektoncd/experimental/webhooks-extension/vendor/k8s.io/client-go/kubernetes/typed/certificates/v1beta1".CertificateSigningRequestInterface does not implement context.Context (missing Deadline method)
pkg/endpoints/webhook.go:934:39: cannot use csrRecord (type *"github.com/tektoncd/experimental/webhooks-extension/vendor/k8s.io/api/certificates/v1beta1".CertificateSigningRequest) as type "github.com/tektoncd/experimental/webhooks-extension/vendor/k8s.io/client-go/kubernetes/typed/certificates/v1beta1".CertificateSigningRequestInterface in argument to csr.WaitForCertificate:
*"github.com/tektoncd/experimental/webhooks-extension/vendor/k8s.io/api/certificates/v1beta1".CertificateSigningRequest does not implement "github.com/tektoncd/experimental/webhooks-extension/vendor/k8s.io/client-go/kubernetes/typed/certificates/v1beta1".CertificateSigningRequestInterface (missing Create method)
pkg/endpoints/webhook.go:934:63: cannot use 3600 * time.Second (type time.Duration) as type *"github.com/tektoncd/experimental/webhooks-extension/vendor/k8s.io/api/certificates/v1beta1".CertificateSigningRequest in argument to csr.WaitForCertificate
|
➜ bacon git:(fix-config) ✗ java -jar cli/target/bacon.jar pnc build start 100 -v -p ~/
[DEBUG] - Log level set to DEBUG
[DEBUG] - Config file set from flag to /home/dcheung//config.yaml
[ERROR] - Keycloak section is needed in the configuration file!
|
TEST RESULT SUMMARY
---------------------------------------------------------------------
Exts skipped : 0
Exts tested : 45
---------------------------------------------------------------------
Number of tests : 124 124
Tests skipped : 0 ( 0.0%) --------
Tests warned : 5 ( 4.0%) ( 4.0%)
Tests failed : 0 ( 0.0%) ( 0.0%)
Tests passed : 119 ( 96.0%) ( 96.0%)
---------------------------------------------------------------------
Time taken : 8 seconds
=====================================================================
=====================================================================
WARNED TEST SUMMARY
---------------------------------------------------------------------
parallel cancellation (not running) [tests/base/045.phpt] (warn: XLEAK section but test passes)
parallel cancellation (running) [tests/base/046.phpt] (warn: XLEAK section but test passes)
parallel cancellation (already cancelled) [tests/base/048.phpt] (warn: XLEAK section but test passes)
parallel cancellation (runtime killed) [tests/base/049.phpt] (warn: XLEAK section but test passes)
parallel cancellation (value on cancelled) [tests/base/050.phpt] (warn: XLEAK section but test passes)
=====================================================================
|
=====================================================================
TEST RESULT SUMMARY
---------------------------------------------------------------------
Exts skipped : 0
Exts tested : 45
---------------------------------------------------------------------
Number of tests : 124 124
Tests skipped : 0 ( 0.0%) --------
Tests warned : 5 ( 4.0%) ( 4.0%)
Tests failed : 1 ( 0.8%) ( 0.8%)
Tests passed : 118 ( 95.2%) ( 95.2%)
---------------------------------------------------------------------
Time taken : 12 seconds
=====================================================================
=====================================================================
FAILED TEST SUMMARY
---------------------------------------------------------------------
Check sync get/set [tests/sync/002.phpt]
=====================================================================
=====================================================================
WARNED TEST SUMMARY
---------------------------------------------------------------------
parallel cancellation (not running) [tests/base/045.phpt] (warn: XLEAK section but test passes)
parallel cancellation (running) [tests/base/046.phpt] (warn: XLEAK section but test passes)
parallel cancellation (already cancelled) [tests/base/048.phpt] (warn: XLEAK section but test passes)
parallel cancellation (runtime killed) [tests/base/049.phpt] (warn: XLEAK section but test passes)
parallel cancellation (value on cancelled) [tests/base/050.phpt] (warn: XLEAK section but test passes)
=====================================================================
|
php: /home/synida/php-src-php-7.4.3/Zend/zend_types.h:1039: zend_gc_delref: Assertion `p->refcount > 0' failed.
Aborted
|
(gdb) bt
#0 0x00007ffff1996337 in raise () from /lib64/libc.so.6
#1 0x00007ffff1997a28 in abort () from /lib64/libc.so.6
#2 0x00007ffff198f156 in __assert_fail_base () from /lib64/libc.so.6
#3 0x00007ffff198f202 in __assert_fail () from /lib64/libc.so.6
#4 0x0000000000961c68 in zend_gc_delref (p=0x7fffe582d340) at /home/synida/php-src-php-7.4.3/Zend/zend_types.h:1039
#5 0x0000000000961f0a in i_zval_ptr_dtor (zval_ptr=0x7fffe55545c0) at /home/synida/php-src-php-7.4.3/Zend/zend_variables.h:43
#6 0x0000000000962127 in zval_ptr_dtor (zval_ptr=0x7fffe55545c0) at /home/synida/php-src-php-7.4.3/Zend/zend_variables.c:84
#7 0x000000000097ca67 in _zend_hash_str_add_or_update_i (ht=0x7fffe581a360, str=0x7fffe7bc09a7 "mode", len=4, h=9223372043240258122,
pData=0x7fffffff8fc0, flag=1) at /home/synida/php-src-php-7.4.3/Zend/zend_hash.c:834
#8 0x000000000097ce75 in zend_hash_str_update (ht=0x7fffe581a360, str=0x7fffe7bc09a7 "mode", len=4, pData=0x7fffffff8fc0)
at /home/synida/php-src-php-7.4.3/Zend/zend_hash.c:910
#9 0x00007fffe7b954ec in php_phongo_readpreference_get_properties_hash (object=0x7fffffff90b0, is_debug=false)
at /home/synida/mongo-php-driver/src/MongoDB/ReadPreference.c:416
#10 0x00007fffe7b95825 in php_phongo_readpreference_get_properties (object=0x7fffffff90b0)
at /home/synida/mongo-php-driver/src/MongoDB/ReadPreference.c:556
#11 0x00000000009ba49f in zend_std_get_gc (object=0x7fffffff90b0, table=0x7fffffff90c8, n=0x7fffffff90d4)
at /home/synida/php-src-php-7.4.3/Zend/zend_object_handlers.c:123
#12 0x000000000099fa62 in gc_scan_black (ref=0x7fffe54ea8b0, stack=0x7fffe530e000) at /home/synida/php-src-php-7.4.3/Zend/zend_gc.c:708
#13 0x00000000009a0728 in gc_scan (ref=0x7fffe5a3ad80, stack=0x7fffffff9200) at /home/synida/php-src-php-7.4.3/Zend/zend_gc.c:1001
#14 0x00000000009a0c66 in gc_scan_roots (stack=0x7fffffff9200) at /home/synida/php-src-php-7.4.3/Zend/zend_gc.c:1122
#15 0x00000000009a196a in zend_gc_collect_cycles () at /home/synida/php-src-php-7.4.3/Zend/zend_gc.c:1454
#16 0x0000000000984b06 in zif_gc_collect_cycles (execute_data=0x7fffee618f20, return_value=0x7fffffffa280)
at /home/synida/php-src-php-7.4.3/Zend/zend_builtin_functions.c:375
#17 0x00000000009dcbe9 in ZEND_DO_FCALL_BY_NAME_SPEC_RETVAL_UNUSED_HANDLER () at /home/synida/php-src-php-7.4.3/Zend/zend_vm_execute.h:1442
#18 0x0000000000a41ff3 in execute_ex (ex=0x7fffee614020) at /home/synida/php-src-php-7.4.3/Zend/zend_vm_execute.h:53809
#19 0x0000000000a4615c in zend_execute (op_array=0x7fffee687300, return_value=0x0) at /home/synida/php-src-php-7.4.3/Zend/zend_vm_execute.h:57913
#20 0x0000000000967764 in zend_execute_scripts (type=8, retval=0x0, file_count=3) at /home/synida/php-src-php-7.4.3/Zend/zend.c:1665
#21 0x00000000008b308e in php_execute_script (primary_file=0x7fffffffd8e0) at /home/synida/php-src-php-7.4.3/main/main.c:2617
#22 0x0000000000a48b75 in do_cli (argc=7, argv=0x162a760) at /home/synida/php-src-php-7.4.3/sapi/cli/php_cli.c:961
#23 0x0000000000a49cc5 in main (argc=7, argv=0x162a760) at /home/synida/php-src-php-7.4.3/sapi/cli/php_cli.c:1356
|
zig
// bar.zig
const Foo = @import("foo.zig").Foo;
pub fn main() void {
var f = Foo{ .a = 10 };
if (f.getA() != 10) unreachable; // !! should raise compile error, but doesn't
if (Foo.getA(f) != 10) unreachable; // correctly raises compile error
}
|
import { Server } from 'warthog';
const app = new Server();
app.start();
app.httpServer.keepAliveTimeout = keepAliveTimeout;
app.httpServer.headersTimeout = headersTimeout;
|
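The warthog snippet above adjusts the underlying Node.js HTTP server's timeouts after startup. Below is a minimal TypeScript sketch of the same idea, assuming (as the snippet does) that warthog's `Server` exposes the Node `http.Server` as `httpServer` and that `start()` is awaitable; the timeout values are illustrative, not taken from the original.
import { Server } from 'warthog';

async function bootstrap() {
  const app = new Server();
  await app.start();

  // Node http.Server timeouts, in milliseconds. headersTimeout should stay above
  // keepAliveTimeout so keep-alive sockets are not closed while headers are still pending.
  app.httpServer.keepAliveTimeout = 65_000;
  app.httpServer.headersTimeout = 66_000;
}

bootstrap();
|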
async checkPermission() {
const authStatus = await messaging().hasPermission();
if (messaging.AuthorizationStatus.AUTHORIZED === authStatus) {
this.getToken();
} else {
this.requestPermission();
}
}
async requestPermission() {
try {
await messaging().requestPermission();
// User has authorised
if (!messaging().isDeviceRegisteredForRemoteMessages) {
await messaging().registerDeviceForRemoteMessages();
}
this.getToken();
} catch (error) {
// User has rejected permissions
console.log('permission rejected');
}
}
async getToken() {
let fcmToken = await AsyncStorage.getItem('fcmToken');
if (!fcmToken) {
fcmToken = await messaging().getToken();
if (fcmToken) {
await AsyncStorage.setItem('fcmToken', fcmToken);
}
}
}
|
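The methods above read like class methods of a React Native component built on React Native Firebase messaging and AsyncStorage. The imports they depend on are not shown; presumably something along these lines (an assumption based on the APIs used, roughly the react-native-firebase v6 era, not part of the original report):
// Assumed imports for the snippet above (hypothetical, inferred from the calls it makes):
import messaging from '@react-native-firebase/messaging';
import AsyncStorage from '@react-native-community/async-storage';
|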
Plugin build check failed: https://travis-ci.org/asdf-vm/asdf-erlang
Plugin build check failed: https://travis-ci.org/vic/asdf-haskell
Plugin build check failed: https://travis-ci.org/halcyon/asdf-java
Plugin build check failed: https://travis-ci.org/rkyleg/asdf-julia
Plugin build check failed: https://travis-ci.org/sylph01/asdf-mongodb
Plugin build check failed: https://travis-ci.org/asdf-vm/asdf-nodejs
|
FATA Error opening file: /etc/NetworkManager/conf.d/crc-nm-dnsmasq.conf: open /etc/NetworkManager/conf.d/crc-nm-dnsmasq.conf: permission denied
FATA Error opening file: /etc/NetworkManager/dnsmasq.d/crc.conf: open /etc/NetworkManager/dnsmasq.d/crc.conf: permission denied
|
~/projects/redwood/redwood-test$ yarn rwdev watch
yarn run v1.22.4
$ /home/dominic/projects/redwood/redwood-test/node_modules/.bin/rwdev watch
Error: 'undefined' does not exist
error Command failed with exit code 1.
info Visit https://yarnpkg.com/en/docs/cli/run for documentation about this command.
|
v
......
OK 1172 ms [ 85/154] vlib\v\tests\const_embed_test.v
......
--------------------------------------------------------------------------------------------------------------------------------------------
71266 ms <=== total time spent testing all fixed tests
ok, fail, skip, total = 120, 0, 34, 154
|
rt->fd_table()->get_monitor(f.fd)->type() == FileMonitor::Type::Mmapped' failed to hold
username_0: # Steps to reproduce
1. Make sure
|
rr: Saving execution to trace directory `/root/.local/share/rr/perf-3'.
[FATAL /home/constantine/Projects/builds/rr-git/src/rr/src/record_syscall.cc:4903:process_mmap()]
(task 60240 (rec:60240) at time 1466)
-> Assertion `rt->fd_table()->get_monitor(f.fd)->type() == FileMonitor::Type::Mmapped' failed to hold.
Tail of trace dump:
{
real_time:62512.038288 global_time:1446, event:`SYSCALL: perf_event_open' (state:ENTERING_SYSCALL) tid:60240, ticks:58846775
rax:0xffffffffffffffda rbx:0x12a rcx:0xffffffffffffffff rdx:0x1 rsi:0xeb52 rdi:0x55e65192d2f0 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192d2e0 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038433 global_time:1447, event:`SYSCALL: perf_event_open' (state:EXITING_SYSCALL) tid:60240, ticks:58846775
rax:0x4 rbx:0x12a rcx:0xffffffffffffffff rdx:0x1 rsi:0xeb52 rdi:0x55e65192d2f0 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192d2e0 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038489 global_time:1448, event:`SYSCALL: perf_event_open' (state:ENTERING_SYSCALL) tid:60240, ticks:58846807
rax:0xffffffffffffffda rbx:0x12a rcx:0xffffffffffffffff rdx:0x2 rsi:0xeb52 rdi:0x55e65192d2f0 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192d2e0 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038617 global_time:1449, event:`SYSCALL: perf_event_open' (state:EXITING_SYSCALL) tid:60240, ticks:58846807
rax:0x5 rbx:0x12a rcx:0xffffffffffffffff rdx:0x2 rsi:0xeb52 rdi:0x55e65192d2f0 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192d2e0 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038670 global_time:1450, event:`SYSCALL: perf_event_open' (state:ENTERING_SYSCALL) tid:60240, ticks:58846839
rax:0xffffffffffffffda rbx:0x12a rcx:0xffffffffffffffff rdx:0x3 rsi:0xeb52 rdi:0x55e65192d2f0 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192d2e0 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038740 global_time:1451, event:`SYSCALL: perf_event_open' (state:EXITING_SYSCALL) tid:60240, ticks:58846839
rax:0x7 rbx:0x12a rcx:0xffffffffffffffff rdx:0x3 rsi:0xeb52 rdi:0x55e65192d2f0 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192d2e0 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038795 global_time:1452, event:`SYSCALL: perf_event_open' (state:ENTERING_SYSCALL) tid:60240, ticks:58846954
rax:0xffffffffffffffda rbx:0x12a rcx:0xffffffffffffffff rdx:0x0 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038881 global_time:1453, event:`SYSCALL: perf_event_open' (state:EXITING_SYSCALL) tid:60240, ticks:58846954
rax:0x8 rbx:0x12a rcx:0xffffffffffffffff rdx:0x0 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.038937 global_time:1454, event:`SYSCALL: perf_event_open' (state:ENTERING_SYSCALL) tid:60240, ticks:58846986
rax:0xffffffffffffffda rbx:0x12a rcx:0xffffffffffffffff rdx:0x1 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.039005 global_time:1455, event:`SYSCALL: perf_event_open' (state:EXITING_SYSCALL) tid:60240, ticks:58846986
rax:0x9 rbx:0x12a rcx:0xffffffffffffffff rdx:0x1 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.039058 global_time:1456, event:`SYSCALL: perf_event_open' (state:ENTERING_SYSCALL) tid:60240, ticks:58847018
rax:0xffffffffffffffda rbx:0x12a rcx:0xffffffffffffffff rdx:0x2 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.039121 global_time:1457, event:`SYSCALL: perf_event_open' (state:EXITING_SYSCALL) tid:60240, ticks:58847018
rax:0xa rbx:0x12a rcx:0xffffffffffffffff rdx:0x2 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.039174 global_time:1458, event:`SYSCALL: perf_event_open' (state:ENTERING_SYSCALL) tid:60240, ticks:58847050
rax:0xffffffffffffffda rbx:0x12a rcx:0xffffffffffffffff rdx:0x3 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.039236 global_time:1459, event:`SYSCALL: perf_event_open' (state:EXITING_SYSCALL) tid:60240, ticks:58847050
rax:0xb rbx:0x12a rcx:0xffffffffffffffff rdx:0x3 rsi:0xeb52 rdi:0x55e65192fd50 rbp:0x681fffa0 rsp:0x681ffdf0 r8:0x8 r9:0x0 r10:0xffffffff r11:0x246 r12:0x8 r13:0xeb52 r14:0x55e65192fd40 r15:0xffffffff rip:0x70000002 eflags:0x246 cs:0x33 ss:0x2b ds:0x0 es:0x0 fs:0x0 gs:0x0 orig_rax:0x12a fs_base:0x7f9971c28780 gs_base:0x0
}
{
real_time:62512.039417 global_time:1460, event:`SYSCALLBUF_FLUSH' tid:60240, ticks:58847990
{ syscall:'openat', ret:0xc, size:0x10 }
{ syscall:'readlink', ret:0x24, size:0x34 }
{ syscall:'read', ret:0x4, size:0x14 }
{ syscall:'close', ret:0x0, size:0x10 }
}
[Truncated]
/usr/lib/libc.so.6(__libc_start_main+0xf3)[0x7f764222e023]
rr(_start+0x2e)[0x55e9f0b981be]
=== End rr backtrace
Launch gdb with
gdb '-l' '10000' '-ex' 'set sysroot /' '-ex' 'target extended-remote 127.0.0.1:60240' /usr/bin/perf
|
metadata. This can be removed based on [comment](https://github.com/grpc/grpc-go/issues/3563#issuecomment-618680599) which says that we can remove it in
|
val target = ClientBuilder.newClient().register(
ContextResolver {
ObjectMapper()
.enable(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature())
.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
.registerModule(Jdk8Module()).registerModule(ParameterNamesModule()).registerModule(GuavaModule())
}
).target(url)
|
Name Stmts Miss Branch BrPart Cover Missing
-------------------------------------------------------------------------------------
src/stratis_cli/_actions/_top.py 182 45 46 0 72% 152-172, 188-207, 623-629, 665-680, 702-710, 735-739
src/stratis_cli/_errors.py 86 3 14 0 97% 58-59, 62
|
import React, { useEffect, useState } from 'react'
import Select from 'react-select'
import PropTypes from 'prop-types'
import api from '~/services/api'
export default function SelectRace({ changeRace, register }) {
const [race, setRace] = useState([])
const [loading, setLoading] = useState(true)
useEffect(() => {
async function load() {
const response = await api.get('races')
const races = response.data.map(m => ({
value: m.id,
label: m.name.toUpperCase(),
}))
setRace(races)
setLoading(false)
}
load()
}, [])
const customStyles = {
input: styles => {
return {
...styles,
height: '30px',
minHeight: '30px',
}
},
}
return (
<div style={{ width: '250px', marginRight: '15px' }}>
<Select
styles={customStyles}
maxMenuHeight={250}
placeholder="ESCOLHA A RAÇA"
onChange={changeRace}
isLoading={loading}
options={race}
isClearable
ref={register}
/>
</div>
)
}
SelectRace.propTypes = {
changeRace: PropTypes.func.isRequired,
}
|
<SelectRace
ref={register}
name="race"
changeRace={e => setRace(e && e.value)}
/>
|
<SelectRace
ref={register}
name="race"
changeRace={e => setRace('race', e.value)}
/>
|
const { register, handleSubmit, errors, setValue } = useForm()
<SelectLevel
name="level"
changeLevel={e => setValue('level', e && e.value)}
/>
|
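The snippets above wire react-select into react-hook-form by passing `register` and pushing the selected value through `setValue`. Below is a hedged TypeScript sketch of that pattern using v5-style manual registration; the component, field, and option names are illustrative and not taken from the original.
import React, { useEffect } from 'react';
import { useForm } from 'react-hook-form';
import Select from 'react-select';

const raceOptions = [
  { value: 1, label: 'OPTION A' },
  { value: 2, label: 'OPTION B' },
];

export default function RaceForm() {
  const { register, handleSubmit, setValue } = useForm();

  // Register the field manually, since react-select does not expose a native input ref.
  useEffect(() => {
    register({ name: 'race' });
  }, [register]);

  const onSubmit = (data: Record<string, unknown>) => console.log(data);

  return (
    <form onSubmit={handleSubmit(onSubmit)}>
      <Select
        options={raceOptions}
        isClearable
        onChange={(option: any) => setValue('race', option ? option.value : null)}
      />
      <button type="submit">Save</button>
    </form>
  );
}
|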
DQMFileSaver::globalEndRun()
----- Begin Fatal Exception 04-May-2020 20:27:06 CEST-----------------------
An exception of category 'StdException' occurred while
[0] Processing global end Run run: 334393
[1] Calling method for module DQMFileSaverOnline/'dqmSaver'
Exception Message:
A std::exception was thrown.
failed opening file: No such file or directory: iostream error
----- End Fatal Exception -------------------------------------------------
|
06-May-2020 02:10:24 CEST Writing DQM Root file: ./upload/DQM_V0001_DT_R000334393.root
DQMFileSaver::globalEndRun()
----- Begin Fatal Exception 06-May-2020 02:10:24 CEST-----------------------
An exception of category 'StdException' occurred while
[0] Processing global end Run run: 334393
[1] Calling method for module DQMFileSaverOnline/'dqmSaver'
Exception Message:
A std::exception was thrown.
failed opening file: No such file or directory: iostream error
----- End Fatal Exception -------------------------------------------------
|
An exception of category 'ProductNotFound' occurred while
[0] Processing Event run: 1 lumi: 1 event: 4 stream: 0
[1] Calling method for module DTPreCalibrationTask/'ALCARECODTCalibrationDQM'
Exception Message:
Principal::getByToken: Found zero products matching all criteria
Looking for type: MuonDigiCollection<DTLayerId,DTDigi>
Looking for module label: muonDTDigis
Looking for productInstanceName:
|
because the straightforward workaround would cause 25.0 to fail (https://github.com/cms-sw/cmssw/pull/29630#issuecomment-629253247).
As discussed above, the simplest option would indeed be to remove the modules filling
|
1.MoveNext()\r\n--- End of stack trace from previous location where exception was thrown ---\r\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\r\n at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification(Task task)\r\n at System.Web.Http.Controllers.ApiControllerActionInvoker.<InvokeActionAsyncCore>d__1.MoveNext()\r\n--- End of stack trace from previous location where exception was thrown ---\r\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\r\n at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification(Task task)\r\n at System.Web.Http.Controllers.ActionFilterResult.<ExecuteAsync>d__5.MoveNext()\r\n--- End of stack trace from previous location where exception was thrown ---\r\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\r\n at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification(Task task)\r\n at System.Web.Http.Dispatcher.HttpControllerDispatcher.<SendAsync>d__15.MoveNext()"}
ERROR: for db Cannot create container for service db: status code not OK but 500: {"Message":"Unhandled exception: Filesharing has been cancelled","StackTrace":" at Docker.ApiServices.Mounting.FileSharing.<DoShareAsync>d__6.MoveNext() in C:\\workspaces\\stable-2.3.x\\src\\github.com\\docker\\pinata\\win\\src\\Docker.ApiServices\\Mounting\\FileSharing.cs:line 0\r\n--- End of stack trace from previous location where exception was thrown ---\r\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\r\n at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification(Task task)\r\n at Docker.ApiServices.Mounting.FileSharing.<ShareAsync>d__4.MoveNext() in C:\\workspaces\\stable-2.3.x\\src\\github.com\\docker\\pinata\\win\\src\\Docker.ApiServices\\Mounting\\FileSharing.cs:line 47\r\n--- End of stack trace from previous location where exception was thrown ---\r\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\r\n at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification(Task task)\r\n at Docker.HttpApi.Controllers.FilesharingController.<ShareDirectory>d__2.MoveNext() in C:\\workspaces\\stable-2.3.x\\src\\github.com\\docker\\pinata\\win\\src\\Docker.HttpApi\\Controllers\\FilesharingController.cs:line 21\r\n--- End of stack trace from previous location where exception was thrown ---\r\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\r\n at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification(Task task)\r\n at System.Threading.Tasks.TaskHelpersExtensions.<CastToObject>d__1
|
StreamSubscription<bool> _isVisibleSubscription;
// We need to call this code from a place where valueChanged Stream is already initialized
// so this is a workaround before we have a proper lifecycle callback.
void subscribeToIsVisible() {
if (_isVisibleSubscription == null) {
_isVisibleSubscription = isVisibleProperty.valueChanged.listen((visible) {
if (shouldReset && !visible) {
if (valueProperty is MutableProperty<String>) {
var mutableValueProperty = valueProperty as MutableProperty<String>;
mutableValueProperty.setValue("-1");
}
}
});
}
}
|
[12907] [2020-05-22 13:22:59.879] [GLnexus] [error] ARC_D30.g.vcf.gz Exists: sample is currently being added (default (ARC_D30.g.vcf.gz))
[12907] [2020-05-22 13:23:04.756] [GLnexus] [error] Failed to bulk load into DB: Failure: One or more gVCF inputs failed validation or database loading; check log for details.
Failed to read from standard input: unknown file type
|
Reference check summary:
OK DOIs
- 10.18637/jss.v067.i01 is OK
- 10.18637/jss.v080.i01 is OK
- 10.1167/17.11.3 is OK
MISSING DOIs
- None
INVALID DOIs
- None
|