Build: #3 failed

Job: Default Job failed

Stages & jobs

  1. Default Stage

raise for status: Test case result

The following summarizes the result of the test "raise for status" in build #3 of Clowder - pyclowder2 - bugfix-GLGVO-343-parser-vm-spawning-many-jobs-update - Default Job.
Description:  raise for status
Test class:   tests.test_geostreams
Method:       test_raise_for_status
Duration:     < 1 sec
Status:       Failed (Existing Failure)
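
Both the "Existing Failure" status and the traceback below point to an environment problem rather than a code regression: the test needs a live Clowder instance on localhost:9000, and the connection was refused (Errno 111) before any HTTP status could be checked. As a minimal sketch (the clowder_is_up helper is hypothetical, not part of pyclowder), such server-dependent tests could be skipped rather than failed when nothing is listening:

# Hypothetical guard, not part of pyclowder: probe the Clowder base URL
# and skip connection-dependent tests when no server answers.
import pytest
import requests

def clowder_is_up(host):
    """Return True if anything answers an HTTP request at `host`."""
    try:
        requests.get(host, timeout=2)
        return True
    except requests.exceptions.ConnectionError:
        return False

@pytest.mark.skipif(not clowder_is_up('http://localhost:9000/clowder'),
                    reason='no Clowder instance on localhost:9000')
def test_raise_for_status(caplog, host, key):
    pass  # original test body unchanged

With a guard like this, the build would report a skip with a clear reason instead of the opaque ConnectionError below.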

Error Log

ConnectionError: HTTPConnectionPool(host='localhost', port=9000): Max retries exceeded with url: /clowder/apithis_path_does_not_exist (Caused by NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at 0x7f6c1544df10>: Failed to establish a new connection: [Errno 111] Connection refused',))
caplog = <pytest_capturelog.CaptureLogFuncArg object at 0x7f6c1545ec90>
host = 'http://localhost:9000/clowder', key = 'r1ek3rs'

    def test_raise_for_status(caplog, host, key):
        client = ClowderClient(host=host, key=key)
        try:
>           client.get_json("this_path_does_not_exist")

tests/test_geostreams.py:27: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
pyclowder/client.py:68: in get_json
    r = requests.get(url, headers=self.headers)
/tmp/virtualenv/pyclowder2/local/lib/python2.7/site-packages/requests/api.py:71: in get
    return request('get', url, params=params, **kwargs)
/tmp/virtualenv/pyclowder2/local/lib/python2.7/site-packages/requests/api.py:57: in request
    return session.request(method=method, url=url, **kwargs)
/tmp/virtualenv/pyclowder2/local/lib/python2.7/site-packages/requests/sessions.py:475: in request
    resp = self.send(prep, **send_kwargs)
/tmp/virtualenv/pyclowder2/local/lib/python2.7/site-packages/requests/sessions.py:585: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7f6c15505f10>
request = <PreparedRequest [GET]>, stream = False
timeout = <requests.packages.urllib3.util.timeout.Timeout object at 0x7f6c1544de90>
verify = True, cert = None, proxies = OrderedDict()

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.
    
            :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
            :param stream: (optional) Whether to stream the request content.
            :param timeout: (optional) How long to wait for the server to send
                data before giving up, as a float, or a :ref:`(connect timeout,
                read timeout) <timeouts>` tuple.
            :type timeout: float or tuple
            :param verify: (optional) Whether to verify SSL certificates.
            :param cert: (optional) Any user-provided SSL certificate to be trusted.
            :param proxies: (optional) The proxies dictionary to apply to the request.
            """
    
        conn = self.get_connection(request.url, proxies)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)
    
        chunked = not (request.body is None or 'Content-Length' in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )
    
            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool
    
                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
    
                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)
    
                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)
    
                    low_conn.endheaders()
    
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')
    
                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()
    
                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
    
        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)
    
        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)
    
            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)
    
            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)
    
>           raise ConnectionError(e, request=request)
E           ConnectionError: HTTPConnectionPool(host='localhost', port=9000): Max retries exceeded with url: /clowder/apithis_path_does_not_exist (Caused by NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at 0x7f6c1544df10>: Failed to establish a new connection: [Errno 111] Connection refused',))

/tmp/virtualenv/pyclowder2/local/lib/python2.7/site-packages/requests/adapters.py:467: ConnectionError
--------------------------------- Captured log ---------------------------------
connectionpool.py          213 INFO     Starting new HTTP connection (1): localhost
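
One further detail worth noting in the error text: the request URL is /clowder/apithis_path_does_not_exist, i.e. the API prefix and the relative path were concatenated with no separator ("api" + "this_path_does_not_exist"). Whether or not that is intentional for this negative test, a defensive join avoids malformed URLs; a sketch follows (build_url is a hypothetical helper, not pyclowder's actual API):

# Hypothetical sketch, not pyclowder's implementation: join a base URL
# and a relative path so missing or doubled slashes cannot produce
# URLs like '/clowder/apithis_path_does_not_exist'.
def build_url(base, path):
    return base.rstrip('/') + '/' + path.lstrip('/')

assert (build_url('http://localhost:9000/clowder/api', 'this_path_does_not_exist')
        == 'http://localhost:9000/clowder/api/this_path_does_not_exist')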