quickjs-tart

quickjs-based runtime for wallet-core logic

test_16_info.py (8169B)


#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import os
import pytest

from testenv import Env, CurlClient


log = logging.getLogger(__name__)


class TestInfo:

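    # Provision payload files once per class: data-100k in env.gen_dir is what
    # test_16_03_info_upload sends; the copies in the httpd docroot are created
    # alongside it (the download tests here fetch data.json instead).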
    @pytest.fixture(autouse=True, scope='class')
    def _class_scope(self, env, httpd):
        indir = httpd.docs_dir
        env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
        env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
        env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
        env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)

    # download plain file
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_16_01_info_download(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 2
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
        r.check_stats(count=count, http_status=200, exitcode=0,
                      remote_port=env.port_for(alpn_proto=proto),
                      remote_ip='127.0.0.1')
        for idx, s in enumerate(r.stats):
            self.check_stat(idx, s, r, dl_size=30, ul_size=0)

    # download plain file with a 302 redirect
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_16_02_info_302_download(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 2
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/data.json.302?[0-{count-1}]'
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True, extra_args=[
            '--location'
        ])
        r.check_stats(count=count, http_status=200, exitcode=0,
                      remote_port=env.port_for(alpn_proto=proto),
                      remote_ip='127.0.0.1')
        for idx, s in enumerate(r.stats):
            self.check_stat(idx, s, r, dl_size=30, ul_size=0)

    # upload a 100k file to the echo handler
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_16_03_info_upload(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 2
        fdata = os.path.join(env.gen_dir, 'data-100k')
        fsize = 100 * 1024
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
        r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
                             with_headers=True, extra_args=[
                                '--trace-config', 'http/2,http/3'
                             ])
        r.check_response(count=count, http_status=200)
        r.check_stats(count=count, http_status=200, exitcode=0,
                      remote_port=env.port_for(alpn_proto=proto),
                      remote_ip='127.0.0.1')
        for idx, s in enumerate(r.stats):
            self.check_stat(idx, s, r, dl_size=fsize, ul_size=fsize)

    # download plain file via http: ('time_appconnect' is 0)
    @pytest.mark.parametrize("proto", ['http/1.1'])
    def test_16_04_info_http_download(self, env: Env, httpd, nghttpx, proto):
        count = 2
        curl = CurlClient(env=env)
        url = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
        r.check_stats(count=count, http_status=200, exitcode=0,
                      remote_port=env.http_port, remote_ip='127.0.0.1')
        for idx, s in enumerate(r.stats):
            self.check_stat(idx, s, r, dl_size=30, ul_size=0)

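    # The helpers below inspect the per-transfer statistics gathered by
    # CurlClient when with_stats=True (presumably curl's --write-out '%{json}'
    # output); only the size_*, time_*, num_connects, num_redirects and
    # url_effective fields are relied upon.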
    def check_stat(self, idx, s, r, dl_size=None, ul_size=None):
        self.check_stat_times(s, idx)
        # we always send something
        self.check_stat_positive(s, idx, 'size_request')
        # we always receive response headers
        self.check_stat_positive(s, idx, 'size_header')
        if ul_size is not None:
            assert s['size_upload'] == ul_size, f'stat #{idx}\n{r.dump_logs()}'  # the file we sent
        assert s['size_request'] >= s['size_upload'], \
            f'stat #{idx}, "size_request" smaller than "size_upload", {s}\n{r.dump_logs()}'
        if dl_size is not None:
            assert s['size_download'] == dl_size, f'stat #{idx}\n{r.dump_logs()}'  # the file we received

    def check_stat_positive(self, s, idx, key):
        assert key in s, f'stat #{idx} "{key}" missing: {s}'
        assert s[key] > 0, f'stat #{idx} "{key}" not positive: {s}'

    def check_stat_positive_or_0(self, s, idx, key):
        assert key in s, f'stat #{idx} "{key}" missing: {s}'
        assert s[key] >= 0, f'stat #{idx} "{key}" negative: {s}'

    def check_stat_zero(self, s, key):
        assert key in s, f'stat "{key}" missing: {s}'
        assert s[key] == 0, f'stat "{key}" not zero: {s}'

    def check_stat_times(self, s, idx):
        # check timings reported on a transfer for consistency
        url = s['url_effective']
        # connect time is sometimes reported as 0 by openssl-quic (sigh)
        self.check_stat_positive_or_0(s, idx, 'time_connect')
        # all stat keys which report timings
        all_keys = {
            'time_appconnect', 'time_redirect',
            'time_pretransfer', 'time_starttransfer', 'time_total'
        }
        # stat keys where we expect a positive value
        pos_keys = {'time_pretransfer', 'time_starttransfer', 'time_total', 'time_queue'}
        if s['num_connects'] > 0:
            if url.startswith('https:'):
                pos_keys.add('time_appconnect')
        if s['num_redirects'] > 0:
            pos_keys.add('time_redirect')
        zero_keys = all_keys - pos_keys
        # assert all zeros are zeros and the others are positive
        for key in zero_keys:
            self.check_stat_zero(s, key)
        for key in pos_keys:
            self.check_stat_positive(s, idx, key)
        # assert that all timers before "time_pretransfer" are smaller
        for key in ['time_appconnect', 'time_connect', 'time_namelookup']:
            assert s[key] < s['time_pretransfer'], f'time "{key}" larger than ' \
                f'"time_pretransfer": {s}'
        # assert transfer start is after pretransfer
        assert s['time_pretransfer'] <= s['time_starttransfer'], f'"time_pretransfer" '\
            f'greater than "time_starttransfer", {s}'
        # assert that transfer start is before total
        assert s['time_starttransfer'] <= s['time_total'], f'"time_starttransfer" '\
            f'greater than "time_total", {s}'
        if s['num_redirects'] > 0:
            assert s['time_queue'] < s['time_starttransfer'], f'"time_queue" '\
                f'greater or equal to "time_starttransfer", {s}'
        else:
            assert s['time_queue'] <= s['time_starttransfer'], f'"time_queue" '\
                f'greater than "time_starttransfer", {s}'
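
For context, here is a minimal standalone sketch of the same technique, outside the testenv harness: it runs curl with `--write-out '%{json}'` (available in curl 7.70.0 and later), parses the per-transfer statistics, and re-checks a couple of the invariants that TestInfo.check_stat and check_stat_times assert above. The URL and the assumption of a curl binary on PATH are placeholders, not part of the test environment.

import json
import subprocess

def fetch_stats(url: str) -> dict:
    # run curl, discard the body, print the per-transfer stats as one JSON object
    proc = subprocess.run(
        ['curl', '-s', '-o', '/dev/null', '--write-out', '%{json}', url],
        capture_output=True, text=True, check=True)
    return json.loads(proc.stdout)

if __name__ == '__main__':
    s = fetch_stats('https://example.com/')  # placeholder URL
    # the same ordering invariants check_stat_times verifies
    assert s['time_pretransfer'] <= s['time_starttransfer'] <= s['time_total']
    # and the size relation check_stat verifies
    assert s['size_request'] >= s['size_upload']
    print(s['url_effective'], s['remote_ip'], s['remote_port'])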