quickjs-tart

quickjs-based runtime for wallet-core logic

generate_pkcs7_tests.py (7066B)


#!/usr/bin/env python3
#
#  Copyright The Mbed TLS Contributors
#  SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
#

"""
Make fuzz-like tests for pkcs7 parsing.
Given a valid DER pkcs7 file, add tests to the test_suite_pkcs7.data file.
 - It is expected that the pkcs7_asn1_fail( data_t *pkcs7_buf )
    function is defined in test_suite_pkcs7.function
 - This is not meant to be portable code; if anything, it is meant to serve as
   documentation showing how those ugly tests in test_suite_pkcs7.data were created
"""

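# Example invocation (the .der file name is illustrative; any valid DER-encoded
# pkcs7 file should work):
#
#     ./generate_pkcs7_tests.py pkcs7_signed_data.der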

import sys
from os.path import exists
from mbedtls_framework import test_case

PKCS7_TEST_FILE = "../suites/test_suite_pkcs7.data"

class Test: # pylint: disable=too-few-public-methods
    """
    An instance of a test in test_suite_pkcs7.data
    """
    def __init__(self, name, depends, func_call):
        self.name = name
        self.depends = depends
        self.func_call = func_call

    # pylint: disable=no-self-use
    def to_string(self):
        return "\n" + self.name + "\n" + self.depends + "\n" + self.func_call + "\n"

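# A Test serializes (via to_string) to one three-line entry in the .data file.
# An illustrative sketch of the shape produced once TestData.add (below) fills
# in the fields -- the values here are made up, not copied from the real
# test_suite_pkcs7.data:
#
#     PKCS7 Parse Failure Invalid ASN1: Add null byte to start #15.0
#     depends_on:<macro returned by test_case.psa_or_3_6_feature_macro>
#     pkcs7_asn1_fail:"003082..."
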
class TestData:
    """
    Take in the test_suite_pkcs7.data file.
    Allow new tests to be added.
    """
    mandatory_dep = test_case.psa_or_3_6_feature_macro("PSA_ALG_SHA_256",
                                                       test_case.Domain36.USE_PSA)

    test_name = "PKCS7 Parse Failure Invalid ASN1"
    test_function = "pkcs7_asn1_fail:"
    def __init__(self, file_name):
        self.file_name = file_name
        self.last_test_num, self.old_tests = self.read_test_file(file_name)
        self.new_tests = []

    # pylint: disable=no-self-use
    def read_test_file(self, file):
        """
        Parse the test_suite_pkcs7.data file.
        """
        tests = []
        if not exists(file):
            print(file + " does not exist")
            sys.exit()
        with open(file, "r", encoding='UTF-8') as fp:
            data = fp.read()
        lines = [line.strip() for line in data.split('\n') if len(line.strip()) > 1]
        i = 0
        while i < len(lines):
            if "depends" in lines[i+1]:
                tests.append(Test(lines[i], lines[i+1], lines[i+2]))
                i += 3
            else:
                tests.append(Test(lines[i], None, lines[i+1]))
                i += 2
        latest_test_num = float(tests[-1].name.split('#')[1])
        return latest_test_num, tests

    def add(self, name, func_call):
        self.last_test_num += 1
        self.new_tests.append(Test(self.test_name + ": " + name +  " #" + \
                str(self.last_test_num), "depends_on:" + self.mandatory_dep, \
                self.test_function + '"' + func_call + '"'))

    def write_changes(self):
        with open(self.file_name, 'a', encoding='UTF-8') as fw:
            fw.write("\n")
            for t in self.new_tests:
                fw.write(t.to_string())


def asn1_mutate(data):
    """
    We have been given an asn1 structure representing a pkcs7.
    We want to return an array of slightly modified versions of this data;
    they should be modified in a way which makes the structure invalid.

    We know that asn1 structures are:
    |---1 byte showing data type---|----byte(s) for length of data---|---data content--|
    We know that some data types can contain other data types.
    Return a list of (reason, mutated data) pairs.
    """

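    # A worked example of the layout described above (a minimal, made-up DER
    # fragment, not taken from any real pkcs7 file):
    #
    #     30 06 02 01 01 02 01 02
    #
    # 30 = SEQUENCE tag, 06 = length of its contents (6 bytes); the contents
    # are two INTEGERs, each with tag 02, length 01, and a one-byte value.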
    # off the bat just add bytes to start and end of the buffer
    mutations = []
    reasons = []
    mutations.append(["00"] + data)
    reasons.append("Add null byte to start")
    mutations.append(data + ["00"])
    reasons.append("Add null byte to end")
    # for every asn1 entry we should attempt to:
    #    - change the data type tag
    #    - make the length longer than actual
    #    - make the length shorter than actual
    i = 0
    while i < len(data):
        tag_i = i
        leng_i = tag_i + 1
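        # DER length encoding: a first length byte below 0x80 is the length
        # itself (short form); a byte of the form 0x8N means the next N bytes
        # hold the length (long form). Either way, data_i below ends up
        # pointing at the first content byte.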
        data_i = leng_i + 1 + (int(data[leng_i][1], 16) if data[leng_i][0] == '8' else 0)
        if data[leng_i][0] == '8':
            length = int(''.join(data[leng_i + 1: data_i]), 16)
        else:
            length = int(data[leng_i], 16)

        tag = data[tag_i]
        print("Looking at asn1: offset " + str(i) + " tag = " + tag + \
                ", length = " + str(length) + ":")
        print(''.join(data[data_i:data_i+length]))
        # change tag to something else
        if tag == "02":
            # turn integers into octet strings
            new_tag = "04"
        else:
            # turn everything else into an integer
            new_tag = "02"
        mutations.append(data[:tag_i] + [new_tag] + data[leng_i:])
        reasons.append("Change tag " + tag + " to " + new_tag)

        # change lengths to too big
        # skip any edge cases which would cause carry over
        if int(data[data_i - 1], 16) < 255:
            new_length = str(hex(int(data[data_i - 1], 16) + 1))[2:]
            if len(new_length) == 1:
                new_length = "0"+new_length
            mutations.append(data[:data_i -1] + [new_length] + data[data_i:])
            reasons.append("Change length from " + str(length) + " to " \
                    + str(length + 1))
            # we can add another test here for tags that contain other tags
            # where they have more data than their containing tags account for
            if tag in ["30", "a0", "31"]:
                mutations.append(data[:data_i -1] + [new_length] + \
                        data[data_i:data_i + length] + ["00"] + \
                        data[data_i + length:])
                reasons.append("Change contents of tag " + tag + " to contain \
                        one unaccounted extra byte")
        # change lengths to too small
        if int(data[data_i - 1], 16) > 0:
            new_length = str(hex(int(data[data_i - 1], 16) - 1))[2:]
            if len(new_length) == 1:
                new_length = "0"+new_length
            mutations.append(data[:data_i -1] + [new_length] + data[data_i:])
            reasons.append("Change length from " + str(length) + " to " + str(length - 1))

        # some tag types contain other tag types so we should iterate into the data
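        # (30 = SEQUENCE, 31 = SET, a0 = constructed context-specific [0]:
        # constructed types whose content is itself a series of tag/length/value
        # entries, so descend into it rather than skipping over it)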
        if tag in ["30", "a0", "31"]:
            i = data_i
        else:
            i = data_i + length

    return list(zip(reasons, mutations))

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("USAGE: " + sys.argv[0] + " <pkcs7_der_file>")
        sys.exit()

    DATA_FILE = sys.argv[1]
    TEST_DATA = TestData(PKCS7_TEST_FILE)
    with open(DATA_FILE, 'rb') as f:
        DATA_STR = f.read().hex()
    # make data an array of byte strings eg ['de','ad','be','ef']
    HEX_DATA = list(map(''.join, [[DATA_STR[i], DATA_STR[i+1]] for i in range(0, len(DATA_STR), \
            2)]))
    # returns tuples of test_names and modified data buffers
    MUT_ARR = asn1_mutate(HEX_DATA)

    print("made " + str(len(MUT_ARR)) + " new tests")
    for new_test in MUT_ARR:
        TEST_DATA.add(new_test[0], ''.join(new_test[1]))

    TEST_DATA.write_changes()