#!/usr/bin/env python

# Copyright 2020, New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0
"""Unit tests for api/metadata.py"""

import copy
import json
import logging
import os
import shutil
import sys
import tempfile
import unittest

from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta

# TODO: Remove case handling when fully dropping support for versions < 3.6
IS_PY_VERSION_SUPPORTED = sys.version_info >= (3, 6)

# Use setUpModule to tell unittest runner to skip this test module gracefully.
def setUpModule():
    if not IS_PY_VERSION_SUPPORTED:
        raise unittest.SkipTest('requires Python 3.6 or higher')

# Since setUpModule is called after imports, we need to import conditionally.
if IS_PY_VERSION_SUPPORTED:
    import tuf.exceptions
    from tuf.api.metadata import (
        Metadata,
        Snapshot,
        Timestamp,
        Targets
    )

    from securesystemslib.interface import (
        import_ed25519_publickey_from_file,
        import_ed25519_privatekey_from_file
    )

logger = logging.getLogger(__name__)


class TestMetadata(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # Create a temporary directory to store the repository, metadata, and
        # target files. 'temporary_directory' must be deleted in
        # tearDownClass() so that temporary files are always removed, even
        # when exceptions occur.
        cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())

        test_repo_data = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'repository_data')

        cls.repo_dir = os.path.join(cls.temporary_directory, 'repository')
        shutil.copytree(
            os.path.join(test_repo_data, 'repository'), cls.repo_dir)

        cls.keystore_dir = os.path.join(cls.temporary_directory, 'keystore')
        shutil.copytree(
            os.path.join(test_repo_data, 'keystore'), cls.keystore_dir)

        # Load keys into memory.
        cls.keystore = {}
        for role in ['delegation', 'snapshot', 'targets', 'timestamp']:
            cls.keystore[role] = {
                'private': import_ed25519_privatekey_from_file(
                    os.path.join(cls.keystore_dir, role + '_key'),
                    password="password"),
                'public': import_ed25519_publickey_from_file(
                    os.path.join(cls.keystore_dir, role + '_key.pub'))
            }


    @classmethod
    def tearDownClass(cls):
        # Remove the temporary repository directory, which should contain all
        # the metadata, targets, and key files generated for the test cases.
        shutil.rmtree(cls.temporary_directory)


    def test_generic_read(self):
        for metadata, inner_metadata_cls in [
                ('snapshot', Snapshot),
                ('timestamp', Timestamp),
                ('targets', Targets)]:

            # Load JSON-formatted metadata of each supported type from file
            # and from an out-of-band read JSON string ...
            path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
            metadata_obj = Metadata.from_json_file(path)
            with open(path, 'rb') as f:
                metadata_str = f.read()
            metadata_obj2 = Metadata.from_json(metadata_str)

            # ... assert that both methods instantiate the right inner class
            # for each metadata type ...
            self.assertIsInstance(metadata_obj.signed, inner_metadata_cls)
            self.assertIsInstance(metadata_obj2.signed, inner_metadata_cls)

            # ... and return the same object (compared by dict
            # representation).
            self.assertDictEqual(
                metadata_obj.to_dict(), metadata_obj2.to_dict())


        # Assert that it chokes correctly on an unknown metadata type.
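        # (from_json_file dispatches on the '_type' field of 'signed' to
        # choose the inner class, so an unrecognized type should raise
        # ValueError.)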
        bad_metadata_path = 'bad-metadata.json'
        bad_metadata = {'signed': {'_type': 'bad-metadata'}}
        with open(bad_metadata_path, 'wb') as f:
            f.write(json.dumps(bad_metadata).encode('utf-8'))

        with self.assertRaises(ValueError):
            Metadata.from_json_file(bad_metadata_path)

        os.remove(bad_metadata_path)


    def test_compact_json(self):
        path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
        metadata_obj = Metadata.from_json_file(path)
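        # The compact representation should omit the whitespace of the
        # default pretty-printed encoding, and thus be strictly shorter.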
        self.assertLess(
            len(metadata_obj.to_json(compact=True)),
            len(metadata_obj.to_json()))


    def test_read_write_read_compare(self):
        for metadata in ['snapshot', 'timestamp', 'targets']:
            path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
            metadata_obj = Metadata.from_json_file(path)

            path_2 = path + '.tmp'
            metadata_obj.to_json_file(path_2)
            metadata_obj_2 = Metadata.from_json_file(path_2)

            self.assertDictEqual(
                metadata_obj.to_dict(),
                metadata_obj_2.to_dict())

            os.remove(path_2)


    def test_sign_verify(self):
        # Load sample metadata (targets) and assert ...
        path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
        metadata_obj = Metadata.from_json_file(path)

        # ... it has a single existing signature,
        self.assertEqual(len(metadata_obj.signatures), 1)
        # ... which is valid for the correct key.
        self.assertTrue(metadata_obj.verify(
            self.keystore['targets']['public']))

        # Append a new signature with an unrelated key and assert that ...
        metadata_obj.sign(self.keystore['snapshot']['private'], append=True)
        # ... there are now two signatures, and
        self.assertEqual(len(metadata_obj.signatures), 2)
        # ... both are valid for the corresponding keys.
        self.assertTrue(metadata_obj.verify(
            self.keystore['targets']['public']))
        self.assertTrue(metadata_obj.verify(
            self.keystore['snapshot']['public']))

        # Create and assign (don't append) a new signature and assert that ...
        metadata_obj.sign(self.keystore['timestamp']['private'], append=False)
        # ... there is now only one signature,
        self.assertEqual(len(metadata_obj.signatures), 1)
        # ... valid for that key.
        self.assertTrue(metadata_obj.verify(
            self.keystore['timestamp']['public']))

        # Assert exception if there is more than one signature for a key.
        metadata_obj.sign(self.keystore['timestamp']['private'], append=True)
        with self.assertRaises(tuf.exceptions.Error) as ctx:
            metadata_obj.verify(self.keystore['timestamp']['public'])
        self.assertIn('2 signatures for key', str(ctx.exception))

        # Assert exception if there is no signature for a key.
        with self.assertRaises(tuf.exceptions.Error) as ctx:
            metadata_obj.verify(self.keystore['targets']['public'])
        self.assertIn('no signature for', str(ctx.exception))


    def test_metadata_base(self):
        # Use of Snapshot is arbitrary; we're just testing the base class
        # features with real data.
        snapshot_path = os.path.join(
            self.repo_dir, 'metadata', 'snapshot.json')
        md = Metadata.from_json_file(snapshot_path)

        self.assertEqual(md.signed.version, 1)
        md.signed.bump_version()
        self.assertEqual(md.signed.version, 2)
        self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0))
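        # With no argument, bump_expiration() advances the expiry by one day,
        # as the following assertion shows.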
        md.signed.bump_expiration()
        self.assertEqual(md.signed.expires, datetime(2030, 1, 2, 0, 0))
        md.signed.bump_expiration(timedelta(days=365))
        self.assertEqual(md.signed.expires, datetime(2031, 1, 2, 0, 0))


    def test_metadata_snapshot(self):
        snapshot_path = os.path.join(
            self.repo_dir, 'metadata', 'snapshot.json')
        snapshot = Metadata.from_json_file(snapshot_path)

        # Create a deep copy representing what we expect the updated data to
        # be. (A plain reference to snapshot.signed.meta would make the
        # equality assertion below vacuous.)
        fileinfo = copy.deepcopy(snapshot.signed.meta)
        hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'}
        fileinfo['role1.json']['version'] = 2
        fileinfo['role1.json']['hashes'] = hashes
        fileinfo['role1.json']['length'] = 123

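        # Snapshot.update() takes the role name without the '.json' suffix,
        # followed by the new version, length and hashes for that role.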
        snapshot.signed.update('role1', 2, 123, hashes)
        self.assertEqual(snapshot.signed.meta, fileinfo)


    def test_metadata_timestamp(self):
        timestamp_path = os.path.join(
            self.repo_dir, 'metadata', 'timestamp.json')
        timestamp = Metadata.from_json_file(timestamp_path)

        self.assertEqual(timestamp.signed.version, 1)
        timestamp.signed.bump_version()
        self.assertEqual(timestamp.signed.version, 2)

        self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 1, 0, 0))
        timestamp.signed.bump_expiration()
        self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 2, 0, 0))
        timestamp.signed.bump_expiration(timedelta(days=365))
        self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 2, 0, 0))

        # Test that dateutil.relativedelta works as well; it provides a much
        # easier-to-use interface for callers.
        delta = relativedelta(days=1)
        timestamp.signed.bump_expiration(delta)
        self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 3, 0, 0))
        delta = relativedelta(years=5)
        timestamp.signed.bump_expiration(delta)
        self.assertEqual(timestamp.signed.expires, datetime(2036, 1, 3, 0, 0))

        hashes = {'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'}
        # Copy the current fileinfo so the comparison below is meaningful.
        fileinfo = copy.deepcopy(timestamp.signed.meta['snapshot.json'])
        fileinfo['hashes'] = hashes
        fileinfo['version'] = 2
        fileinfo['length'] = 520
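        # Timestamp metadata tracks only snapshot.json, so update() takes no
        # role name, just the new version, length and hashes.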
        timestamp.signed.update(2, 520, hashes)
        self.assertEqual(timestamp.signed.meta['snapshot.json'], fileinfo)

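
    def test_sign_write_read_verify(self):
        # A minimal round-trip sketch tying together the operations exercised
        # above: load metadata, replace its signature with one from the
        # matching key, write it out, reload it and verify. It assumes only
        # the API calls already used in this module.
        path = os.path.join(self.repo_dir, 'metadata', 'snapshot.json')
        metadata_obj = Metadata.from_json_file(path)
        metadata_obj.sign(self.keystore['snapshot']['private'], append=False)

        path_2 = path + '.tmp'
        metadata_obj.to_json_file(path_2)
        metadata_obj_2 = Metadata.from_json_file(path_2)
        self.assertTrue(metadata_obj_2.verify(
            self.keystore['snapshot']['public']))

        os.remove(path_2)
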

# Run unit test.
if __name__ == '__main__':
    unittest.main()