Skip to content

Commit 00a6ac7

Browse files
author
Lukas Pühringer
authored
Merge pull request #2273 from VickyMerzOwn/develop
enhancement: Add from_data() method to MetaFile
2 parents 016e16c + d3c0e61 commit 00a6ac7

2 files changed

Lines changed: 82 additions & 28 deletions

File tree

tests/test_api.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@
3434
Delegations,
3535
Key,
3636
Metadata,
37+
MetaFile,
3738
Root,
3839
Signature,
3940
Snapshot,
@@ -725,6 +726,29 @@ def test_targetfile_from_data(self) -> None:
725726
targetfile_from_data = TargetFile.from_data(target_file_path, data)
726727
targetfile_from_data.verify_length_and_hashes(data)
727728

729+
def test_metafile_from_data(self) -> None:
    """MetaFile.from_data() derives length and hashes from raw bytes."""
    data = b"Inline test content"

    # A supported algorithm yields a MetaFile whose length/hashes
    # verify against the original data.
    metafile = MetaFile.from_data(1, data, ["sha256"])
    metafile.verify_length_and_hashes(data)
    self.assertEqual(
        metafile,
        MetaFile(
            1,
            19,
            {
                "sha256": "fcee2e6d56ab08eab279016f7db7e4e1d172ccea78e15f4cf8bd939991a418fa"
            },
        ),
    )

    # An unsupported algorithm must raise ValueError. Only the raising
    # call belongs inside the context manager: from_data() raises before
    # returning, so any statement after it would be dead code.
    with self.assertRaises(ValueError):
        MetaFile.from_data(1, data, ["invalid_algorithm"])
751+
728752
def test_targetfile_get_prefixed_paths(self) -> None:
729753
target = TargetFile(100, {"sha256": "abc", "md5": "def"}, "a/b/f.ext")
730754
self.assertEqual(

tuf/api/metadata.py

Lines changed: 58 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -900,6 +900,41 @@ def _validate_length(length: int) -> None:
900900
if length < 0:
901901
raise ValueError(f"Length must be >= 0, got {length}")
902902

903+
@staticmethod
def _get_length_and_hashes(
    data: Union[bytes, IO[bytes]], hash_algorithms: Optional[List[str]]
) -> Tuple[int, Dict[str, str]]:
    """Return ``(length, hashes)`` computed over ``data``.

    ``data`` may be raw bytes or a readable binary stream. ``hashes``
    maps each requested algorithm name to its hex digest; when
    ``hash_algorithms`` is None the securesystemslib default algorithm
    is used.

    Raises:
        ValueError: An algorithm in the list is unsupported.
    """
    is_bytes = isinstance(data, bytes)

    # Length: direct for bytes; for a stream, seek to the end and read
    # the resulting position.
    if is_bytes:
        length = len(data)
    else:
        data.seek(0, io.SEEK_END)
        length = data.tell()

    # Fall back to the securesystemslib default when no algorithms given.
    algorithms = (
        [sslib_hash.DEFAULT_HASH_ALGORITHM]
        if hash_algorithms is None
        else hash_algorithms
    )

    hashes = {}
    for name in algorithms:
        try:
            if is_bytes:
                digest = sslib_hash.digest(name)
                digest.update(data)
            else:
                digest = sslib_hash.digest_fileobject(data, name)
        except (
            sslib_exceptions.UnsupportedAlgorithmError,
            sslib_exceptions.FormatError,
        ) as e:
            # Normalize securesystemslib errors into ValueError for callers.
            raise ValueError(f"Unsupported algorithm '{name}'") from e

        hashes[name] = digest.hexdigest()

    return (length, hashes)
937+
903938

904939
class MetaFile(BaseFile):
905940
"""A container with information about a particular metadata file.
@@ -966,6 +1001,28 @@ def from_dict(cls, meta_dict: Dict[str, Any]) -> "MetaFile":
9661001
# All fields left in the meta_dict are unrecognized.
9671002
return cls(version, length, hashes, meta_dict)
9681003

1004+
@classmethod
def from_data(
    cls,
    version: int,
    data: Union[bytes, IO[bytes]],
    # Default of None matches the docstring ("If not specified, ...") and
    # the equivalent TargetFile.from_data() signature; adding a default is
    # backward compatible for existing callers that pass the argument.
    hash_algorithms: Optional[List[str]] = None,
) -> "MetaFile":
    """Creates MetaFile object from bytes.

    This constructor should only be used if hashes are wanted.
    By default, MetaFile(ver) should be used.

    Args:
        version: Version of the metadata file.
        data: Metadata bytes that the metafile represents.
        hash_algorithms: Hash algorithms to create the hashes with. If not
            specified, the securesystemslib default hash algorithm is used.

    Raises:
        ValueError: The hash algorithms list contains an unsupported
            algorithm.
    """
    length, hashes = cls._get_length_and_hashes(data, hash_algorithms)
    return cls(version, length, hashes)
1025+
9691026
def to_dict(self) -> Dict[str, Any]:
9701027
"""Return the dictionary representation of self."""
9711028
res_dict: Dict[str, Any] = {
@@ -1693,34 +1750,7 @@ def from_data(
16931750
ValueError: The hash algorithms list contains an unsupported
16941751
algorithm.
16951752
"""
1696-
if isinstance(data, bytes):
1697-
length = len(data)
1698-
else:
1699-
data.seek(0, io.SEEK_END)
1700-
length = data.tell()
1701-
1702-
hashes = {}
1703-
1704-
if hash_algorithms is None:
1705-
hash_algorithms = [sslib_hash.DEFAULT_HASH_ALGORITHM]
1706-
1707-
for algorithm in hash_algorithms:
1708-
try:
1709-
if isinstance(data, bytes):
1710-
digest_object = sslib_hash.digest(algorithm)
1711-
digest_object.update(data)
1712-
else:
1713-
digest_object = sslib_hash.digest_fileobject(
1714-
data, algorithm
1715-
)
1716-
except (
1717-
sslib_exceptions.UnsupportedAlgorithmError,
1718-
sslib_exceptions.FormatError,
1719-
) as e:
1720-
raise ValueError(f"Unsupported algorithm '{algorithm}'") from e
1721-
1722-
hashes[algorithm] = digest_object.hexdigest()
1723-
1753+
length, hashes = cls._get_length_and_hashes(data, hash_algorithms)
17241754
return cls(length, hashes, target_file_path)
17251755

17261756
def verify_length_and_hashes(self, data: Union[bytes, IO[bytes]]) -> None:

0 commit comments

Comments
 (0)