diff --git a/swh/model/hashutil.py b/swh/model/hashutil.py
index 75d9f8b40d6ea1a5379e96034c68b2f0b227d88b..8d2cb908e35f9bb7fc85e316b40f8750c0f562d4 100644
--- a/swh/model/hashutil.py
+++ b/swh/model/hashutil.py
@@ -58,7 +58,9 @@ from io import BytesIO
 import os
 from typing import Callable, Dict, Optional, Union
 
-ALGORITHMS = set(["sha1", "sha256", "sha1_git", "blake2s256", "blake2b512", "md5"])
+ALGORITHMS = set(
+    ["sha1", "sha256", "sha1_git", "blake2s256", "blake2b512", "md5", "sha512"]
+)
 """Hashing algorithms supported by this module"""
 
 DEFAULT_ALGORITHMS = set(["sha1", "sha256", "sha1_git", "blake2s256"])
diff --git a/swh/model/tests/test_hashutil.py b/swh/model/tests/test_hashutil.py
index 1ab28124d25d30f9d6581896f22ea5fd0f0156ff..b279f2fa9d15bf85ab2ffa8a32deefc27545332a 100644
--- a/swh/model/tests/test_hashutil.py
+++ b/swh/model/tests/test_hashutil.py
@@ -112,34 +112,44 @@ def test_multi_hash_file_bytehexdigest(hash_test_data):
     assert checksums == hash_test_data.bytehex_checksums
 
 
-def test_multi_hash_file_with_md5(hash_test_data):
+EXTRA_HASH_ALGOS = ["md5", "sha512"]
+
+
+@pytest.mark.parametrize("hash_algo", EXTRA_HASH_ALGOS)
+def test_multi_hash_file_with_extra_hash_algo(hash_test_data, hash_algo):
     fobj = io.BytesIO(hash_test_data.data)
 
     checksums = MultiHash.from_file(
-        fobj, hash_names=DEFAULT_ALGORITHMS | {"md5"}, length=len(hash_test_data.data)
+        fobj,
+        hash_names=DEFAULT_ALGORITHMS | {hash_algo},
+        length=len(hash_test_data.data),
     ).digest()
-    md5sum = {"md5": hashlib.md5(hash_test_data.data).digest()}
-    assert checksums == {**hash_test_data.checksums, **md5sum}
+    checksum = {hash_algo: hashlib.new(hash_algo, hash_test_data.data).digest()}
+    assert checksums == {**hash_test_data.checksums, **checksum}
 
 
-def test_multi_hash_file_hexdigest_with_md5(hash_test_data):
+@pytest.mark.parametrize("hash_algo", EXTRA_HASH_ALGOS)
+def test_multi_hash_file_hexdigest_with_extra_hash_algo(hash_test_data, hash_algo):
     fobj = io.BytesIO(hash_test_data.data)
     length = len(hash_test_data.data)
     checksums = MultiHash.from_file(
-        fobj, hash_names=DEFAULT_ALGORITHMS | {"md5"}, length=length
+        fobj, hash_names=DEFAULT_ALGORITHMS | {hash_algo}, length=length
     ).hexdigest()
-    md5sum = {"md5": hashlib.md5(hash_test_data.data).hexdigest()}
-    assert checksums == {**hash_test_data.hex_checksums, **md5sum}
+    checksum = {hash_algo: hashlib.new(hash_algo, hash_test_data.data).hexdigest()}
+    assert checksums == {**hash_test_data.hex_checksums, **checksum}
 
 
-def test_multi_hash_file_bytehexdigest_with_md5(hash_test_data):
+@pytest.mark.parametrize("hash_algo", EXTRA_HASH_ALGOS)
+def test_multi_hash_file_bytehexdigest_with_extra_hash_algo(hash_test_data, hash_algo):
     fobj = io.BytesIO(hash_test_data.data)
     length = len(hash_test_data.data)
     checksums = MultiHash.from_file(
-        fobj, hash_names=DEFAULT_ALGORITHMS | {"md5"}, length=length
+        fobj, hash_names=DEFAULT_ALGORITHMS | {hash_algo}, length=length
     ).bytehexdigest()
-    md5sum = {"md5": hash_to_bytehex(hashlib.md5(hash_test_data.data).digest())}
-    assert checksums == {**hash_test_data.bytehex_checksums, **md5sum}
+    checksum = {
+        hash_algo: hash_to_bytehex(hashlib.new(hash_algo, hash_test_data.data).digest())
+    }
+    assert checksums == {**hash_test_data.bytehex_checksums, **checksum}
 
 
 def test_multi_hash_file_missing_length(hash_test_data):
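
A quick sketch (not part of the patch) of what the change enables: with "sha512" added to ALGORITHMS, MultiHash should accept it alongside the defaults and agree with hashlib, which is exactly what the parametrized tests above check. The import path is assumed from the file touched above; everything else reuses only the API already shown in the tests.

# Minimal, assumption-laden sketch; mirrors the test flow rather than defining new API.
import hashlib
import io

from swh.model.hashutil import DEFAULT_ALGORITHMS, MultiHash  # path assumed from the diff

data = b"something to hash"
fobj = io.BytesIO(data)

# Request the default algorithms plus the newly allowed sha512.
checksums = MultiHash.from_file(
    fobj, hash_names=DEFAULT_ALGORITHMS | {"sha512"}, length=len(data)
).hexdigest()

# The sha512 entry should match what hashlib computes directly.
assert checksums["sha512"] == hashlib.new("sha512", data).hexdigest()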