Skip to content
Snippets Groups Projects
Commit c58f32f6 authored by Jenkins for Software Heritage's avatar Jenkins for Software Heritage
Browse files

Merge tag 'debian/6.5.1-1_swh1' into debian/buster-swh

parents c5b48ef7 1dce5f13
No related branches found
No related tags found
No related merge requests found
Metadata-Version: 2.1
Name: swh.model
Version: 6.5.0
Version: 6.5.1
Summary: Software Heritage data model
Home-page: https://forge.softwareheritage.org/diffusion/DMOD/
Author: Software Heritage developers
......
swh-model (6.5.0-1~swh1~bpo10+1) buster-swh; urgency=medium
swh-model (6.5.1-1~swh1) unstable-swh; urgency=medium
* Rebuild for buster-swh
* New upstream release 6.5.1 - (tagged by Antoine Lambert
<anlambert@softwareheritage.org> on 2022-09-30 15:23:14 +0200)
* Upstream changes: - version 6.5.1
-- Software Heritage autobuilder (on jenkins-debian1) <jenkins@jenkins-debian1.internal.softwareheritage.org> Mon, 26 Sep 2022 16:17:36 +0000
-- Software Heritage autobuilder (on jenkins-debian1) <jenkins@jenkins-debian1.internal.softwareheritage.org> Fri, 30 Sep 2022 13:27:54 +0000
swh-model (6.5.0-1~swh1) unstable-swh; urgency=medium
......
Metadata-Version: 2.1
Name: swh.model
Version: 6.5.0
Version: 6.5.1
Summary: Software Heritage data model
Home-page: https://forge.softwareheritage.org/diffusion/DMOD/
Author: Software Heritage developers
......
......@@ -300,16 +300,20 @@ def extract_regex_objs(
Args:
root_path (bytes): path to the root directory
patterns (list of byte): patterns to match
patterns (list of byte): shell patterns to match
Yields:
an SRE_Pattern object
"""
absolute_root_path = os.path.abspath(root_path)
for pattern in patterns:
for path in glob.glob(pattern):
absolute_path = os.path.abspath(path)
if not absolute_path.startswith(absolute_root_path):
if os.path.isabs(pattern):
pattern = os.path.relpath(pattern, root_path)
# python 3.10 has a `root_dir` argument for glob, but not the previous
# version. So we adjust the pattern
test_pattern = os.path.join(absolute_root_path, pattern)
for path in glob.glob(test_pattern):
if os.path.isabs(path) and not path.startswith(absolute_root_path):
error_msg = (
b'The path "' + path + b'" is not a subdirectory or relative '
b'to the root directory path: "' + root_path + b'"'
......@@ -326,7 +330,7 @@ def ignore_directories_patterns(root_path: bytes, patterns: Iterable[bytes]):
Args:
root_path (bytes): path of the root directory
patterns (list of byte): patterns to ignore
patterns (list of bytes): patterns to ignore
Returns:
a directory filter for :func:`directory_to_objects`
......
......@@ -58,7 +58,9 @@ from io import BytesIO
import os
from typing import Callable, Dict, Optional, Union
# Hashing algorithms supported by this module. The scraped diff interleaved
# the pre- and post-merge assignments; this is the resolved post-merge form
# (sha512 was added in 6.5.1 alongside the existing algorithms).
ALGORITHMS = set(
    ["sha1", "sha256", "sha1_git", "blake2s256", "blake2b512", "md5", "sha512"]
)
"""Hashing algorithms supported by this module"""

# Subset of ALGORITHMS computed by default for Software Heritage objects.
DEFAULT_ALGORITHMS = set(["sha1", "sha256", "sha1_git", "blake2s256"])
......
......@@ -112,34 +112,44 @@ def test_multi_hash_file_bytehexdigest(hash_test_data):
assert checksums == hash_test_data.bytehex_checksums
# Extra algorithms (beyond DEFAULT_ALGORITHMS) exercised by the tests below.
EXTRA_HASH_ALGOS = ["md5", "sha512"]


@pytest.mark.parametrize("hash_algo", EXTRA_HASH_ALGOS)
def test_multi_hash_file_with_extra_hash_algo(hash_test_data, hash_algo):
    """digest() with one extra algorithm yields the default checksums plus
    the extra algorithm's raw digest.

    Resolved from the interleaved pre/post-merge diff lines: the old
    md5-only test was generalized to a parametrized one using hashlib.new
    so the expected digest is computed uniformly for any algorithm name.
    """
    fobj = io.BytesIO(hash_test_data.data)

    checksums = MultiHash.from_file(
        fobj,
        hash_names=DEFAULT_ALGORITHMS | {hash_algo},
        length=len(hash_test_data.data),
    ).digest()

    # Expected extra entry, computed independently via hashlib.
    checksum = {hash_algo: hashlib.new(hash_algo, hash_test_data.data).digest()}
    assert checksums == {**hash_test_data.checksums, **checksum}
@pytest.mark.parametrize("hash_algo", EXTRA_HASH_ALGOS)
def test_multi_hash_file_hexdigest_with_extra_hash_algo(hash_test_data, hash_algo):
    """hexdigest() with one extra algorithm yields the default hex checksums
    plus the extra algorithm's hex digest.

    Resolved from the interleaved pre/post-merge diff lines of the scraped
    page; this is the coherent parametrized (post-merge) version.
    """
    fobj = io.BytesIO(hash_test_data.data)
    length = len(hash_test_data.data)

    checksums = MultiHash.from_file(
        fobj, hash_names=DEFAULT_ALGORITHMS | {hash_algo}, length=length
    ).hexdigest()

    # Expected extra entry, computed independently via hashlib.
    checksum = {hash_algo: hashlib.new(hash_algo, hash_test_data.data).hexdigest()}
    assert checksums == {**hash_test_data.hex_checksums, **checksum}
@pytest.mark.parametrize("hash_algo", EXTRA_HASH_ALGOS)
def test_multi_hash_file_bytehexdigest_with_extra_algo(hash_test_data, hash_algo):
    """bytehexdigest() with one extra algorithm yields the default byte-hex
    checksums plus the extra algorithm's digest converted via hash_to_bytehex.

    Resolved from the interleaved pre/post-merge diff lines of the scraped
    page; this is the coherent parametrized (post-merge) version.
    """
    fobj = io.BytesIO(hash_test_data.data)
    length = len(hash_test_data.data)

    checksums = MultiHash.from_file(
        fobj, hash_names=DEFAULT_ALGORITHMS | {hash_algo}, length=length
    ).bytehexdigest()

    # Expected extra entry: raw digest converted to bytes-hex form.
    checksum = {
        hash_algo: hash_to_bytehex(hashlib.new(hash_algo, hash_test_data.data).digest())
    }
    assert checksums == {**hash_test_data.bytehex_checksums, **checksum}
def test_multi_hash_file_missing_length(hash_test_data):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment