Skip to content

Utils

qa_testing_utils

__all__ = ['ContainsStringIgnoringCase', 'Context', 'FromTupleMixin', 'ImmutableMixin', 'InvalidValueException', 'IsIteratorYielding', 'IsIteratorYieldingAll', 'IsStreamContainingEvery', 'IsWithinDates', 'IterableReader', 'LoggerMixin', 'SingletonBase', 'SingletonMeta', 'TestException', 'ThreadLocal', 'ToDictMixin', 'TracingMatcher', 'Valid', 'WithMixin', 'adapted_iterator', 'adapted_object', 'adapted_sequence', 'classproperty', 'configure', 'contains_string_ignoring_case', 'crc32_of', 'decompress_xz_stream', 'extract_files_from_tar', 'get_test_body', 'logger', 'makereport', 'match_as', 'process_next', 'read_lines', 'require_not_none', 'safely', 'sleep_for', 'stream_file', 'swallow', 'to_string', 'trace', 'tracing', 'valid', 'within_dates', 'write_csv', 'yields_every', 'yields_item', 'yields_items'] module-attribute

ContainsStringIgnoringCase

Bases: BaseMatcher[str]

Matcher that checks if a string contains a given substring, ignoring case.

Parameters:

Name Type Description Default
substring str

The substring to search for (case-insensitive).

required
Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
@final
class ContainsStringIgnoringCase(BaseMatcher[str]):
    """
    Hamcrest matcher asserting that a string contains a substring,
    comparing case-insensitively.

    Args:
        substring (str): The substring to look for; matching ignores case.
    """

    def __init__(self, substring: str) -> None:
        # Lower-case once up front so every match is a single containment test.
        self.substring: str = substring.lower()

    def _matches(self, item: Any) -> bool:
        # Non-strings can never match.
        return isinstance(item, str) and self.substring in item.lower()

    def describe_to(self, description: Description) -> None:
        message = f"a string containing (case-insensitive) '{self.substring}'"
        description.append_text(message)

substring = substring.lower() instance-attribute

__init__(substring)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
73
74
def __init__(self, substring: str) -> None:
    self.substring: str = substring.lower()

describe_to(description)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
81
82
83
def describe_to(self, description: Description) -> None:
    description.append_text(
        f"a string containing (case-insensitive) '{self.substring}'")

Context dataclass

Per-thread context for reporting and logging, allowing dynamic formatting of messages.

Source code in qa-testing-utils/src/qa_testing_utils/logger.py
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
@dataclass
@final
class Context:
    """Per-thread context for reporting and logging, allowing dynamic formatting of messages."""
    # Holds the active Context per thread; expected to be initialized
    # elsewhere in this module.
    _THREAD_LOCAL: ClassVar[ThreadLocal['Context']]
    # Formatting hook applied to every traced message on the current thread.
    _formatter: Final[Callable[[str], str]]

    @classmethod
    def default(cls) -> "Context":
        """
        Returns a default Context instance with a no-op formatter.

        Returns:
            Context: A Context instance with the identity formatter.
        """
        return cls(lambda _: _)  # no formatter

    @classproperty
    def _format(cls) -> Callable[[str], str]:
        # Resolves the current thread's formatter on every access.
        return cls._THREAD_LOCAL.get()._formatter

    @classmethod
    def set(cls, context_fn: Callable[[str], str]) -> None:
        """Sets per-thread context function to be used for formatting report and log messages."""
        cls._THREAD_LOCAL.set(Context(context_fn))

    @classmethod
    def traced(cls, func: Callable[_P, _R]) -> Callable[_P, _R]:
        """
        Decorator to log function entry, arguments, and return value at DEBUG level.

        Also adds an Allure step for reporting. Use on methods where tracing is useful
        for debugging or reporting.

        Example:
            @Context.traced
            def my_method(self, x):
                ...

        Args:
            func (Callable[P, R]): The function to be decorated.
            *args (Any): Positional arguments to be passed to the function.
            **kwargs (Any): Keyword arguments to be passed to the function.

        Returns:
            Callable[P, R]: The result of the function call.
        """
        @wraps(func)
        def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R:
            # NOTE: each time a decorated function is called this logic will be
            # re-evaluated.
            signature = inspect.signature(func)
            parameters = list(signature.parameters.keys())

            # Heuristic: treat the callable as a bound method when its first
            # declared parameter is literally named 'self' and a positional
            # argument is present to serve as the instance.
            if parameters and parameters[0] == 'self' and len(args) > 0:
                instance = args[0]
                # Logger is named after the instance's class, not the module.
                logger = logging.getLogger(f"{instance.__class__.__name__}")
                # NOTE(review): kwargs are joined with LF here, while
                # positional args use ", " — presumably intentional; confirm.
                logger.debug(f">>> "
                             + cls._format(
                                 f"{func.__name__} "
                                 f"{", ".join([str(arg) for arg in args[1:]])} "
                                 f"{LF.join(
                                     f"{key}={str(value)}"
                                     for key, value in kwargs.items()) if kwargs else EMPTY_STRING}"))

                # Report the call (positional args only) as an Allure step.
                with allure.step(  # type: ignore
                    cls._format(
                        f"{func.__name__} "
                        f"{', '.join([str(arg) for arg in args[1:]])}")):
                    result = func(*args, **kwargs)

                # Fluent APIs commonly return self; omit it from the exit log.
                if result == instance:
                    logger.debug(f"<<< " + cls._format(f"{func.__name__}"))
                else:
                    logger.debug(
                        f"<<< " + cls._format(f"{func.__name__} {result}"))

                return result
            else:
                # Free function (or staticmethod): plain logging, no Allure step.
                logger = logging.getLogger(func.__name__)
                logger.debug(f">>> {func.__name__} {args} {kwargs}")
                result = func(*args, **kwargs)
                logger.debug(f"<<< {func.__name__} {result}")
                return result

        return wrapper

__init__(_formatter)

default() classmethod

Returns a default Context instance with a no-op formatter.

Returns:

Name Type Description
Context Context

A Context instance with the identity formatter.

Source code in qa-testing-utils/src/qa_testing_utils/logger.py
27
28
29
30
31
32
33
34
35
@classmethod
def default(cls) -> "Context":
    """
    Returns a default Context instance with a no-op formatter.

    Returns:
        Context: A Context instance with the identity formatter.
    """
    return cls(lambda _: _)  # no formatter

set(context_fn) classmethod

Sets per-thread context function to be used for formatting report and log messages.

Source code in qa-testing-utils/src/qa_testing_utils/logger.py
41
42
43
44
@classmethod
def set(cls, context_fn: Callable[[str], str]) -> None:
    """Sets per-thread context function to be used for formatting report and log messages."""
    cls._THREAD_LOCAL.set(Context(context_fn))

traced(func) classmethod

Decorator to log function entry, arguments, and return value at DEBUG level.

Also adds an Allure step for reporting. Use on methods where tracing is useful for debugging or reporting.

Example

@Context.traced
def my_method(self, x):
    ...

Parameters:

Name Type Description Default
func Callable[P, R]

The function to be decorated.

required
*args Any

Positional arguments to be passed to the function.

required
**kwargs Any

Keyword arguments to be passed to the function.

required

Returns:

Type Description
Callable[_P, _R]

Callable[P, R]: The result of the function call.

Source code in qa-testing-utils/src/qa_testing_utils/logger.py
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
@classmethod
def traced(cls, func: Callable[_P, _R]) -> Callable[_P, _R]:
    """
    Decorator to log function entry, arguments, and return value at DEBUG level.

    Also adds an Allure step for reporting. Use on methods where tracing is useful
    for debugging or reporting.

    Example:
        @Context.traced
        def my_method(self, x):
            ...

    Args:
        func (Callable[P, R]): The function to be decorated.
        *args (Any): Positional arguments to be passed to the function.
        **kwargs (Any): Keyword arguments to be passed to the function.

    Returns:
        Callable[P, R]: The result of the function call.
    """
    @wraps(func)
    def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R:
        # NOTE: each time a decorated function is called this logic will be
        # re-evaluated.
        signature = inspect.signature(func)
        parameters = list(signature.parameters.keys())

        if parameters and parameters[0] == 'self' and len(args) > 0:
            instance = args[0]
            logger = logging.getLogger(f"{instance.__class__.__name__}")
            logger.debug(f">>> "
                         + cls._format(
                             f"{func.__name__} "
                             f"{", ".join([str(arg) for arg in args[1:]])} "
                             f"{LF.join(
                                 f"{key}={str(value)}"
                                 for key, value in kwargs.items()) if kwargs else EMPTY_STRING}"))

            with allure.step(  # type: ignore
                cls._format(
                    f"{func.__name__} "
                    f"{', '.join([str(arg) for arg in args[1:]])}")):
                result = func(*args, **kwargs)

            if result == instance:
                logger.debug(f"<<< " + cls._format(f"{func.__name__}"))
            else:
                logger.debug(
                    f"<<< " + cls._format(f"{func.__name__} {result}"))

            return result
        else:
            logger = logging.getLogger(func.__name__)
            logger.debug(f">>> {func.__name__} {args} {kwargs}")
            result = func(*args, **kwargs)
            logger.debug(f"<<< {func.__name__} {result}")
            return result

    return wrapper

FromTupleMixin

Mixin that adds a from_tuple class method for instantiating objects from a tuple.

Allows creating an instance of a class (dataclass or regular class) by passing a tuple whose values match the order of the class fields. Works with frozen dataclasses as well.

Example

@dataclass(frozen=True)
class Point(FromTupleMixin):
    x: int
    y: int

p = Point.from_tuple((1, 2))

Source code in qa-testing-utils/src/qa_testing_utils/tuple_utils.py
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
class FromTupleMixin:
    """
    Mixin that adds a `from_tuple` class method for instantiating objects from a tuple.

    Allows creating an instance of a class (dataclass or regular class) by passing a tuple
    whose values match the order of the class fields. Works with frozen dataclasses as well.

    Example:
        @dataclass(frozen=True)
        class Point(FromTupleMixin):
            x: int
            y: int
        p = Point.from_tuple((1, 2))
    """
    @classmethod
    def from_tuple(cls: Type[Self], data: Tuple[Any, ...]) -> Self:
        """
        Instantiates the class from a tuple of values, matching the order of class fields.

        Args:
            data (Tuple[Any, ...]): Tuple of values corresponding to the class fields.
        Returns:
            Self: An instance of the class with fields set from the tuple.
        """
        if is_dataclass(cls):
            # Retrieve all fields, including inherited ones
            cls_fields = [f.name for f in fields(cls)]

            # Create a dictionary of field names to values from the tuple
            field_values = {name: value for name,
                            value in zip(cls_fields, data)}

            # Create a new instance using `__new__`
            instance = cls.__new__(cls)

            # If the dataclass is frozen, use `replace` to set the attributes
            if getattr(cls, '__dataclass_params__').frozen:
                return replace(instance, **field_values)
            else:
                # If the dataclass is not frozen, use setattr to set attributes
                for key, value in field_values.items():
                    setattr(instance, key, value)

                # Call __init__ if defined
                instance.__init__(*data)
                return instance
        else:
            # For vanilla classes, assume fields are defined in __init__
            # Using `__init__` directly as the custom initializer
            instance = cls.__new__(cls)
            for attr, value in zip(cls.__annotations__.keys(), data):
                setattr(instance, attr, value)

            # Call __init__ if it expects parameters
            instance.__init__(*data)
            return instance

from_tuple(data) classmethod

Instantiates the class from a tuple of values, matching the order of class fields.

Parameters:

Name Type Description Default
data Tuple[Any, ...]

Tuple of values corresponding to the class fields.

required

Returns: Self: An instance of the class with fields set from the tuple.

Source code in qa-testing-utils/src/qa_testing_utils/tuple_utils.py
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
@classmethod
def from_tuple(cls: Type[Self], data: Tuple[Any, ...]) -> Self:
    """
    Instantiates the class from a tuple of values, matching the order of class fields.

    Args:
        data (Tuple[Any, ...]): Tuple of values corresponding to the class fields.
    Returns:
        Self: An instance of the class with fields set from the tuple.
    """
    if is_dataclass(cls):
        # Retrieve all fields, including inherited ones
        cls_fields = [f.name for f in fields(cls)]

        # Create a dictionary of field names to values from the tuple
        field_values = {name: value for name,
                        value in zip(cls_fields, data)}

        # Create a new instance using `__new__`
        instance = cls.__new__(cls)

        # If the dataclass is frozen, use `replace` to set the attributes
        if getattr(cls, '__dataclass_params__').frozen:
            return replace(instance, **field_values)
        else:
            # If the dataclass is not frozen, use setattr to set attributes
            for key, value in field_values.items():
                setattr(instance, key, value)

            # Call __init__ if defined
            instance.__init__(*data)
            return instance
    else:
        # For vanilla classes, assume fields are defined in __init__
        # Using `__init__` directly as the custom initializer
        instance = cls.__new__(cls)
        for attr, value in zip(cls.__annotations__.keys(), data):
            setattr(instance, attr, value)

        # Call __init__ if it expects parameters
        instance.__init__(*data)
        return instance

ImmutableMixin

Mixin to enforce immutability after initialization.

Overrides setattr to raise AttributeError if an attribute is modified after being set. Intended for use with non-dataclasses. For dataclasses, use @dataclass(frozen=True).

Limitations
  • Does not work with WithMixin if attributes have default values.
  • Does not work if applied to a superclass with a custom init.
Example

class MyImmutable(ImmutableMixin):
    foo: int = 1

obj = MyImmutable()
obj.foo = 2  # Raises AttributeError

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
class ImmutableMixin:
    """
    Mixin that makes attributes write-once, yielding effective immutability.

    Overrides __setattr__ so that rebinding an already-set attribute raises
    AttributeError. Meant for plain classes; for dataclasses prefer
    `@dataclass(frozen=True)`.

    Limitations:
        - Does not work with WithMixin if attributes have default values.
        - Does not work if applied to a superclass with a custom __init__.

    Example:
        class MyImmutable(ImmutableMixin):
            foo: int = 1
        obj = MyImmutable()
        obj.foo = 2  # Raises AttributeError
    """

    def __setattr__(self, key: str, value: Any) -> None:
        # First assignment passes through; any later rebinding is rejected.
        if hasattr(self, key):
            raise AttributeError(
                f"Can't modify attribute '{key}' after initialization")
        super().__setattr__(key, value)

__setattr__(key, value)

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
54
55
56
57
58
def __setattr__(self, key: str, value: Any) -> None:
    if hasattr(self, key):
        raise AttributeError(f"Can't modify attribute '{
                             key}' after initialization")
    super().__setattr__(key, value)  # Properly sets the attribute

InvalidValueException

Bases: ValueError

Raised when an object fails validation via the Valid protocol.

Example

if not obj.is_valid():
    raise InvalidValueException(obj)

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
199
200
201
202
203
204
205
206
207
class InvalidValueException(ValueError):
    """
    Error signalling that an object failed its Valid-protocol validation.

    Example:
        if not obj.is_valid():
            raise InvalidValueException(obj)
    """

IsIteratorYielding

Bases: BaseMatcher[Iterator[T]]

Matcher for data yielded by iterators.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
@final
class IsIteratorYielding[T](BaseMatcher[Iterator[T]]):
    """
    Matcher for data yielded by iterators.
    """

    def __init__(self, element_matcher: Matcher[T]) -> None:
        self.element_matcher = element_matcher

    @override
    def _matches(self, item: Iterable[T]) -> bool:
        for element in item:
            if self.element_matcher.matches(element):
                return True

        # No matching element found
        return False

    @override
    def describe_to(self, description: Description) -> None:
        description.append_text("a stream containing ") \
            .append_description_of(self.element_matcher)

element_matcher = element_matcher instance-attribute

__init__(element_matcher)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
105
106
def __init__(self, element_matcher: Matcher[T]) -> None:
    self.element_matcher = element_matcher

describe_to(description)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
117
118
119
120
@override
def describe_to(self, description: Description) -> None:
    description.append_text("a stream containing ") \
        .append_description_of(self.element_matcher)

IsIteratorYieldingAll

Bases: BaseMatcher[Iterator[T]]

Matcher to ensure that the iterator yields at least one instance of each specified matcher.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
@final
class IsIteratorYieldingAll[T](BaseMatcher[Iterator[T]]):
    """
    Matcher to ensure that the iterator yields at least one instance of each specified matcher.
    """

    def __init__(self, element_matchers: List[Matcher[T]]) -> None:
        self.element_matchers = element_matchers

    @override
    def _matches(self, item: Iterable[T]) -> bool:
        unmatched_matchers = set(self.element_matchers)
        for element in item:
            unmatched_matchers = {
                m for m in unmatched_matchers if not m.matches(element)}
            if not unmatched_matchers:  # All matchers have been satisfied
                return True

        return False

    @override
    def describe_to(self, description: Description) -> None:
        description.append_text("a stream containing each of: ")
        for index, matcher in enumerate(self.element_matchers):
            if index > 0:
                description.append_text(", ")
            description.append_description_of(matcher)

element_matchers = element_matchers instance-attribute

__init__(element_matchers)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
155
156
def __init__(self, element_matchers: List[Matcher[T]]) -> None:
    self.element_matchers = element_matchers

describe_to(description)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
169
170
171
172
173
174
175
@override
def describe_to(self, description: Description) -> None:
    description.append_text("a stream containing each of: ")
    for index, matcher in enumerate(self.element_matchers):
        if index > 0:
            description.append_text(", ")
        description.append_description_of(matcher)

IsStreamContainingEvery

Bases: BaseMatcher[Iterator[T]]

Matcher to ensure every element yielded by an iterator matches a given matcher.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
@final
class IsStreamContainingEvery[T](BaseMatcher[Iterator[T]]):
    """
    Matcher to ensure every element yielded by an iterator matches a given matcher.
    """

    def __init__(self, element_matcher: Matcher[T]) -> None:
        self.element_matcher = element_matcher

    @override
    def _matches(self, item: Iterable[T]) -> bool:
        for element in item:
            if not self.element_matcher.matches(element):
                return False  # One non-matching element means failure

        # All elements matched
        return True

    @override
    def describe_to(self, description: Description) -> None:
        description.append_text("a stream where every item is ") \
            .append_description_of(self.element_matcher)

element_matcher = element_matcher instance-attribute

__init__(element_matcher)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
131
132
def __init__(self, element_matcher: Matcher[T]) -> None:
    self.element_matcher = element_matcher

describe_to(description)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
143
144
145
146
@override
def describe_to(self, description: Description) -> None:
    description.append_text("a stream where every item is ") \
        .append_description_of(self.element_matcher)

IsWithinDates

Bases: BaseMatcher[DateOrDateTime]

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
@final
class IsWithinDates(BaseMatcher[DateOrDateTime]):
    """
    Matcher asserting that a date/datetime falls within [start_date, end_date].

    Either bound may be None, making the interval open on that side; if both
    bounds are None, nothing matches.
    """

    def __init__(
            self, start_date: Optional[DateOrDateTime],
            end_date: Optional[DateOrDateTime]) -> None:
        self.start_date = start_date
        self.end_date = end_date

    def _matches(self, item: Optional[DateOrDateTime]) -> bool:
        if item is None:
            return False

        def as_datetime(value: DateOrDateTime) -> datetime:
            # Promote plain dates to midnight datetimes so comparisons are
            # always datetime-to-datetime.
            if isinstance(value, datetime):
                return value
            return datetime.combine(value, datetime.min.time())

        moment = as_datetime(item)
        lower = None if self.start_date is None else as_datetime(
            self.start_date)
        upper = None if self.end_date is None else as_datetime(self.end_date)

        if lower is not None and upper is not None:
            return lower <= moment <= upper
        if lower is not None:
            return moment >= lower
        if upper is not None:
            return moment <= upper
        # No bounds at all: nothing to be within.
        return False

    def describe_to(self, description: Description) -> None:
        if self.start_date is None:
            description.append_text(f"a date before {self.end_date}")
        elif self.end_date is None:
            description.append_text(f"a date after {self.start_date}")
        else:
            description.append_text(
                f"a date within {self.start_date} and {self.end_date}")

end_date = end_date instance-attribute

start_date = start_date instance-attribute

__init__(start_date, end_date)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
183
184
185
186
187
def __init__(
        self, start_date: Optional[DateOrDateTime],
        end_date: Optional[DateOrDateTime]) -> None:
    self.start_date = start_date
    self.end_date = end_date

describe_to(description)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
217
218
219
220
221
222
223
224
def describe_to(self, description: Description) -> None:
    if self.start_date is None:
        description.append_text(f"a date before {self.end_date}")
    elif self.end_date is None:
        description.append_text(f"a date after {self.start_date}")
    else:
        description.append_text(
            f"a date within {self.start_date} and {self.end_date}")

IterableReader

Bases: RawIOBase, LoggerMixin, ImmutableMixin

I/O read-only stream over iterable of bytes, enabling streaming mode.

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
@final
class IterableReader(RawIOBase, LoggerMixin, ImmutableMixin):
    """
    Read-only raw I/O stream over an iterable of byte chunks (streaming mode).
    """

    def __init__(self, chunks: Iterable[bytes]):
        # Each attribute is bound exactly once (ImmutableMixin forbids
        # rebinding); the bytearray is only mutated in place afterwards.
        self._chunks = iter(chunks)
        self._accumulated_buffer = bytearray()

    @override
    def readable(self) -> bool:
        return True

    @override
    def readinto(self, output_buffer: memoryview) -> int:  # type: ignore
        buffer = self._accumulated_buffer
        capacity = len(output_buffer)

        # Pull chunks until the request can be satisfied or the source ends.
        while len(buffer) < capacity:
            chunk = next(self._chunks, None)
            if chunk is None:
                break
            self.log.debug(f"buffered chunk with length={len(chunk)}")
            buffer.extend(chunk)

        # Hand over as much as fits, then drop it from the buffer's front.
        consumed_bytes = min(len(buffer), capacity)
        output_buffer[:consumed_bytes] = buffer[:consumed_bytes]
        del buffer[:consumed_bytes]

        self.log.debug(f"consumed {consumed_bytes} bytes")
        return consumed_bytes

    @staticmethod
    def from_(
            chunks: Iterable[bytes],
            buffer_size: int = DEFAULT_BUFFER_SIZE) -> BinaryIO:
        """
        Converts a stream of binary chunks into a BufferedReader.

        You should ensure closing.

        Args:
            chunks (Iterable[bytes]): stream of binary chunks

        Returns:
            io.BufferedReader: buffered reader around stream of binary chunks.
        """
        return BufferedReader(IterableReader(chunks), buffer_size)

__init__(chunks)

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
27
28
29
def __init__(self, chunks: Iterable[bytes]):
    self._chunks = iter(chunks)
    self._accumulated_buffer = bytearray()

from_(chunks, buffer_size=DEFAULT_BUFFER_SIZE) staticmethod

Converts a stream of binary chunks into a BufferedReader.

You should ensure closing.

Parameters:

Name Type Description Default
chunks Iterable[bytes]

stream of binary chunks

required

Returns:

Type Description
BinaryIO

io.BufferedReader: buffered reader around stream of binary chunks.

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
@staticmethod
def from_(
        chunks: Iterable[bytes],
        buffer_size: int = DEFAULT_BUFFER_SIZE) -> BinaryIO:
    """
    Converts a stream of binary chunks into a BufferedReader.

    You should ensure closing.

    Args:
        chunks (Iterable[bytes]): stream of binary chunks

    Returns:
        io.BufferedReader: buffered reader around stream of binary chunks.
    """
    return BufferedReader(IterableReader(chunks), buffer_size)

readable()

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
31
32
33
@override
def readable(self) -> bool:
    return True

readinto(output_buffer)

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
@override
def readinto(self, output_buffer: memoryview) -> int:  # type: ignore
    # consume chunks, accumulating their bytes up to size of output buffer
    while len(self._accumulated_buffer) < len(output_buffer) \
            and (chunk := next(self._chunks, None)) is not None:
        self.log.debug(f"buffered chunk with length={len(chunk)}")
        self._accumulated_buffer.extend(chunk)

    # consume accumulated bytes up to size of output buffer
    consumed_bytes = min(len(self._accumulated_buffer), len(output_buffer))
    output_buffer[:consumed_bytes] = self._accumulated_buffer[:consumed_bytes]

    # delete consumed bytes, shifting left the accumulated buffer
    del self._accumulated_buffer[:consumed_bytes]

    self.log.debug(f"consumed {consumed_bytes} bytes")
    return consumed_bytes

LoggerMixin

Mixin that provides a log property for convenient class-based logging.

Inherit from this mixin to get a self.log logger named after the class. Useful for adding debug/info/error logging to any class without boilerplate.

Example

class MyClass(LoggerMixin):
    def do_something(self):
        self.log.info("Doing something")

Source code in qa-testing-utils/src/qa_testing_utils/logger.py
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
class LoggerMixin:
    """
    Mixin that provides a `log` property for convenient class-based logging.

    Inherit from this mixin to get a `self.log` logger named after the class.
    Useful for adding debug/info/error logging to any class without boilerplate.

    Example:
        class MyClass(LoggerMixin):
            def do_something(self):
                self.log.info("Doing something")
    """
    @final
    @cached_property
    def log(self) -> logging.Logger:
        """
        Returns a logger named after the class.

        Returns:
            logging.Logger: The logger instance for this class.
        """
        return logging.getLogger(self.__class__.__name__)

    @final
    def trace[T](self, value: T) -> T:
        """
        Logs value at DEBUG level using this logger.

        Use to log something as a value, usually in a lambda expression::

            then.eventually_assert_that(
                lambda: self.trace(...call some API...),
                greater_that(0)) \
                .and_....other verifications may follow...

        Args:
            value (T): The value to log.

        Returns:
            T: The value (unchanged).
        """
        self.log.debug(f"=== {value}")
        return value

log cached property

Returns a logger named after the class.

Returns:

Type Description
Logger

logging.Logger: The logger instance for this class.

trace(value)

Logs value at DEBUG level using this logger.

Use to log something as a value, usually in a lambda expression::

then.eventually_assert_that(
    lambda: self.trace(...call some API...),
    greater_than(0)) \
    .and_....other verifications may follow...

Parameters:

Name Type Description Default
value T

The value to log.

required

Returns:

Name Type Description
T T

The value (unchanged).

Source code in qa-testing-utils/src/qa_testing_utils/logger.py
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
@final
def trace[T](self, value: T) -> T:
    """
    Logs value at DEBUG level using this logger.

    Use to log something as a value, usually in a lambda expression::

        then.eventually_assert_that(
            lambda: self.trace(...call some API...),
            greater_that(0)) \
            .and_....other verifications may follow...

    Args:
        value (T): The value to log.

    Returns:
        T: The value (unchanged).
    """
    # Pass-through logging: emit at DEBUG, then hand the value back unchanged.
    self.log.debug(f"=== {value}")
    return value

SingletonBase

Base class for singletons using SingletonMeta.

Inherit from this class to make your class a singleton.

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
190
191
192
193
194
195
196
class SingletonBase(metaclass=SingletonMeta):
    """
    Convenience base class whose subclasses become process-wide singletons.

    Deriving from this class applies `SingletonMeta`, so constructing a
    subclass repeatedly always yields the same instance.
    """
    pass

SingletonMeta

Bases: type

Thread-safe singleton metaclass.

Ensures only one instance of a class exists per process. Use by setting metaclass=SingletonMeta on your class.

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
@final
class SingletonMeta(type):
    """
    Thread-safe singleton metaclass.

    Guarantees at most one instance of each class per process.
    Apply via `metaclass=SingletonMeta` on the target class.
    """
    # Registry holding the unique instance per class.
    _instances: ClassVar[Dict[type, object]] = {}
    # Serializes first-time construction across threads.
    _lock: ClassVar[threading.Lock] = threading.Lock()

    def __call__(
            cls: type,
            *args: Any, **kwargs: Any) -> "SingletonBase":
        # Hold the lock across check-and-create so two threads cannot race
        # to construct separate instances of the same class.
        with SingletonMeta._lock:
            if cls not in SingletonMeta._instances:
                SingletonMeta._instances[cls] = super().__call__(
                    *args, **kwargs)  # type: ignore
        return SingletonMeta._instances[cls]  # type: ignore[return-value]

__call__(*args, **kwargs)

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
180
181
182
183
184
185
186
187
def __call__(
        cls: type,
        *args: Any, **kwargs: Any) -> "SingletonBase":
    # The lock spans both the existence check and the construction, so two
    # threads cannot race to create separate instances of the same class.
    with SingletonMeta._lock:
        if cls not in SingletonMeta._instances:
            instance = super().__call__(*args, **kwargs)  # type: ignore
            SingletonMeta._instances[cls] = instance
    return SingletonMeta._instances[cls]  # type: ignore[return-value]

TestException

Bases: Exception

Marks an exception raised by tests infrastructure. Useful to differentiate between unexpected run-time exceptions, which should be handled as programming errors, and legitimate run-time exceptions such as time-out, not found, etc. The former might be handled via a retry mechanism.

Source code in qa-testing-utils/src/qa_testing_utils/exceptions.py
 5
 6
 7
 8
 9
10
11
12
class TestException(Exception):
    """
    Exception raised by test infrastructure.

    Separates legitimate run-time failures (time-outs, not-found, etc.),
    which a retry mechanism may handle, from unexpected run-time exceptions
    that should be treated as programming errors.
    """
    pass

ThreadLocal

Thread-local storage for a value, with a default initializer.

Provides per-thread storage for a value of type T, initialized with a default.

Source code in qa-testing-utils/src/qa_testing_utils/thread_utils.py
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
class ThreadLocal[T]:
    """
    Thread-local storage for a value, with a default initializer.

    Provides per-thread storage for a value of type T, initialized with a default.
    """

    def __init__(self, default: Optional[T] = None):
        """
        Initializes the thread-local storage with a default value.

        Args:
            default (Optional[T]): The default value for each thread, None if not specified.
        """
        self._local = local()
        self._local.value = default

    def set(self, value: T) -> None:
        """
        Sets the thread-local value for the current thread.

        Args:
            value (T): The value to set for the current thread.
        """
        self._local.value = value

    def get(self) -> T:
        """
        Gets the thread-local value for the current thread.

        Returns:
            T: The value for the current thread.
        """
        return cast(T, self._local.value)

__init__(default=None)

Initializes the thread-local storage with a default value.

Parameters:

Name Type Description Default
default Optional[T]

The default value for each thread, None if not specified.

None
Source code in qa-testing-utils/src/qa_testing_utils/thread_utils.py
34
35
36
37
38
39
40
41
42
def __init__(self, default: Optional[T] = None):
    """
    Initializes the thread-local storage with a default value.

    Args:
        default (Optional[T]): The default value for each thread, None if not specified.
    """
    self._local = local()
    # NOTE(review): attributes of threading.local are per-thread; this
    # default is visible only to the thread constructing this object, so
    # other threads may not see it -- confirm intended behavior.
    self._local.value = default

get()

Gets the thread-local value for the current thread.

Returns:

Name Type Description
T T

The value for the current thread.

Source code in qa-testing-utils/src/qa_testing_utils/thread_utils.py
53
54
55
56
57
58
59
60
def get(self) -> T:
    """
    Gets the thread-local value for the current thread.

    Returns:
        T: The value for the current thread.
    """
    # NOTE(review): a thread that never called set() may raise
    # AttributeError here, since the default is assigned only in the
    # creating thread -- confirm.
    return cast(T, self._local.value)

set(value)

Sets the thread-local value for the current thread.

Parameters:

Name Type Description Default
value T

The value to set for the current thread.

required
Source code in qa-testing-utils/src/qa_testing_utils/thread_utils.py
44
45
46
47
48
49
50
51
def set(self, value: T) -> None:
    """
    Sets the thread-local value for the current thread.

    Args:
        value (T): The value to set for the current thread.
    """
    # Only the calling thread observes this assignment.
    self._local.value = value

ToDictMixin

Mixin to add serialization methods to dataclasses.

Provides:

  • to_dict(): Recursively converts a dataclass (and nested dataclasses) to a dictionary.
  • flatten(): Flattens the nested structure for CSV or flat serialization.

Example:

    @dataclass
    class User(ToDictMixin):
        name: str
        age: int

    user = User("Alice", 30)
    user.to_dict()  # {'name': 'Alice', 'age': 30}

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
class ToDictMixin:
    """
    Mixin adding serialization helpers to dataclasses.

    Provides:
        - to_dict(): Recursively converts a dataclass (and nested dataclasses) to a dictionary.
        - flatten(): Flattens nested structure for CSV or flat serialization.

    Example:
        @dataclass
        class User(ToDictMixin):
            name: str
            age: int

        user = User("Alice", 30)
        user.to_dict()  # {'name': 'Alice', 'age': 30}
    """

    def to_dict(self) -> Dict[str, Any]:
        """
        Converts a dataclass instance (with nested dataclasses) to a dictionary.
        """
        from typing import cast

        def _convert(item: Any) -> Any:
            # Recurse into mixin instances and containers; leaves pass through.
            if isinstance(item, ToDictMixin):
                return item.to_dict()
            if isinstance(item, list):
                return [_convert(element) for element in cast(list[Any], item)]
            if isinstance(item, dict):
                return {name: _convert(element)
                        for name, element in item.items()}  # type: ignore
            return item

        if not is_dataclass(self):
            raise TypeError("not a dataclass instance")

        return {name: _convert(item) for name, item in asdict(self).items()}

    def flatten(self, prefix: str = "") -> Dict[str, Any]:
        """
        Flattens the nested structure into a flat dictionary for CSV serialization.
        """
        result: Dict[str, Any] = {}

        def _add(key: str, item: Any) -> None:
            if isinstance(item, ToDictMixin):
                # Nested mixin dataclasses contribute prefixed entries.
                result.update(item.flatten(prefix=f"{key}_"))
            elif isinstance(item, list):
                # Lists expand into one indexed column per element.
                for position, element in enumerate(item):  # type: ignore
                    result[f"{key}[{position}]"] = element
            elif isinstance(item, dict):
                # Dicts expand into one suffixed column per key.
                for name, element in item.items():  # type: ignore
                    result[f"{key}_{name}"] = element
            else:
                # Scalars map straight through.
                result[key] = item

        if not is_dataclass(self):
            raise TypeError("not a dataclass instance")

        for field in fields(self):
            _add(f"{prefix}{field.name}", getattr(self, field.name))

        return result

flatten(prefix='')

Flattens the nested structure into a flat dictionary for CSV serialization.

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
def flatten(self, prefix: str = "") -> Dict[str, Any]:
    """
    Flattens the nested structure into a flat dictionary for CSV serialization.
    """
    flat_dict: Dict[str, Any] = {}

    def flatten_value(key: str, value: Any) -> None:
        if isinstance(value, ToDictMixin):
            # Flatten nested ToDictMixin dataclasses
            nested_flat = value.flatten(prefix=f"{key}_")
            flat_dict.update(nested_flat)
        elif isinstance(value, list):
            # Serialize lists as JSON strings or expand into multiple columns
            for idx, item in enumerate(value):  # type: ignore
                flat_dict[f"{key}[{idx}]"] = item
        elif isinstance(value, dict):
            # Serialize dicts as JSON strings or expand into multiple columns
            for sub_key, sub_val in value.items():  # type: ignore
                flat_dict[f"{key}_{sub_key}"] = sub_val
        else:
            # Directly add non-nested fields
            flat_dict[key] = value

    if not is_dataclass(self):
        raise TypeError("not a dataclass instance")

    # Walk the dataclass fields, flattening each value into the result.
    for field in fields(self):
        value = getattr(self, field.name)
        flatten_value(f"{prefix}{field.name}", value)

    return flat_dict

to_dict()

Converts a dataclass instance (with nested dataclasses) to a dictionary.

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
def to_dict(self) -> Dict[str, Any]:
    """
    Converts a dataclass instance (with nested dataclasses) to a dictionary.
    """
    from typing import cast

    def convert(value: Any) -> Any:
        # Recurse into mixin instances and containers; leaves pass through.
        if isinstance(value, ToDictMixin):
            return value.to_dict()
        elif isinstance(value, list):
            # Provide a type hint for v
            return [convert(v) for v in cast(list[Any], value)]
        elif isinstance(value, dict):
            return {k: convert(v) for k, v in value.items()}  # type: ignore
        return value

    # Only dataclass instances can be serialized via asdict().
    if not is_dataclass(self):
        raise TypeError("not a dataclass instance")

    return {key: convert(value) for key, value in asdict(self).items()}

TracingMatcher

Bases: BaseMatcher[T], LoggerMixin

A matcher wrapper that adds debug logging around another matcher.

Logs the result of each match attempt using the class logger.

Parameters:

Name Type Description Default
matcher Matcher[T]

The matcher to wrap and trace.

required
Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
class TracingMatcher[T](BaseMatcher[T], LoggerMixin):
    """
    A matcher wrapper that adds debug logging around another matcher.

    Logs the result of each match attempt using the class logger.

    Args:
        matcher (Matcher[T]): The matcher to wrap and trace.
    """

    def __init__(self, matcher: Matcher[T]) -> None:
        self._matcher = matcher

    def _matches(self, item: Any) -> bool:
        result = self._matcher.matches(item)
        self.log.debug(f"{item!r} -> {result}")
        return result

    def describe_to(self, description: Description) -> None:
        self._matcher.describe_to(description)

__init__(matcher)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
37
38
def __init__(self, matcher: Matcher[T]) -> None:
    # Store the wrapped matcher; matching and description are delegated to it.
    self._matcher = matcher

describe_to(description)

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
45
46
def describe_to(self, description: Description) -> None:
    # Delegate the description to the wrapped matcher.
    self._matcher.describe_to(description)

Valid

Bases: Protocol

Specifies a method for validating objects.

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
20
21
22
23
24
25
26
27
28
29
30
31
32
33
@runtime_checkable
class Valid(Protocol):
    """
    Structural interface for objects that can validate themselves.
    """

    def is_valid(self) -> bool:
        """
        Should be implemented by objects that need validation.

        Returns:
            bool: true, if the object is valid
        """
        ...

is_valid()

Should be implemented by objects that need validation.

Returns:

Name Type Description
bool bool

true, if the object is valid

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
26
27
28
29
30
31
32
33
def is_valid(self) -> bool:
    """
    Should be implemented by objects that need validation.

    Returns:
        bool: true, if the object is valid
    """
    ...  # protocol stub: implementers provide the actual check

WithMixin

Mixin to support copy-on-change (functional update) for objects.

Instead of mutating an object, use with_() to create a copy with updated fields: obj2 = obj.with_(field=new_value)

Works with both plain Python classes and dataclasses.

Example:

    @dataclass(frozen=True)
    class Point(WithMixin):
        x: int
        y: int

    p1 = Point(1, 2)
    p2 = p1.with_(x=3)  # p2 is Point(3, 2)

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
class WithMixin:
    '''
    Mixin to support copy-on-change (functional update) for objects.

    Instead of mutating an object, use `with_()` to create a copy with updated fields:
        obj2 = obj.with_(field=new_value)

    Works with both plain Python classes and dataclasses.

    Example:
        @dataclass(frozen=True)
        class Point(WithMixin):
            x: int
            y: int

        p1 = Point(1, 2)
        p2 = p1.with_(x=3)  # p2 is Point(3, 2)
    '''
    @final
    def with_[T:WithMixin](self: T, **changes: Any) -> T:
        if is_dataclass(self):
            # Directly use replace for dataclasses; it will raise an error for invalid fields
            return replace(self, **changes)

        duplicated_object = self.__class__(**self.__dict__)
        for key, value in changes.items():
            # Get the current attribute to determine its type
            current_attr = getattr(self, key, None)
            if isinstance(current_attr, Enum):
                # If the current attribute is an enum,
                # convert the value to the corresponding enum
                value = current_attr.__class__(value)
            setattr(duplicated_object, key, value)
        return duplicated_object

with_(**changes)

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
@final
def with_[T:WithMixin](self: T, **changes: Any) -> T:
    if is_dataclass(self):
        # Directly use replace for dataclasses; it will raise an error for invalid fields
        return replace(self, **changes)

    # Plain classes: shallow-copy via the constructor, then apply the changes.
    duplicated_object = self.__class__(**self.__dict__)
    for key, value in changes.items():
        # Get the current attribute to determine its type
        current_attr = getattr(self, key, None)
        if isinstance(current_attr, Enum):
            # If the current attribute is an enum,
            # convert the value to the corresponding enum
            value = current_attr.__class__(value)
        setattr(duplicated_object, key, value)
    return duplicated_object

classproperty

Descriptor for defining class-level properties (like @property but for classes).

Example:

    class MyClass:
        @classproperty
        def foo(cls):
            return ...

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
@final
class classproperty[T]:
    """
    Descriptor for defining class-level properties (like @property but for classes).

    Example:
        class MyClass:
            @classproperty
            def foo(cls):
                return ...
    """

    def __init__(self, fget: Callable[[Any], T]) -> None:
        self.fget = fget

    def __get__(self, instance: Any, owner: Any) -> T:
        return self.fget(owner)

fget = fget instance-attribute

__get__(instance, owner)

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
265
266
def __get__(self, instance: Any, owner: Any) -> T:
    # Invoke the getter with the owning class (not the instance).
    return self.fget(owner)

__init__(fget)

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
262
263
def __init__(self, fget: Callable[[Any], T]) -> None:
    # The class-level getter to wrap.
    self.fget = fget

adapted_iterator(converter, matcher)

Hamcrest matcher adapting an Iterator of type T by specified converter and applying specified matcher. For example::

adapt_iterator( lambda message: message.id,
            yields_item(is_greater_than(0)) )

where id being a number, and is_greater_than being a matcher that can be applied on numbers.

See more on PyHamcrest <https://github.com/hamcrest/PyHamcrest>

Parameters:

Name Type Description Default
converter Callable[[T], R]

function converting T into R

required
matcher Matcher[Iterator[R]

matcher for adapted Iterator of R

required

Returns:

Type Description
Matcher[Iterator[T]]

Matcher[Iterator[T]]: matcher for target Iterator of type T

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
def adapted_iterator[T, R](
        converter: Callable[[T], R],
        matcher: Matcher[Iterator[R]]) -> Matcher[Iterator[T]]:
    """
    Hamcrest matcher adapting an Iterator of type T by specified converter and
    applying specified matcher. For example::

        adapt_iterator( lambda message: message.id,
                    yields_item(is_greater_than(0)) )

    where id being a number, and is_greater_than being a matcher that can be
    applied on numbers.

    See more on `PyHamcrest <https://github.com/hamcrest/PyHamcrest>`

    Args:
        converter (Callable[[T], R]): function converting T into R
        matcher (Matcher[Iterator[R]): matcher for adapted Iterator of R

    Returns:
        Matcher[Iterator[T]]: matcher for target Iterator of type T
    """
    @final
    class AdaptedMatcher(BaseMatcher[Iterator[T]]):
        @override
        def _matches(self, item: Iterable[T]) -> bool:
            return matcher.matches(map(converter, item))

        @override
        def describe_to(self, description: Description) -> None:
            description.append_description_of(matcher)

    return AdaptedMatcher()

adapted_object(converter, matcher)

Hamcrest matcher adapting an object of type T by specified converter and applying specified matcher. For example::

adapt_object( lambda message: message.id,
            is_greater_than(0) )

where id being a number, and is_greater_than being a matcher that can be applied on numbers.

See more on PyHamcrest <https://github.com/hamcrest/PyHamcrest>

Parameters:

Name Type Description Default
converter Callable[[T], R]

function converting T into R

required
matcher Matcher[R]

matcher for adapted type R

required

Returns:

Type Description
Matcher[T]

Matcher[T]: matcher for target type T

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
def adapted_object[T, R](
        converter: Callable[[T], R],
        matcher: Matcher[R]) -> Matcher[T]:
    """
    Hamcrest matcher adapting an object of type T by specified converter and
    applying specified matcher. For example::

        adapt_object( lambda message: message.id,
                    is_greater_than(0) )

    where id being a number, and is_greater_than being a matcher that can be
    applied on numbers.

    See more on `PyHamcrest <https://github.com/hamcrest/PyHamcrest>`

    Args:
        converter (Callable[[T], R]): function converting T into R
        matcher (Matcher[R]): matcher for adapted type R

    Returns:
        Matcher[T]: matcher for target type T
    """
    @final
    class AdaptedMatcher(BaseMatcher[T]):
        @override
        def _matches(self, item: T) -> bool:
            return False if item is None \
                else matcher.matches(converter(item))

        @override
        def describe_to(self, description: Description) -> None:
            description.append_description_of(matcher)

    return AdaptedMatcher()

adapted_sequence(converter, matcher)

Hamcrest matcher adapting a Sequence of type T by specified converter and applying specified matcher. For example::

adapt_sequence( lambda message: message.id,
            has_item(is_greater_than(0)) )

where id being a number, and is_greater_than being a matcher that can be applied on numbers.

See more on PyHamcrest <https://github.com/hamcrest/PyHamcrest>

Parameters:

Name Type Description Default
converter Callable[[T], R]

function converting T into R

required
matcher Matcher[Sequence[R]

matcher for adapted Sequence of R

required

Returns:

Type Description
Matcher[Sequence[T]]

Matcher[Sequence[T]]: matcher for target Sequence of type T

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
def adapted_sequence[T, R](
        converter: Callable[[T], R],
        matcher: Matcher[Sequence[R]]) -> Matcher[Sequence[T]]:
    """
    Hamcrest matcher adapting a Sequence of type T by specified converter and
    applying specified matcher. For example::

        adapt_sequence( lambda message: message.id,
                    has_item(is_greater_than(0)) )

    where id being a number, and is_greater_than being a matcher that can be
    applied on numbers.

    See more on `PyHamcrest <https://github.com/hamcrest/PyHamcrest>`

    Args:
        converter (Callable[[T], R]): function converting T into R
        matcher (Matcher[Sequence[R]): matcher for adapted Sequence of R

    Returns:
        Matcher[Sequence[T]]: matcher for target Sequence of type T
    """
    @final
    class AdaptedMatcher(BaseMatcher[Sequence[T]]):
        @override
        def _matches(self, item: Sequence[T]) -> bool:
            return matcher.matches([converter(x) for x in item])

        @override
        def describe_to(self, description: Description) -> None:
            description.append_description_of(matcher)

    return AdaptedMatcher()

configure(config, path=Path(__file__).parent / 'logging.ini')

Configures logging for pytest using a specified INI file, or defaults to internal logging.ini.

Parameters:

Name Type Description Default
config Config

The pytest configuration object.

required
path Path

Path to the logging configuration file. Defaults to 'logging.ini' in the current directory.

parent / 'logging.ini'
Source code in qa-testing-utils/src/qa_testing_utils/conftest_helpers.py
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
def configure(config: pytest.Config,
              path: Path = Path(__file__).parent / "logging.ini") -> None:
    """
    Configures logging for pytest using a specified INI file, or defaults to internal logging.ini.

    Args:
        config (pytest.Config): The pytest configuration object.
        path (Path, optional): Path to the logging configuration file. Defaults to 'logging.ini' in the current directory.
    """
    # Identify the calling module so the log line says who configured logging.
    caller_module = inspect.getmodule(inspect.stack()[1][0])
    module_name = caller_module.__name__ if caller_module else "unknown"

    if path.is_file():
        logging.config.fileConfig(path)
        logging.info(f"{module_name} loaded logs config from: {path}")
    else:
        # Terminate with a newline so the warning does not run into
        # subsequent stderr output.
        sys.stderr.write(
            f"{module_name} couldn't find logs config file {path}\n")

contains_string_ignoring_case(substring)

Creates a matcher that checks if a given string contains the specified substring, ignoring case.

Parameters:

Name Type Description Default
substring str

The substring to search for within the target string, case-insensitively.

required

Returns:

Name Type Description
ContainsStringIgnoringCase ContainsStringIgnoringCase

A matcher object that evaluates whether the target string contains the specified substring, ignoring case.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
86
87
88
89
90
91
92
93
94
95
96
def contains_string_ignoring_case(substring: str) -> ContainsStringIgnoringCase:
    """
    Creates a matcher that checks if a given string contains the specified substring, ignoring case.

    Args:
        substring (str): The substring to search for within the target string, case-insensitively.

    Returns:
        ContainsStringIgnoringCase: A matcher object that evaluates whether the target string contains the specified substring, ignoring case.
    """
    # Thin factory following the hamcrest convention of lowercase builders.
    return ContainsStringIgnoringCase(substring)

crc32_of(file, chunk_size=DEFAULT_BUFFER_SIZE)

Calculate the CRC of a binary stream from its current position to its tail, using chunked reading.

Parameters:

Name Type Description Default
file BinaryIO

The file object to read data from, starting from its current position.

required
chunk_size int

The size of chunks to read at a time (default is 8192).

DEFAULT_BUFFER_SIZE

Returns:

Name Type Description
int int

Calculated CRC value of the remaining file content.

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
def crc32_of(file: BinaryIO, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
    """
    Computes the CRC-32 of a binary stream from its current position to its
    tail, reading in chunks.

    Args:
        file (BinaryIO): The file object to read data from, starting from its current position.
        chunk_size (int): The size of chunks to read at a time (default is 8192).

    Returns:
        int: Calculated CRC value of the remaining file content.
    """
    checksum = 0
    # iter() with a b"" sentinel stops at end-of-stream, same as the
    # walrus-while idiom.
    for chunk in iter(lambda: file.read(chunk_size), b""):
        checksum = crc32(chunk, checksum)
    return checksum & 0xFFFFFFFF  # ensure 32-bit unsigned

decompress_xz_stream(compressed_chunks)

Decompresses XZ stream.

Parameters:

Name Type Description Default
compressed_chunks Iterable[bytes]

stream of binary compressed chunks

required

Yields:

Type Description
bytes

Iterator[bytes]: the decompressed stream

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
129
130
131
132
133
134
135
136
137
138
139
140
def decompress_xz_stream(compressed_chunks: Iterable[bytes]) -> Iterator[bytes]:
    """
    Lazily decompresses an XZ-compressed stream of chunks.

    Args:
        compressed_chunks (Iterable[bytes]): stream of binary compressed chunks

    Yields:
        Iterator[bytes]: the decompressed stream
    """
    # One decompressor instance carries state across successive chunks.
    decompressor = LZMADecompressor()
    return (decompressor.decompress(chunk) for chunk in compressed_chunks)

extract_files_from_tar(tar_chunks)

Extracts files from decompressed TAR stream.

Parameters:

Name Type Description Default
tar_chunks Iterable[bytes]

stream of decompressed TAR chunks

required

Yields:

Type Description
Tuple[TarInfo, bytes]

Iterator[Tuple[tarfile.TarInfo, bytes]]: streams tuples of meta-data and data for each file

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def extract_files_from_tar(tar_chunks: Iterable[bytes]) -> Iterator[Tuple[TarInfo, bytes]]:
    """
    Extracts files from decompressed TAR stream.

    Args:
        tar_chunks (Iterable[bytes]): stream of decompressed TAR chunks

    Yields:
        Iterator[Tuple[tarfile.TarInfo, bytes]]: \
            streams tuples of meta-data and data for each file
    """
    # NOTE(review): 'open' here must be tarfile.open (the builtin open has no
    # 'fileobj' parameter) -- confirm the module-level imports bind it so.
    # mode='r|*' reads the archive as a non-seekable stream of any compression.
    with open(fileobj=IterableReader.from_(tar_chunks),
              mode='r|*') as tar:
        for member in tar:
            # Only regular files carry content; directories, links, etc. are skipped.
            if member.isfile():
                extracted_file = tar.extractfile(member)
                if extracted_file:
                    yield member, extracted_file.read()

get_test_body(item)

Retrieves the source code of the test function for the given pytest item.

Parameters:

Name Type Description Default
item Item

The pytest test item.

required

Returns:

str: The source code of the test function, or an error message if unavailable.

Source code in qa-testing-utils/src/qa_testing_utils/conftest_helpers.py
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
def get_test_body(item: pytest.Item) -> str:
    """
    Retrieves the source code of the test function for the given pytest item.

    Args:
        item (pytest.Item): The pytest test item.
    Returns:
        str: The source code of the test function, or an error message if unavailable.
    """
    test_function = getattr(item, 'function', None)
    if test_function is None:
        return "No function found for this test item."
    try:
        return inspect.getsource(test_function)
    except Exception as e:
        # Source may be unavailable (e.g. dynamically generated tests).
        return f"Could not get source code: {str(e)}"

makereport(item, call)

Creates a pytest test report and appends the test body source code to the report sections.

Parameters:

Name Type Description Default
item Item

The pytest test item.

required
call CallInfo[None]

The call information for the test.

required

Returns:

pytest.TestReport: The generated test report with the test body included.

Source code in qa-testing-utils/src/qa_testing_utils/conftest_helpers.py
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
def makereport(
        item: pytest.Item, call: pytest.CallInfo[None]) -> pytest.TestReport:
    """
    Creates a pytest test report and appends the test body source code to the report sections.

    Args:
        item (pytest.Item): The pytest test item.
        call (pytest.CallInfo[None]): The call information for the test.
    Returns:
        pytest.TestReport: The generated test report with the test body included.
    """
    test_report = pytest.TestReport.from_item_and_call(item, call)
    # Attach the test source only for the actual call phase (not setup/teardown).
    if call.when == "call":
        test_report.sections.append(('body', get_test_body(item)))
    return test_report

match_as(matcher)

Utility function to cast a generic matcher to the specific type Matcher[T].

Parameters:

Name Type Description Default
matcher Matcher[object]

The original matcher that needs to be cast.

required

Returns:

Type Description
Matcher[T]

A matcher cast to Matcher[T].

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
396
397
398
399
400
401
402
403
404
405
406
def match_as[T](matcher: Matcher[object]) -> Matcher[T]:  # type: ignore
    """
    Utility function to cast a generic matcher to the specific type Matcher[T].

    Args:
        matcher: The original matcher that needs to be cast.

    Returns:
        A matcher cast to Matcher[T].
    """
    return cast(Matcher[T], matcher)

process_next(i, p)

Processes next items per specified predicate. Useful for cases in which the first item in a stream decides the meaning of the rest of the items.

Parameters:

Name Type Description Default
i Iterator[T]

the iterator to process

required
p Predicate[T]

the predicate to be applied on next(i)

required

Returns:

Type Description
Iterator[T]

Iterator[T]: the original iterator if the predicate evaluated true, otherwise empty iterator

Source code in qa-testing-utils/src/qa_testing_utils/stream_utils.py
37
38
39
40
41
42
43
44
45
46
47
48
49
50
def process_next[T](i: Iterator[T], p: Predicate[T]) -> Iterator[T]:
    """
    Processes next items per specified predicate. Useful for cases in which
    the first item in a stream decides the meaning of the rest of the items.

    Args:
        i (Iterator[T]): the iterator to process
        p (Predicate[T]): the predicate to be applied on `next(i)`

    Returns:
        Iterator[T]: the original iterator if the predicate evaluated true, \
            otherwise empty iterator
    """
    return i if p(next(i)) else iter([])

read_lines(byte_stream, encoding=UTF_8, eol=LF)

Converts a stream of binary chunks into stream of text lines. Handles cases where lines are split across chunks.

Parameters:

Name Type Description Default
byte_stream Iterable[bytes]

the binary (chunks) stream

required
encoding str

expected text encoding. Defaults to 'utf-8'.

UTF_8
eol str

expected line-ending. Defaults to LF.

LF

Yields:

Type Description
str

Iterator[str]: stream of text lines, not terminated by EOL marker

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
def read_lines(
        byte_stream: Iterable[bytes],
        encoding: str = UTF_8,
        eol: str = LF) -> Iterator[str]:
    """
    Converts a stream of binary chunks into stream of text lines.
    Handles cases where lines are split across chunks.

    Args:
        byte_stream (Iterable[bytes]): the binary (chunks) stream
        encoding (str, optional): expected text encoding. Defaults to 'utf-8'.
        eol (str, optional): expected line-ending. Defaults to LF.

    Yields:
        Iterator[str]: stream of text lines, not terminated by EOL marker
    """
    eol_bytes = eol.encode(encoding)
    pending = bytearray()
    saw_chunks = False

    for chunk in byte_stream:
        print(DOT, end=SPACE)
        saw_chunks = True
        pending.extend(chunk)
        # everything before the last EOL is complete; the tail stays pending
        *complete, pending = pending.split(eol_bytes)
        trace(f"streaming {len(complete)} lines; leftover {len(pending)} chars")
        for line in complete:
            yield line.decode(encoding)

    if pending:  # yield the leftover partial line
        yield pending.decode(encoding)

    if not saw_chunks:
        trace("no lines")

require_not_none(value, message='Value must not be None')

Ensures that the provided value is not None.

Parameters:

Name Type Description Default
value Optional[T]

The value to check for None.

required
message str

The error message to use if value is None. Defaults to "Value must not be None".

'Value must not be None'

Raises:

Type Description
ValueError

If value is None.

Returns:

Name Type Description
T T

The value, guaranteed to be not None.

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
def require_not_none[T](
        value: Optional[T],
        message: str = "Value must not be None") -> T:
    """
    Ensures that the provided value is not None.

    Args:
        value (Optional[T]): The value to check for None.
        message (str, optional): The error message to use if value is None. Defaults to "Value must not be None".

    Raises:
        ValueError: If value is None.

    Returns:
        T: The value, guaranteed to be not None.
    """
    if value is None:
        raise ValueError(message)
    return value

safely(supplier)

Calls a function safely, wrapping its result in Maybe, and swallowing any exceptions. The function should be a no-argument callable::

safely(lambda: call_something_that_may_fail(params))

Parameters:

Name Type Description Default
supplier Supplier[T]

The supplier to be called.

required

Returns:

Type Description
Maybe[T]

Maybe[T]: The result wrapped in Maybe, or Nothing if an exception occurs.

Source code in qa-testing-utils/src/qa_testing_utils/exception_utils.py
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
def safely[T](supplier: Supplier[T]) -> Maybe[T]:
    """
    Calls a function safely, wrapping its result in Maybe, and swallowing any exceptions.
    The function should be a no-argument callable::

        safely(lambda: call_something_that_may_fail(params))

    Args:
        supplier (Supplier[T]): The supplier to be called.

    Returns:
        Maybe[T]: The result wrapped in Maybe, or Nothing if an exception occurs.
    """
    try:
        return Some(supplier())
    except Exception as e:
        logging.exception(f"Exception occurred: {e}")
        return Nothing

sleep_for(duration)

Sleep for the specified duration.

Parameters:

Name Type Description Default
duration timedelta

The amount of time to sleep.

required
Source code in qa-testing-utils/src/qa_testing_utils/thread_utils.py
17
18
19
20
21
22
23
24
def sleep_for(duration: timedelta):
    """
    Blocks the current thread for the given duration.

    Args:
        duration (timedelta): The amount of time to sleep.
    """
    seconds = duration.total_seconds()
    time.sleep(seconds)

stream_file(file_path, chunk_size=DEFAULT_BUFFER_SIZE)

Streams a binary file from disk into an iterator.

If the iterator is not consumed, the file will be closed when the iterator will be garbage collected.

Parameters:

Name Type Description Default
file_path Path

path to file

required
chunk_size int

the chunk size. Defaults to 8192.

DEFAULT_BUFFER_SIZE

Yields:

Type Description
bytes

Iterator[bytes]: the binary chunks stream

Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
def stream_file(
        file_path: Path,
        chunk_size: int = DEFAULT_BUFFER_SIZE) -> Iterator[bytes]:
    """
    Streams a binary file from disk into an iterator.

    If the iterator is not consumed, the file will be closed when the iterator
    will be garbage collected.

    Args:
        file_path (Path): path to file
        chunk_size (int, optional): the chunk size. Defaults to 8192.

    Yields:
        Iterator[bytes]: the binary chunks stream
    """
    with file_path.open('rb') as f:
        while True:
            chunk = f.read(chunk_size)
            if chunk == EMPTY_BYTES:  # EOF reached
                return
            yield chunk

swallow(func)

Decorates a function to swallow any exceptions.

If an exception will occur, None will be returned.

Parameters:

Name Type Description Default
func Callable

the function, supplied by the run-time

required

Returns:

Name Type Description
Callable Callable[..., Any]

the decorated function

Source code in qa-testing-utils/src/qa_testing_utils/exception_utils.py
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
def swallow(func: Callable[..., Any]) -> Callable[..., Any]:
    """
    Decorates a function to swallow any exceptions.

    If an exception will occur, None will be returned.

    Args:
        func (Callable): the function, supplied by the run-time

    Returns:
        Callable: the decorated function
    """
    @functools.wraps(func)
    def guarded(*args: Any, **kwargs: Any) -> Any:
        # delegate exception handling (and logging) to safely()
        outcome = safely(lambda: func(*args, **kwargs))
        return outcome.value_or(None)

    return guarded

to_string(indent=' ', depth=1, width=72, seq_length=15, show_protected=False, show_private=False, show_static=False, show_properties=True, show_address=False, str_length=50)

Class decorator providing a readable str implementation.

The default Python str implementation, returns the type and the memory address of instance.

Important for diagnostics, actually every object that is logged, must provide such readable str.

Parameters:

Name Type Description Default
indent str

indentation; Defaults to ' '.

' '
depth int

depth in object hierarchy; defaults to 1.

1
width int

width of line before line-feed; defaults to 72.

72
seq_length int

how many items to include; defaults to 15.

15
show_protected bool

include protected; Defaults to False.

False
show_private bool

include private; defaults to False.

False
show_static bool

include static; defaults to False.

False
show_properties bool

include properties; defaults to True.

True
show_address bool

include object's memory address; defaults to False.

False
str_length int

maximum string length per item; defaults to 50.

50

Returns:

Type Description
Callable[[Type[T]], Type[T]]

Callable[[Type[T]], Type[T]]: class decorator that installs the readable __str__ on the decorated class.

Source code in qa-testing-utils/src/qa_testing_utils/string_utils.py
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
def to_string[T](indent: str = '    ',
                 depth: int = 1,
                 width: int = 72,
                 seq_length: int = 15,
                 show_protected: bool = False,
                 show_private: bool = False,
                 show_static: bool = False,
                 show_properties: bool = True,
                 show_address: bool = False,
                 str_length: int = 50) -> Callable[[Type[T]], Type[T]]:
    """
    Class decorator providing a readable __str__ implementation.

    The default Python __str__ implementation, returns the type and the memory
    address of instance.

    Important for diagnostics, actually every object that is logged, must
    provide such readable __str__.

    Args:
        indent (str, optional): indentation; Defaults to '    '.
        depth (int, optional): depth in object hierarchy; defaults to 1.
        width (int, optional): width of line before line-feed; defaults to 72.
        seq_length (int, optional): how many items to include; defaults to 15.
        show_protected (bool, optional): include protected; Defaults to False.
        show_private (bool, optional): include private; defaults to False.
        show_static (bool, optional): include static; defaults to False.
        show_properties (bool, optional): include properties; defaults to True.
        show_address (bool, optional): include object's memory address; defaults to False.
        str_length (int, optional): maximum string length per item; defaults to 50.

    Returns:
        Callable[[Type[T]], Type[T]]: _description_
    """
    def decorator(cls: Type[T]) -> Type[T]:
        def __str__(self: T) -> str:
            # IMPORTANT: must not use something that calls __str__
            return ppretty(self,
                           indent=indent,
                           depth=depth,
                           width=width,
                           seq_length=seq_length,
                           show_protected=show_protected,
                           show_private=show_private,
                           show_static=show_static,
                           show_properties=show_properties,
                           show_address=show_address,
                           str_length=str_length)  # type: ignore

        cls.__str__ = __str__
        return cls

    return decorator

trace(value)

Logs at debug level using the invoking module name as the logger.

Source code in qa-testing-utils/src/qa_testing_utils/logger.py
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
def trace[T](value: T) -> T:
    """Logs at debug level using the invoking module name as the logger."""
    frame = inspect.currentframe()
    try:
        if frame is not None:
            caller_frame = frame.f_back
            if caller_frame is not None:
                caller_module = inspect.getmodule(caller_frame)
                logger_name = caller_module.__name__ if caller_module else '__main__'
                logger = logging.getLogger(logger_name)
                logger.debug(f"=== {value}")
            else:
                logging.getLogger(__name__).debug(f"=== {value}")
        else:
            logging.getLogger(__name__).debug(f"=== {value}")
    finally:
        del frame

    return value

tracing(matcher)

Wraps a matcher with TracingMatcher to enable debug logging.

Usage

assert_that(actual, tracing(contains_string("hello")))

Parameters:

Name Type Description Default
matcher Matcher[T]

The matcher to wrap.

required

Returns: TracingMatcher[T]: The wrapped matcher with tracing enabled.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
49
50
51
52
53
54
55
56
57
58
59
60
61
def tracing[T](matcher: Matcher[T]) -> TracingMatcher[T]:
    """
    Wraps a matcher with TracingMatcher to enable debug logging.

    Usage:
        assert_that(actual, tracing(contains_string("hello")))

    Args:
        matcher (Matcher[T]): The matcher to wrap.
    Returns:
        TracingMatcher[T]: The wrapped matcher with tracing enabled.
    """
    return TracingMatcher(matcher)

valid(value)

Validates the specified object, assuming it supports the Valid protocol.

Parameters:

Name Type Description Default
value T (Valid)

The object to validate.

required

Raises:

Type Description
InvalidValueException

If the object is invalid (is_valid() returns False).

Returns:

Name Type Description
T T

Valid: The validated object if valid.

Source code in qa-testing-utils/src/qa_testing_utils/object_utils.py
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
def valid[T:Valid](value: T) -> T:
    """
    Validates the specified object, assuming it supports the Valid protocol.

    Args:
        value (T:Valid): The object to validate.

    Raises:
        InvalidValueException: If the object is invalid (is_valid() returns False).

    Returns:
        T:Valid: The validated object if valid.
    """
    if value.is_valid():
        return value

    raise InvalidValueException(value)

within_dates(start_date, end_date)

Creates an instance of IsWithinDates to check if a date or datetime value falls within the specified start and end dates.

Parameters:

Name Type Description Default
start_date Optional[DateOrDateTime]

The start of the date range. Can be None to indicate no lower bound.

required
end_date Optional[DateOrDateTime]

The end of the date range. Can be None to indicate no upper bound.

required

Returns:

Name Type Description
IsWithinDates IsWithinDates

An instance configured with the provided start and end dates.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
227
228
229
230
231
232
233
234
235
236
237
238
239
240
def within_dates(
        start_date: Optional[DateOrDateTime],
        end_date: Optional[DateOrDateTime]) -> IsWithinDates:
    """
    Builds an IsWithinDates matcher covering the given date range.

    Args:
        start_date (Optional[DateOrDateTime]): The start of the date range. Can be None to indicate no lower bound.
        end_date (Optional[DateOrDateTime]): The end of the date range. Can be None to indicate no upper bound.

    Returns:
        IsWithinDates: An instance configured with the provided start and end dates.
    """
    matcher = IsWithinDates(start_date, end_date)
    return matcher

write_csv(file_path, data_stream)

Writes a stream of flattened telemetry packets to a CSV file.

Parameters:

Name Type Description Default
file_path Path

Path to the CSV file to be written.

required
data_stream Iterable[dict[str, object]]

Iterable of dictionaries representing the rows to be written.

required
Source code in qa-testing-utils/src/qa_testing_utils/file_utils.py
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
def write_csv(file_path: Path, data_stream: Iterable[dict[str, object]]):
    """
    Writes a stream of flattened telemetry packets to a CSV file.

    Args:
        file_path: Path to the CSV file to be written.
        data_stream: Iterable of dictionaries representing the rows to be written.
    """
    rows = peekable(data_stream)  # Allow peeking to extract headers
    try:
        header_row: dict[str, object] = rows.peek()
    except StopIteration:
        return  # no data to write
    with file_path.open(mode="w", newline="") as csv_file:
        # first row's keys define the CSV column order
        writer = csv.DictWriter(csv_file, fieldnames=list(header_row.keys()))
        writer.writeheader()
        writer.writerows(rows)

yields_every(match)

Matches if every element yielded by the iterator matches a given matcher.

:param match: The matcher to satisfy, or an expected value for equality matching.

This matcher iterates through the evaluated iterator, checking that every element satisfies the given matcher. If any element does not match, the matcher fails.

If the match argument is not a matcher, it is implicitly wrapped in an equality matcher.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
261
262
263
264
265
266
267
268
269
270
271
272
273
def yields_every[T](match: Union[Matcher[T], T]) -> Matcher[Iterator[T]]:
    """
    Matches if every element yielded by the iterator matches a given matcher.

    :param match: The matcher to satisfy, or an expected value for equality matching.

    This matcher iterates through the evaluated iterator, checking that every
    element satisfies the given matcher. If any element does not match, the matcher fails.

    If the `match` argument is not a matcher, it is implicitly wrapped in an
    equality matcher.
    """
    return IsStreamContainingEvery(wrap_matcher(match))

yields_item(match)

Matches if any element yielded by the iterator matches a given matcher.

:param match: The matcher to satisfy, or an expected value for :py:func:~hamcrest.core.core.isequal.equal_to matching.

This matcher iterates the evaluated iterator, searching for any element that satisfies a given matcher. If a matching element is found, has_item is satisfied.

If the match argument is not a matcher, it is implicitly wrapped in an :py:func:~hamcrest.core.core.isequal.equal_to matcher to check for equality.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
def yields_item[T](match: Union[Matcher[T], T]) -> Matcher[Iterator[T]]:
    """
    Matches if any element of yielded by iterator matches a given matcher.

    :param match: The matcher to satisfy, or an expected value for
        :py:func:`~hamcrest.core.core.isequal.equal_to` matching.

    This matcher iterates the evaluated iterator, searching for any element
    that satisfies a given matcher. If a matching element is found,
    ``has_item`` is satisfied.

    If the ``match`` argument is not a matcher, it is implicitly wrapped in an
    :py:func:`~hamcrest.core.core.isequal.equal_to` matcher to check for
    equality.
    """
    return IsIteratorYielding(wrap_matcher(match))

yields_items(matches)

Matches if each specified item is yielded at least once by the iterator.

:param matches: An iterable of matchers or values, each of which should be yielded at least once in the iterator for this matcher to succeed.

This matcher will iterate through the evaluated iterator and check if it yields at least one instance of each specified matcher or value.

Source code in qa-testing-utils/src/qa_testing_utils/matchers.py
276
277
278
279
280
281
282
283
284
285
286
287
def yields_items[T](matches: Iterable[Union[Matcher[T],
                                            T]]) -> Matcher[Iterator[T]]:
    """
    Matches if each specified item is yielded at least once by the iterator.

    :param matches: An iterable of matchers or values, each of which should be yielded
                    at least once in the iterator for this matcher to succeed.

    This matcher will iterate through the evaluated iterator and check if it yields
    at least one instance of each specified matcher or value.
    """
    return IsIteratorYieldingAll([wrap_matcher(match) for match in matches])