Fundamentals 11 min read

Advanced pytest Fixtures and Decorators for Test Setup, Skipping, Retries, Parametrization, and More

This article demonstrates how to use pytest fixtures and a collection of custom decorators to handle setup/teardown, conditional skipping, retry logic, parameterized tests, timing, error handling, logging, transaction management, API authentication, response validation, concurrency, data cleanup, performance monitoring, state checks, and test marking in Python testing.

Test Development Learning Exchange
Test Development Learning Exchange
Test Development Learning Exchange
Advanced pytest Fixtures and Decorators for Test Setup, Skipping, Retries, Parametrization, and More

Setup and Teardown

In pytest, a fixture defined with @pytest.fixture can perform pre‑test setup (e.g., login, environment initialization) and post‑test teardown (e.g., logout, cleanup) using a yield statement.

import pytest


@pytest.fixture
def setup_teardown():
    """Run preparation before each test that requests this fixture, and cleanup after."""
    print("测试开始前的准备工作")
    yield  # hand control to the test body
    print("测试结束后的清理工作")


def test_example(setup_teardown):
    """Example test wrapped by the setup/teardown fixture."""
    print("执行测试用例")

Conditional Skipping (skip and skipif)

Use @pytest.mark.skip to unconditionally skip a test and @pytest.mark.skipif to skip based on a condition.

import sys  # BUG FIX: the original referenced sys.version_info without importing sys

import pytest


@pytest.mark.skip(reason="暂时不执行此测试")
def test_skip_example():
    """Always skipped; the reason appears in the pytest report."""
    pass


@pytest.mark.skipif(sys.version_info < (3, 7), reason="仅适用于Python 3.7及以上版本")
def test_skipif_example():
    """Skipped on interpreters older than Python 3.7."""
    pass

Retry (retry)

For flaky tests, the pytest-rerunfailures plugin can automatically rerun failed tests.

import pytest
from pytest import Retry
@pytest.mark.flaky(reruns=2, reruns_delay=2)
def test_retry_example():
assert False  # simulated intermittent failure

Parametrize (parametrize)

Run the same test with multiple input values using @pytest.mark.parametrize.

import pytest


@pytest.mark.parametrize("input_value, expected", [
    (3, 6),
    (4, 8),
    (5, 10),
])
def test_parametrize_example(input_value, expected):
    """Each (input_value, expected) pair above becomes its own test case."""
    doubled = input_value * 2
    assert doubled == expected

Timer (timer)

A custom decorator measures execution time of a test function.

import time
from functools import wraps


def timer(func):
    """Decorator that prints the wall-clock run time of *func* on every call."""
    @wraps(func)  # preserve __name__ so pytest still collects the wrapped test
    def wrapper(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)
        end_time = time.time()
        print(f"{func.__name__} 用时: {end_time - start_time}秒")
        return result
    return wrapper


# BUG FIX: the original used @pytest.mark.usefixtures("timer"), but `timer`
# is a plain decorator, not a fixture -- pytest would error with
# "fixture 'timer' not found". Apply the decorator directly instead.
@timer
def test_timer_example():
    time.sleep(1)  # simulate long operation

Error Handling Decorator

Automatically catches exceptions in a test and can report or log them.

def error_handler(func):
    """Run *func*; if it raises, log the error and return False instead."""
    def wrapper(*args, **kwargs):
        try:
            outcome = func(*args, **kwargs)
        except Exception as e:
            print(f"测试中发生错误: {e}")
            # optional error reporting logic
            return False
        return outcome
    return wrapper


@error_handler
def test_with_error_handling():
    raise ValueError("模拟错误")

Log Enhancement Decorator

Enriches test functions with detailed logging before and after execution.

import functools
import logging


def log_decorator(logger=logging.getLogger(__name__)):
    """Decorator factory that logs before and after each call of the wrapped function.

    The default logger is bound once at definition time, which is fine for
    module-level loggers.
    """
    def decorator(func):
        # BUG FIX: without functools.wraps the wrapped test's __name__ becomes
        # "wrapper", which breaks pytest collection and reporting.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            logger.info(f"开始执行: {func.__name__}")
            result = func(*args, **kwargs)
            logger.info(f"{func.__name__} 执行完成")
            return result
        return wrapper
    return decorator


@log_decorator()
def test_with_logging():
    print("这是一个测试函数")

Transaction Management Decorator

Ensures database operations are atomic by committing on success or rolling back on error.

from contextlib import contextmanager


@contextmanager
def db_transaction(session):
    """Yield *session*; commit when the body (and commit) succeed, otherwise roll back and re-raise."""
    try:
        yield session
        session.commit()  # inside try: a failed commit also triggers rollback
    except Exception:
        session.rollback()
        raise


def transactional_test(session):
    """Run a database operation wrapped in a transaction scope."""
    @db_transaction(session)
    def _run(session):
        # database operations go here
        pass

    _run(session)

API Authentication Decorator

Injects a bearer token into request headers automatically.

def auth_required(token):
    """Decorator factory that injects a Bearer *token* into the `headers` kwarg."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            # setdefault both creates a missing headers dict and reuses a
            # caller-supplied one (mutating it, as the original did)
            hdrs = kwargs.setdefault('headers', {})
            hdrs['Authorization'] = f"Bearer {token}"
            return func(*args, **kwargs)
        return wrapper
    return decorator


@auth_required("your_token_here")
def test_api_endpoint(headers=None):
    # issue the API request using the injected headers
    pass

Response Validation Decorator

Validates API response status code and optionally its content.

def response_validator(status_code=200, content_checker=None):
def decorator(func):
def wrapper(*args, **kwargs):
response = func(*args, **kwargs)
assert response.status_code == status_code, f"状态码不匹配,期望:{status_code} 实际:{response.status_code}"
if content_checker:
assert content_checker(response.json()), "响应内容验证失败"
return response
return wrapper
return decorator
@response_validator(status_code=200, content_checker=lambda json: json.get('success'))
def test_api_response():
# 发起API请求并返回响应
pass

Concurrent Test Decorator

Runs a test function concurrently in multiple threads to simulate load.

import threading
from functools import wraps


def concurrent_test(num_threads=5):
    """Decorator that runs the wrapped function in *num_threads* threads at once.

    BUG FIX: the original let exceptions die inside worker threads, so a
    failing assertion in the test body could never fail the test. Worker
    exceptions are now collected and the first one is re-raised in the caller.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            errors = []

            def target():
                try:
                    func(*args, **kwargs)
                except BaseException as exc:  # captured so the main thread can re-raise
                    errors.append(exc)

            threads = [threading.Thread(target=target) for _ in range(num_threads)]
            for thread in threads:
                thread.start()
            for thread in threads:
                thread.join()
            if errors:
                raise errors[0]  # surface the first worker failure
        return wrapper
    return decorator


@concurrent_test(10)
def test_concurrent_api_call():
    # issue the API request
    pass

Data Cleanup Decorator

Ensures test‑generated data is cleaned up after each test execution.

def cleanup_after_test(cleanup_func):
    """Decorator factory guaranteeing *cleanup_func* runs after the test, pass or fail."""
    def _decorate(func):
        def _wrapped(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            finally:
                # finally ensures cleanup even when the test raises
                cleanup_func()
        return _wrapped
    return _decorate


def clean_database():
    # database cleanup operations
    pass


@cleanup_after_test(clean_database)
def test_that_makes_changes():
    # operations that may leave data needing cleanup
    pass

Performance Monitor Decorator

Measures execution time and warns if it exceeds a threshold.

import time  # BUG FIX: the original snippet used `time` without importing it


def performance_monitor(threshold_seconds=1):
    """Decorator factory that warns when the wrapped call exceeds *threshold_seconds*."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            start_time = time.time()
            result = func(*args, **kwargs)
            elapsed_time = time.time() - start_time
            if elapsed_time > threshold_seconds:
                print(f"警告:{func.__name__} 执行超过{threshold_seconds}秒,实际耗时{elapsed_time}秒")
            return result
        return wrapper
    return decorator


@performance_monitor(2)
def slow_function():
    time.sleep(3)

State Validation Decorator

Checks a pre‑condition before running a test and skips it if the condition is not met.

def require_state(check_state_func):
    """Decorator factory that skips the wrapped test when the precondition fails.

    When *check_state_func* returns falsy, the test body is not run and the
    wrapper returns None.
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            if check_state_func():
                return func(*args, **kwargs)
            print(f"前置条件不满足,跳过测试: {func.__name__}")
            return None
        return wrapper
    return decorator


def is_service_ready():
    # check whether the service is up
    pass


@require_state(is_service_ready)
def test_dependent_on_service():
    # test that depends on the service state
    pass

Test Marking Decorator

Applies pytest markers to categorize tests for selective execution.

import pytest


@pytest.mark.smoke
def test_smoke_example():
    # smoke test for quickly verifying core functionality
    pass


@pytest.mark.performance
def test_performance_example():
    # performance test
    pass
PythonAutomationtestingDecoratorspytestfixtures
Test Development Learning Exchange
Written by

Test Development Learning Exchange

Test Development Learning Exchange

0 followers
Reader feedback

How this landed with the community

login Sign in to like

Rate this article

Was this worth your time?

Sign in to rate
Discussion

0 Comments

Thoughtful readers leave field notes, pushback, and hard-won operational detail here.