
Python
Learn to design robust, maintainable modules with advanced patterns and best practices.
Craft clean, intuitive module APIs using __all__ and naming conventions.
# data_processing.py - Well-designed module API
"""
Data processing utilities with clear public/private separation.
Public API:
- process_csv(filename): Process CSV files
- validate_data(data): Validate dataset
- export_to_json(data, filename): Export to JSON
Private utilities (prefix with _):
- _normalize_column_names(): Internal helper
- _handle_missing_values(): Internal helper
"""
import csv
import json
from typing import List, Dict, Any
# Explicit public API: the only names exported by `from data_processing import *`.
# Private helpers (leading underscore) are deliberately excluded.
__all__ = [
    "process_csv",
    "validate_data",
    "export_to_json",
    "DataProcessor"
]

# Module metadata
__version__ = "1.0.0"
__author__ = "Data Team"
class DataProcessor:
    """Main data processing class.

    Dispatches between strict and lenient processing based on the
    ``strict_mode`` flag supplied at construction time.
    """

    def __init__(self, strict_mode=False):
        self.strict_mode = strict_mode
        self._cache = {}  # reserved for memoized intermediate results

    def process(self, data: List[Dict]) -> List[Dict]:
        """Process data with validation."""
        handler = self._process_strict if self.strict_mode else self._process_lenient
        return handler(data)

    def _process_strict(self, data):
        """Internal strict processing (not yet implemented)."""
        # Implementation
        pass

    def _process_lenient(self, data):
        """Internal lenient processing (not yet implemented)."""
        # Implementation
        pass
def process_csv(filename: str) -> List[Dict[str, Any]]:
    """
    Process CSV file and return rows as a list of dicts.

    The first row is treated as the header; each subsequent row becomes
    a dict keyed by those header names (all values are strings).

    Args:
        filename: Path to CSV file

    Returns:
        List of dictionaries with row data

    Raises:
        FileNotFoundError: If file doesn't exist (propagated from open())
        ValueError: If CSV is malformed
    """
    try:
        # newline="" is the documented requirement for the csv module.
        with open(filename, newline="") as f:
            return [dict(row) for row in csv.DictReader(f)]
    except csv.Error as e:
        # Surface parser failures as the documented ValueError.
        raise ValueError(f"Malformed CSV in {filename}: {e}") from e
def validate_data(data: List[Dict]) -> bool:
    """Validate data structure and content.

    A dataset is considered valid when it is a list whose elements are
    all dicts (the row shape produced by process_csv).

    Args:
        data: Candidate dataset.

    Returns:
        True if ``data`` is a list of dict rows, False otherwise.
    """
    if not isinstance(data, list):
        return False
    return all(isinstance(row, dict) for row in data)
def export_to_json(data: List[Dict], filename: str) -> None:
    """Export data to a JSON file.

    Args:
        data: Rows to serialize.
        filename: Destination path (overwritten if it exists).
    """
    # Explicit UTF-8 avoids platform-default encodings; ensure_ascii=False
    # keeps non-ASCII text human-readable in the output file.
    with open(filename, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
def _normalize_column_names(headers: List[str]) -> List[str]:
"""Internal: normalize column names (prefix with _)."""
return [h.lower().replace(" ", "_") for h in headers]Pattern 1: Lazy Loading with Properties
class HeavyModule:
    """Module with lazy-loaded expensive resources.

    Neither the database connection nor the cache client is created
    until the corresponding property is first read.
    """

    def __init__(self):
        self._database = None
        self._cache = None

    @property
    def database(self):
        """Open the SQLite connection on first access, then reuse it."""
        if self._database is None:
            import sqlite3
            connection = sqlite3.connect(":memory:")
            self._database = connection
        return self._database

    @property
    def cache(self):
        """Create the Redis client on first access, then reuse it."""
        if self._cache is None:
            import redis
            client = redis.Redis()
            self._cache = client
        return self._cache
# Usage: constructing the object is cheap...
module = HeavyModule()
# No heavy resources loaded yet
result = module.database.execute("SELECT 1")  # Database loads here

# Pattern 2: Context Managers for Resource Management
from contextlib import contextmanager
class DatabaseModule:
    """Module managing database connections.

    Every operation runs inside a short-lived connection that commits on
    success, rolls back on error, and always closes.
    """

    def __init__(self, db_path):
        self.db_path = db_path

    @contextmanager
    def connection(self):
        """Context manager yielding an open SQLite connection."""
        import sqlite3
        conn = sqlite3.connect(self.db_path)
        try:
            yield conn
        except Exception:
            conn.rollback()
            raise
        else:
            # Commit only when the with-body completed without raising.
            conn.commit()
        finally:
            conn.close()

    def execute_query(self, query):
        """Run a query on a fresh connection and return all rows."""
        with self.connection() as active:
            return active.execute(query).fetchall()
# Usage
db = DatabaseModule("data.db")
with db.connection() as conn:
    result = conn.execute("SELECT * FROM users").fetchall()

# Pattern 3: Registry Pattern for Extensibility
class HandlerRegistry:
    """Extensible handler registry pattern.

    Handlers are registered under a string name via the register()
    decorator and invoked later by name through execute().
    """

    def __init__(self):
        self._handlers = {}

    def register(self, name):
        """Return a decorator that stores the function under ``name``."""
        def add(func):
            self._handlers[name] = func
            return func
        return add

    def get(self, name):
        """Look up a handler by name; None if not registered."""
        return self._handlers.get(name)

    def execute(self, handler_name, *args, **kwargs):
        """Invoke the named handler with the given arguments.

        Raises:
            ValueError: if no handler is registered under that name.
        """
        target = self.get(handler_name)
        if target is None:
            raise ValueError(f"Unknown handler: {handler_name}")
        return target(*args, **kwargs)
# Usage: register format handlers by name, then dispatch by name.
handlers = HandlerRegistry()


@handlers.register("json")
def handle_json(data):
    import json
    return json.dumps(data)


@handlers.register("csv")
def handle_csv(data):
    # NOTE(review): csv is imported but unused in this stub implementation.
    import csv
    return "CSV format"


result = handlers.execute("json", {"key": "value"})

# Pattern 1: Configuration Module
# config.py - Configuration management
import os
from typing import Any, Dict
class Config:
    """Base configuration.

    Subclasses override class attributes per environment.
    """

    DEBUG = False
    LOG_LEVEL = "INFO"
    DATABASE_URL = "sqlite:///app.db"
    # NOTE(review): placeholder secret — must be overridden before deployment.
    SECRET_KEY = "change-me"
class DevelopmentConfig(Config):
    """Development configuration: verbose logging, local dev database."""

    DEBUG = True
    LOG_LEVEL = "DEBUG"
    DATABASE_URL = "sqlite:///dev.db"
class ProductionConfig(Config):
    """Production configuration."""

    DEBUG = False
    LOG_LEVEL = "WARNING"
    # Read once at class-definition (import) time; silently None when the
    # DATABASE_URL env var is unset — callers should verify before use.
    DATABASE_URL = os.environ.get("DATABASE_URL")
def get_config(env: str = None) -> Config:
    """Return the config object for the given environment.

    When ``env`` is None, falls back to the APP_ENV environment variable
    (default "development"). Any value other than "production" selects
    the development config.
    """
    selected = env if env is not None else os.environ.get("APP_ENV", "development")
    if selected == "production":
        return ProductionConfig()
    return DevelopmentConfig()
# Usage
config = get_config()
print(config.DEBUG)

# Pattern 2: Feature Flags Module
# features.py - Feature flags for gradual rollout
from typing import Dict, Callable, Any
class FeatureFlags:
    """Manage feature flags.

    A feature is live when it has been enabled AND (if a condition is
    registered) that condition returns truthy for the given context.
    """

    def __init__(self):
        self._flags: Dict[str, bool] = {}
        self._conditions: Dict[str, Callable] = {}

    def enable(self, feature: str) -> None:
        """Turn a feature on."""
        self._flags[feature] = True

    def disable(self, feature: str) -> None:
        """Turn a feature off."""
        self._flags[feature] = False

    def is_enabled(self, feature: str, context: Any = None) -> bool:
        """Report whether a feature is currently enabled for ``context``."""
        # Unknown or explicitly-off flags are disabled.
        if not self._flags.get(feature, False):
            return False
        condition = self._conditions.get(feature)
        if condition is None:
            return True
        return condition(context)

    def register_condition(self, feature: str, condition: Callable) -> None:
        """Attach a dynamic predicate evaluated on each is_enabled() call."""
        self._conditions[feature] = condition
# Demo: a statically-enabled flag plus a dynamically-gated one.
flags = FeatureFlags()
flags.enable("new_ui")
# The condition receives is_enabled()'s `context` argument; here ctx is
# assumed to expose .user.is_beta — confirm against actual callers.
flags.register_condition("beta_api", lambda ctx: ctx.user.is_beta)
if flags.is_enabled("new_ui"):
    # Use new UI
    pass

from typing import Optional, List, Union, Callable
from dataclasses import dataclass
@dataclass
class UserData:
    """User data structure."""

    username: str
    email: str
    age: int


def process_users(
    users: List[UserData],
    filter_fn: Optional[Callable[[UserData], bool]] = None,
    transform_fn: Optional[Callable[[UserData], UserData]] = None
) -> List[UserData]:
    """
    Process user data with optional filtering and transformation.

    Filtering (when given) runs before transformation. With neither
    callback supplied, the input list is returned unchanged.

    Args:
        users: List of UserData objects
        filter_fn: Optional predicate selecting which users to keep
        transform_fn: Optional function mapping each kept user to a new one

    Returns:
        Processed list of UserData

    Examples:
        >>> users = [UserData("alice", "a@example.com", 25)]
        >>> result = process_users(users)
        >>> len(result)
        1

    Raises:
        TypeError: If users is not a list
    """
    if not isinstance(users, list):
        raise TypeError("users must be a list")
    result = users
    if filter_fn:
        result = [user for user in result if filter_fn(user)]
    if transform_fn:
        result = [transform_fn(user) for user in result]
    return result


import warnings
from functools import wraps
__version__ = "2.0.0"


def deprecated(alternative: str = None):
    """Decorator factory marking a function as deprecated.

    Each call to the wrapped function emits a DeprecationWarning,
    optionally naming the replacement API.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            message = f"{func.__name__} is deprecated"
            if alternative:
                message = message + f", use {alternative} instead"
            # stacklevel=2 points the warning at the caller, not the wrapper.
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapper
    return decorator
@deprecated(alternative="new_process")
def old_process(data):
    """Deprecated: Use new_process instead."""
    return data


def new_process(data):
    """New implementation."""
    return data


# Usage
old_process([1, 2, 3])  # Emits DeprecationWarning (warnings.warn does not raise by default)

# module_utils.py - Module with self-validation
def _validate_module():
"""Validate module at import time."""
import sys
# Check Python version
if sys.version_info < (3, 7):
raise RuntimeError("Requires Python 3.7+")
# Check required dependencies
try:
import numpy
import pandas
except ImportError as e:
raise ImportError(f"Missing required dependency: {e}")
# Run validation on import
_validate_module()
# Rest of module...Ready to practice? Challenges | Quiz
Resources
Ojasa Mirai
Master AI-powered development skills through structured learning, real projects, and verified credentials. Whether you're upskilling your team or launching your career, we deliver the skills companies actually need.
Learn Deep • Build Real • Verify Skills • Launch Forward