
Python
Master the Python import system by understanding finders (which locate modules) and loaders (which execute them), and by implementing custom import hooks. The import system consists of finders and loaders working together.
import sys
import importlib.abc
import importlib.machinery

# View the current chain of meta-path finders.
print(sys.meta_path)
# [<class '_frozen_importlib.BuiltinImporter'>,
#  <class '_frozen_importlib.FrozenImporter'>,
#  <class '_frozen_importlib_external.PathFinder'>]

# sys.meta_path determines how modules are found.
# Finders return module specs (with loaders) that know how to load the module.
# The import process — when you do: import mymodule
#
# 1. Check the sys.modules cache first.
import sys

if "mymodule" not in sys.modules:
    # 2. Call each finder in sys.meta_path
    #    (PathFinder finds files on disk).
    # 3. The finder returns a module spec with a loader
    #    (SourceFileLoader for .py files).
    # 4. The loader executes the module's code.
    # 5. The module is added to sys.modules.
    pass
# Create custom finders to intercept imports.
import sys
import importlib.abc
import importlib.machinery


class CustomFinder(importlib.abc.MetaPathFinder):
    """Meta-path finder that logs imports of modules named 'custom_*'.

    It never produces a spec itself: returning None defers to the
    remaining finders in sys.meta_path, so it only observes imports.
    """

    def find_spec(self, fullname, path, target=None):
        """Return a spec for *fullname*, or None to let other finders try."""
        if fullname.startswith("custom_"):
            print(f"Custom finder: Loading {fullname}")
            # Return None so the default machinery still handles loading.
            return None
        return None  # Not our module.


# Register the finder ahead of the built-in ones.
sys.meta_path.insert(0, CustomFinder())

# Now importing a custom_* module triggers our finder, e.g.:
#     import custom_module
#     # Output: Custom finder: Loading custom_module
# NOTE(review): the original example executed `import custom_module`
# directly; since find_spec returns None and no such module exists on
# disk, that line raises ModuleNotFoundError, so it is shown as a
# comment here.
# Build loaders for specialized file formats.
import importlib.abc
import importlib.machinery
import importlib.util
import os
import sys


class JSONLoader(importlib.abc.Loader):
    """Loader that exposes a .json file as a Python module.

    Each top-level key of the JSON object becomes a module attribute.
    """

    def __init__(self, name, path):
        self.name = name  # Fully qualified module name.
        self.path = path  # Filesystem path of the .json file.

    def create_module(self, spec):
        """Return None to use the default module creation."""
        return None

    def exec_module(self, module):
        """Execute the module by parsing the JSON file into attributes."""
        import json

        with open(self.path, "r", encoding="utf-8") as f:
            data = json.load(f)
        # Make each top-level JSON key available as a module attribute.
        for key, value in data.items():
            setattr(module, key, value)


class JSONFinder(importlib.abc.MetaPathFinder):
    """Meta-path finder that locates <name>.json files on sys.path."""

    def find_spec(self, fullname, path, target=None):
        """Return a spec backed by JSONLoader, or None if no file matches."""
        # Simple implementation: check each sys.path entry for a .json file.
        for directory in sys.path:
            json_file = os.path.join(directory, fullname + ".json")
            if os.path.isfile(json_file):
                loader = JSONLoader(fullname, json_file)
                return importlib.util.spec_from_loader(fullname, loader)
        return None


# Register the finder so JSON files become importable.
sys.meta_path.insert(0, JSONFinder())

# Now you can import JSON files as modules!
# Given config.json containing {"debug": true, "port": 8080}:
#     import config
#     print(config.debug)  # True
#     print(config.port)   # 8080
# Import modules by name at runtime.
import importlib

# Dynamic import: the module name is only known at runtime.
module_name = "json"
json_module = importlib.import_module(module_name)
print(json_module.dumps({"key": "value"}))

# import_module also accepts dotted names (e.g. "xml.etree.ElementTree");
# here we load another stdlib module by its string name.
sql_module = importlib.import_module("sqlite3")
def get_database_driver(db_type):
    """Return the DB-API module for *db_type*.

    Falls back to the stdlib ``sqlite3`` driver for any unrecognized
    type. Raises ImportError if the requested third-party driver
    (psycopg2 / mysql.connector) is not installed.
    """
    if db_type == "postgresql":
        return importlib.import_module("psycopg2")
    if db_type == "mysql":
        return importlib.import_module("mysql.connector")
    # Default: the always-available stdlib driver.
    return importlib.import_module("sqlite3")
# Dynamic dispatch pattern
def execute_handler(handler_name):
    """Load ``handlers.<handler_name>`` dynamically and run its execute().

    Dynamic-dispatch pattern: the handler module is resolved by name at
    call time, so new handlers can be added without touching this code.
    Raises ModuleNotFoundError if the handler module does not exist.
    """
    module = importlib.import_module(f"handlers.{handler_name}")
    return module.execute()
# Work directly with module specs for advanced control.
import importlib.util
import importlib.machinery
import sys


def load_module_from_path(module_name, path):
    """Load the Python source file at *path* as *module_name*.

    Demonstrates working with module specs directly:
      1. Build a spec describing where the module lives.
      2. Create an (empty) module object from the spec.
      3. Register it in sys.modules BEFORE executing it, so circular
         imports during execution resolve to this very object instead
         of recursing.
      4. Execute the module's code and return the populated module.
    """
    spec = importlib.util.spec_from_file_location(module_name, path)
    module = importlib.util.module_from_spec(spec)
    # Add to sys.modules before execution (prevents import recursion).
    sys.modules[module_name] = module
    spec.loader.exec_module(module)
    return module


# Example usage:
#     module = load_module_from_path("mymodule", "/path/to/mymodule.py")
#     print(module.some_function())
# NOTE(review): the original ran these statements at module level with a
# placeholder path, which raised FileNotFoundError on execution.
# Transform code during import.
import sys
import importlib.abc
import importlib.util


class TransformingLoader(importlib.abc.Loader):
    """Loader that rewrites a module's source before executing it."""

    def __init__(self, name, path, transformer):
        self.name = name                # Module name.
        self.path = path                # Source file path.
        self.transformer = transformer  # Callable: source str -> source str.

    def exec_module(self, module):
        """Read, transform, compile and execute the module's source."""
        with open(self.path, "r", encoding="utf-8") as f:
            source = f.read()
        # Transform the source code before it ever runs.
        transformed = self.transformer(source)
        # Compile with the real path so tracebacks point at the file.
        exec(compile(transformed, self.path, "exec"), module.__dict__)
def uppercase_transformer(source):
    """Example transformer: upper-case every defined function's name.

    Replaces ``def name`` with ``def NAME`` throughout *source*.
    """
    import re

    # \b ensures we only match the `def` keyword itself, not `def`
    # appearing inside a longer word (e.g. "undef foo").
    return re.sub(r"\bdef (\w+)", lambda m: f"def {m.group(1).upper()}", source)
class TransformingFinder(importlib.abc.MetaPathFinder):
    """Finder that pairs a module-name prefix with a TransformingLoader."""

    def __init__(self, prefix, transformer):
        self.prefix = prefix            # Only names with this prefix are handled.
        self.transformer = transformer  # Source-to-source callable.

    def find_spec(self, fullname, path, target=None):
        """Return a transforming spec for matching modules, else None."""
        if not fullname.startswith(self.prefix):
            return None
        import os

        for directory in sys.path:
            filepath = os.path.join(directory, fullname + ".py")
            if os.path.exists(filepath):
                loader = TransformingLoader(fullname, filepath, self.transformer)
                return importlib.util.spec_from_loader(fullname, loader)
        return None


# Register the transformer for modules starting with 'auto_':
#     sys.meta_path.insert(0, TransformingFinder("auto_", uppercase_transformer))
# Build an extensible plugin architecture.
import importlib
import sys
import os
from pathlib import Path


class PluginManager:
    """Discover, load, reload and run plugins from a directory.

    A plugin is any non-underscore .py file in *plugin_dir* that defines
    a class named ``Plugin`` exposing a ``run()`` method.
    """

    def __init__(self, plugin_dir):
        self.plugin_dir = Path(plugin_dir)
        self.plugins = {}  # name -> loaded Plugin instance.
        # Make the plugin directory importable by name.
        if str(self.plugin_dir) not in sys.path:
            sys.path.insert(0, str(self.plugin_dir))

    def discover_plugins(self):
        """Return the names of available plugin modules."""
        return [
            py_file.stem
            for py_file in self.plugin_dir.glob("*.py")
            if not py_file.name.startswith("_")
        ]

    def load_plugin(self, plugin_name):
        """Load (and cache) a plugin by name; return None on failure."""
        if plugin_name in self.plugins:
            return self.plugins[plugin_name]
        try:
            module = importlib.import_module(plugin_name)
            # Verify the plugin exposes the required interface.
            if not hasattr(module, "Plugin"):
                raise ImportError(f"{plugin_name} missing Plugin class")
            plugin = module.Plugin()
            self.plugins[plugin_name] = plugin
            return plugin
        except ImportError as e:
            print(f"Failed to load plugin {plugin_name}: {e}")
            return None

    def reload_plugin(self, plugin_name):
        """Reload a plugin (useful during development)."""
        # Dropping the sys.modules entry forces a fresh import.
        sys.modules.pop(plugin_name, None)
        self.plugins.pop(plugin_name, None)
        return self.load_plugin(plugin_name)

    def execute_all(self):
        """Run every loaded plugin, reporting success or failure."""
        for plugin_name, plugin in self.plugins.items():
            try:
                plugin.run()
                print(f"✓ {plugin_name} executed")
            except Exception as e:
                print(f"✗ {plugin_name} failed: {e}")


# Usage:
#     manager = PluginManager("./plugins")
#     for name in manager.discover_plugins():
#         manager.load_plugin(name)
#     manager.execute_all()
import sys
import time
import importlib


class ImportTimer:
    """Measure and report how long individual imports take."""

    def __init__(self):
        self.times = {}  # module name -> elapsed seconds.

    def start_import(self, module_name):
        """Return a timestamp taken just before the import starts."""
        # perf_counter is monotonic and high-resolution — the right
        # clock for measuring intervals (time.time can jump with the
        # system clock).
        return time.perf_counter()

    def end_import(self, module_name, start_time):
        """Record and print the elapsed import time for *module_name*."""
        elapsed = time.perf_counter() - start_time
        self.times[module_name] = elapsed
        print(f"{module_name}: {elapsed:.4f}s")

    def report(self):
        """Print the ten slowest recorded imports, slowest first."""
        sorted_times = sorted(self.times.items(), key=lambda x: x[1], reverse=True)
        print("\nImport Times (slowest first):")
        for module, elapsed in sorted_times[:10]:
            print(f"  {module:30} {elapsed:.4f}s")
# Measure some imports with the timer.
timer = ImportTimer()

start = timer.start_import("json")
import json
timer.end_import("json", start)

start = timer.start_import("requests")
import requests  # Third-party; requires `pip install requests`.
timer.end_import("requests", start)

timer.report()
Resources
Ojasa Mirai
Master AI-powered development skills through structured learning, real projects, and verified credentials. Whether you're upskilling your team or launching your career, we deliver the skills companies actually need.
Learn Deep • Build Real • Verify Skills • Launch Forward