Ojasa Mirai

Ojasa Mirai

Python

Loading...

Learning Level

🟢 Beginner🔵 Advanced
Why Loops?For Loops BasicsWhile Loops BasicsLoop ControlIterating ListsLoop PatternsNested LoopsDebugging LoopsBest Practices
Python/Loops/While Loops Basics

Advanced While Loops

While loops are essential for condition-based iteration, numerical methods, and algorithms that require dynamic termination conditions. At an advanced level, we explore convergence algorithms, numerical methods, and sophisticated termination strategies.

Convergence Algorithms

Convergence algorithms are iterative methods that gradually approach a solution. They're fundamental in numerical analysis and machine learning.

Newton-Raphson Method (Root Finding)

import math

def newton_raphson(f, f_prime, initial_guess, tolerance=1e-10, max_iterations=100):
    """
    Locate a root of f(x) = 0 via Newton-Raphson iteration.

    Args:
        f: Target function.
        f_prime: Derivative of f.
        initial_guess: Starting estimate for the root.
        tolerance: Convergence threshold (epsilon) on |f(x)| and on step size.
        max_iterations: Iteration budget before giving up.

    Returns:
        (root, iterations) tuple.

    Raises:
        ValueError: If the derivative is numerically zero.
        RuntimeError: If the budget is exhausted without converging.
    """
    estimate = initial_guess

    for step in range(max_iterations):
        residual = f(estimate)

        # Converged: the function value is already within tolerance.
        if abs(residual) < tolerance:
            return estimate, step

        slope = f_prime(estimate)

        # A near-zero derivative would blow up the update below.
        if abs(slope) < 1e-15:
            raise ValueError("Derivative too close to zero")

        # Newton-Raphson step: follow the tangent line to its x-intercept.
        updated = estimate - residual / slope

        # Converged: the step became negligibly small.
        if abs(updated - estimate) < tolerance:
            return updated, step + 1

        estimate = updated

    raise RuntimeError(f"Failed to converge after {max_iterations} iterations")

# Finding square root of 2 using Newton-Raphson
def f(x):
    """Target function f(x) = x**2 - 2, whose positive root is √2."""
    value = x**2 - 2
    return value

def f_prime(x):
    """Derivative of f: d/dx (x**2 - 2) = 2x."""
    doubled = 2 * x
    return doubled

# Demo: solve x² - 2 = 0 starting from x = 1.0 (expected root ≈ 1.414213...).
root, iterations = newton_raphson(f, f_prime, 1.0)
print(f"Root of x²-2: {root:.15f}")
print(f"Converged in {iterations} iterations")
# Squaring the root should reproduce 2 (up to floating-point error).
print(f"Verification: {root**2:.15f}")

Bisection Method (Safe but Slower)

def bisection(f, a, b, tolerance=1e-10, max_iterations=1000):
    """
    Locate a root of f(x) = 0 inside [a, b] by repeatedly halving the bracket.

    Convergence is guaranteed whenever f(a) and f(b) have opposite signs.
    Time complexity: O(log((b-a)/tolerance)).

    Returns:
        (root, iterations) tuple.

    Raises:
        ValueError: If f(a) and f(b) share the same sign.
    """

    # A sign change across [a, b] is required for the method to work.
    if f(a) * f(b) >= 0:
        raise ValueError("Function must have opposite signs at endpoints")

    for count in range(max_iterations):
        midpoint = (a + b) / 2
        mid_value = f(midpoint)

        # Done when the bracket is tiny or the midpoint is (nearly) a root.
        if abs(b - a) < tolerance or abs(mid_value) < tolerance:
            return midpoint, count

        # Keep whichever half still brackets the sign change.
        if f(a) * mid_value < 0:
            b = midpoint
        else:
            a = midpoint

    # Budget exhausted: return the centre of the final bracket.
    return (a + b) / 2, max_iterations

# Demo: the cube root of 8 is the root of x³ - 8, bracketed by [0, 3].
root, iterations = bisection(lambda x: x**3 - 8, 0, 3)
print(f"Cube root of 8: {root:.10f}")
print(f"Converged in {iterations} iterations")

Fixed Point Iteration

def fixed_point_iteration(g, initial_guess, tolerance=1e-10, max_iterations=1000):
    """
    Solve x = g(x) by repeatedly applying g.

    Converges when |g'(x)| < 1 near the fixed point.

    Returns:
        (fixed_point, iterations) tuple.

    Raises:
        RuntimeError: If the iteration budget runs out first.
    """
    current = initial_guess

    for step in range(max_iterations):
        candidate = g(current)

        # Stop once successive iterates are essentially identical.
        if abs(candidate - current) < tolerance:
            return candidate, step + 1

        current = candidate

    raise RuntimeError(f"Failed to converge after {max_iterations} iterations")

# Demo: the fixed point of cos is the Dottie number (≈ 0.739085).
root, iterations = fixed_point_iteration(math.cos, 0.7)
print(f"Solution to x = cos(x): {root:.15f}")
print(f"Converged in {iterations} iterations")
print(f"Verification: cos({root}) = {math.cos(root):.15f}")

Numerical Methods and Simulations

Golden Section Search (Optimization)

def golden_section_search(f, a, b, tolerance=1e-10):
    """
    Find the minimum of a unimodal function f on [a, b].

    Uses the golden ratio φ = (1 + √5) / 2 so that one interior probe can
    be reused at every step.
    Time complexity: O(log((b-a)/tolerance)).

    Returns:
        (minimiser, iterations) tuple.
    """
    golden = (1 + math.sqrt(5)) / 2
    shrink = 2 - golden  # ≈ 0.382, the complementary golden fraction

    # Two interior probe points, placed symmetrically inside [a, b].
    left = a + shrink * (b - a)
    right = b - shrink * (b - a)
    left_val = f(left)
    right_val = f(right)

    steps = 0
    while abs(b - a) > tolerance:
        steps += 1

        if left_val < right_val:
            # Minimum lies in [a, right]; the old left probe becomes the new right.
            b, right, right_val = right, left, left_val
            left = a + shrink * (b - a)
            left_val = f(left)
        else:
            # Minimum lies in [left, b]; the old right probe becomes the new left.
            a, left, left_val = left, right, right_val
            right = b - shrink * (b - a)
            right_val = f(right)

    return (a + b) / 2, steps

# Demo: the parabola (x - 3)² attains its minimum at x = 3.
minimum, iterations = golden_section_search(lambda x: (x - 3)**2, 0, 5)
print(f"Minimum at x = {minimum:.10f}")
print(f"Converged in {iterations} iterations")

Gradient Descent (Optimization)

def gradient_descent(f_gradient, initial_point, learning_rate=0.01,
                     tolerance=1e-6, max_iterations=10000):
    """
    Minimise a differentiable function by stepping against its gradient.

    Args:
        f_gradient: Callable returning the gradient at a point (as a tuple).
        initial_point: Starting point (tuple of coordinates).
        learning_rate: Step size (alpha) applied per gradient component.
        tolerance: Stop once the gradient magnitude drops below this.
        max_iterations: Upper bound on the number of update steps.

    Returns:
        (final_point, iterations, history) — history holds every visited point.
    """
    current = initial_point
    trail = [current]

    for step in range(max_iterations):
        grad = f_gradient(current)

        # A (near-)zero gradient means we are at a local minimum.
        magnitude = math.sqrt(sum(component**2 for component in grad))
        if magnitude < tolerance:
            return current, step, trail

        # Move each coordinate against its gradient component.
        current = tuple(c - learning_rate * g for c, g in zip(current, grad))
        trail.append(current)

    return current, max_iterations, trail

# Minimize f(x, y) = (x-2)² + (y+1)²
def gradient_2d(point):
    """Gradient of f(x, y) = (x-2)² + (y+1)² at the given (x, y) point."""
    x_coord, y_coord = point
    dx = 2 * (x_coord - 2)
    dy = 2 * (y_coord + 1)
    return (dx, dy)

# Demo: descend from the origin toward the minimum at (2, -1).
minimum, iterations, history = gradient_descent(gradient_2d, (0, 0),
                                                 learning_rate=0.1)
print(f"Minimum found at: {minimum}")
print(f"Converged in {iterations} iterations")

Complex Loop Control Patterns

Adaptive Loop Termination

def adaptive_iteration(process_fn, initial_state, max_iterations=1000):
    """
    Run process_fn repeatedly, stopping early once progress stalls.

    Terminates after `patience` consecutive iterations whose score
    improvement (as measured by the module-level `evaluate`) is negligible.

    Returns:
        (final_state, iterations) tuple.
    """
    patience = 5  # consecutive stale iterations tolerated before stopping
    stale_streak = 0
    last_score = float('inf')
    state = initial_state

    for step in range(max_iterations):
        state = process_fn(state)
        score = evaluate(state)

        # Track how long the score has gone without meaningful improvement.
        if last_score - score < 1e-6:  # Negligible improvement
            stale_streak += 1
            if stale_streak >= patience:
                return state, step
        else:
            stale_streak = 0

        last_score = score

    return state, max_iterations

def evaluate(state):
    """Score a state as the sum of the absolute values of its components."""
    return sum(map(abs, state))

def process_fn(state):
    """Shrink every component of the state by 5%."""
    return tuple(0.95 * value for value in state)

Time-Limited Iteration (Timeout Pattern)

import time

def timed_iteration(process_fn, initial_state, max_seconds=5.0):
    """
    Repeatedly apply process_fn to the state until a time budget expires.

    Useful for real-time systems and interactive applications where the
    work per run must be bounded by elapsed time rather than a count.

    Args:
        process_fn: State-transition function applied once per iteration.
        initial_state: Starting state.
        max_seconds: Time budget in seconds.

    Returns:
        (final_state, iterations, elapsed_seconds) tuple.
    """
    state = initial_state
    # time.monotonic() cannot jump backwards or forwards (unlike
    # time.time(), which follows wall-clock adjustments), so it is the
    # correct clock for measuring elapsed intervals.
    start_time = time.monotonic()
    iteration = 0

    while time.monotonic() - start_time < max_seconds:
        state = process_fn(state)
        iteration += 1

        # Optional: yield control periodically
        if iteration % 1000 == 0:
            elapsed = time.monotonic() - start_time
            print(f"Iteration {iteration}, elapsed: {elapsed:.2f}s")

    elapsed = time.monotonic() - start_time
    return state, iteration, elapsed

# Usage
def heavy_computation(state):
    """Simulate an expensive state update: sleep ~1 ms, then increment.

    (Removed an unused local `import random` that the original carried.)
    """
    time.sleep(0.001)  # stand-in for real work
    return state + 1

# Demo: run the heavy computation under a 2-second budget.
final_state, iterations, elapsed = timed_iteration(heavy_computation, 0, 2.0)
print(f"Completed {iterations} iterations in {elapsed:.2f} seconds")

Multi-Condition Termination

def multi_condition_loop(process_fn, initial_state):
    """
    Iterate process_fn with several independent termination conditions:

    1. the state stops changing for too many consecutive iterations,
    2. the state becomes invalid (per the module-level `is_valid`),
    3. the goal is reached (per the module-level `is_goal`),
    4. a hard iteration cap is hit.

    Returns:
        (state, iterations) — the invalid-state case returns the last
        valid (previous) state instead of the offending one.
    """
    max_iterations = 1000
    max_stale_iterations = 50
    stale_count = 0

    state = initial_state
    previous_state = None

    for step in range(max_iterations):
        state = process_fn(state)

        # Condition 1: no change since the previous iteration.
        if state == previous_state:
            stale_count += 1
            if stale_count >= max_stale_iterations:
                print("Terminating: state unchanged for 50 iterations")
                return state, step
        else:
            stale_count = 0

        # Condition 2: the new state is invalid — fall back to the old one.
        if not is_valid(state):
            print("Terminating: invalid state")
            return previous_state, step

        # Condition 3: success.
        if is_goal(state):
            print("Terminating: goal reached")
            return state, step

        previous_state = state

    print(f"Terminating: max iterations ({max_iterations}) reached")
    return state, max_iterations

def is_valid(state):
    """A state is valid whenever it is present (i.e. not None)."""
    missing = state is None
    return not missing

def is_goal(state):
    """The goal is any state strictly greater than 100."""
    threshold = 100
    return state > threshold

def process_fn(state):
    """Advance the state by one; start from 0 when there is no state yet.

    Bug fix: the original tested truthiness (`if state`), which wrongly
    reset a legitimate state of 0 back to 0 on every call — so a loop
    starting at 0 could never progress. Test for None explicitly.
    """
    return state + 1 if state is not None else 0

Advanced Numerical Algorithms

Secant Method (Root Finding without Derivative)

def secant_method(f, x0, x1, tolerance=1e-10, max_iterations=100):
    """
    Find a root of f using the secant method (no derivative required).

    Convergence is super-linear: faster than bisection, slower than
    Newton-Raphson.

    Returns:
        (root, iterations) tuple.

    Raises:
        ValueError: If the secant becomes numerically horizontal.
        RuntimeError: If the iteration budget is exhausted.
    """
    for step in range(max_iterations):
        value_prev = f(x0)
        value_curr = f(x1)

        # Already close enough to a root?
        if abs(value_curr) < tolerance:
            return x1, step

        # A flat secant line has no finite x-intercept.
        if abs(value_curr - value_prev) < 1e-15:
            raise ValueError("Secant method failed: f(x1) ≈ f(x0)")

        # Intersect the secant through (x0, f0) and (x1, f1) with the x-axis.
        x_next = x1 - value_curr * (x1 - x0) / (value_curr - value_prev)

        # Stop when the step size is negligible.
        if abs(x_next - x1) < tolerance:
            return x_next, step + 1

        x0, x1 = x1, x_next

    raise RuntimeError(f"Failed to converge after {max_iterations} iterations")

# Finding root of x³ - 2x - 5
# Bug fix: the original inlined an unparenthesised lambda inside an f-string
# (`{lambda x: x**3 - 2*x - 5)(root):.15f}`) — a syntax error. Bind the
# function to a name and reuse it for both the solve and the verification.
def f_cubic(x):
    """Cubic x³ - 2x - 5, whose real root is ≈ 2.0945514815."""
    return x**3 - 2*x - 5

root, iterations = secant_method(f_cubic, 2, 2.5)
print(f"Root: {root:.10f}")
print(f"Verification: f({root}) = {f_cubic(root):.15f}")

Stochastic Iteration (Monte Carlo Methods)

import random

def monte_carlo_integration(f, a, b, num_samples=100000):
    """
    Estimate the integral of f over [a, b] by averaging f at uniform
    random points (Monte Carlo).

    Returns:
        (integral_estimate, samples_used) tuple.
    """
    total = 0
    samples_drawn = 0

    while samples_drawn < num_samples:
        # One uniform draw per sample, scaled to the integration interval.
        sample_point = random.uniform(a, b)
        total += f(sample_point)
        samples_drawn += 1

    # Mean value of f times the interval width approximates the integral.
    average = total / num_samples
    return (b - a) * average, samples_drawn

# Estimate π using Monte Carlo (area of circle in unit square)
def unit_circle(x):
    """Height of the upper unit semicircle at x (clamped to 0 outside [-1, 1])."""
    squared_height = max(0, 1 - x**2)
    return math.sqrt(squared_height)

# Integrate the quarter circle on [0, 1], then scale ×4 to get π.
pi_estimate, samples = monte_carlo_integration(unit_circle, 0, 1, 1000000)
pi_estimate *= 4  # Quarter circle -> full area
# Fixed mojibake: the original printed "Ï€" (UTF-8 bytes of π read as Latin-1).
print(f"π estimated as {pi_estimate:.6f} (actual: {math.pi:.6f})")

Error Handling in Numerical Iterations

class ConvergenceError(Exception):
    """Signals that an iterative method failed to converge."""

class DivergenceError(Exception):
    """Signals that an iteration is diverging (solution grows unbounded)."""

def robust_iteration(f, initial, tolerance=1e-10, max_iterations=1000):
    """
    Fixed-point iterate x ← f(x) with defensive error handling.

    Returns:
        (value, iterations, status) where status is "converged" or
        "oscillating" (the new value repeats one seen two steps ago).

    Raises:
        ConvergenceError: On evaluation failure, NaN/Inf, or no convergence.
        DivergenceError: If |x| exceeds the divergence threshold.
    """
    divergence_threshold = 1e10
    current = initial
    one_back = None   # value from the previous iteration
    two_back = None   # value from two iterations ago (cycle detection)

    for count in range(max_iterations):
        try:
            next_value = f(current)
        except (ValueError, ZeroDivisionError) as e:
            raise ConvergenceError(f"Function evaluation failed: {e}")

        # Unbounded growth means the iteration is running away.
        if abs(next_value) > divergence_threshold:
            raise DivergenceError(f"Solution diverging at iteration {count}")

        # NaN/Inf indicates a numerically broken step.
        if math.isnan(next_value) or math.isinf(next_value):
            raise ConvergenceError(f"Invalid value (NaN/Inf) at iteration {count}")

        # Converged: successive values agree to within tolerance.
        if abs(next_value - current) < tolerance:
            return next_value, count + 1, "converged"

        # The new value matches the one from two steps ago — cycling.
        if two_back is not None and next_value == two_back:
            return next_value, count + 1, "oscillating"

        two_back, one_back, current = one_back, current, next_value

    raise ConvergenceError(f"Failed to converge after {max_iterations} iterations")

# Usage: iterate cos starting at 0.7; iteration errors are reported, not raised.
try:
    result, iterations, status = robust_iteration(lambda x: math.cos(x), 0.7)
    print(f"Result: {result}, Status: {status}, Iterations: {iterations}")
except (ConvergenceError, DivergenceError) as e:
    print(f"Error: {e}")

Performance Analysis

import time
import numpy as np

def compare_convergence_methods():
    """Benchmark the root-finding methods above on f(x) = x² - 2.

    Returns:
        Dict mapping method name to either {'root', 'iterations', 'time'}
        or {'error': message} when the method fails.
    """

    f = lambda x: x**2 - 2
    f_prime = lambda x: 2*x

    methods = {
        'Bisection': lambda: bisection(f, 1, 2),
        'Newton-Raphson': lambda: newton_raphson(f, f_prime, 1.5),
        'Secant': lambda: secant_method(f, 1, 2),
        # NOTE(review): x -> 2/x satisfies g(g(x)) = x, so this entry is
        # expected to fail to converge and exercise the error branch.
        'Fixed Point': lambda: fixed_point_iteration(lambda x: 2/x, 1.5)
    }

    results = {}

    for method_name, method_fn in methods.items():
        # perf_counter() is a monotonic, high-resolution clock — the right
        # choice for benchmarking (time.time() is wall-clock and can jump).
        start = time.perf_counter()
        try:
            root, iterations = method_fn()
            elapsed = time.perf_counter() - start
            results[method_name] = {
                'root': root,
                'iterations': iterations,
                'time': elapsed
            }
        except Exception as e:
            results[method_name] = {'error': str(e)}

    return results

# Print comparison: one summary line per method that succeeded.
results = compare_convergence_methods()
for method, data in results.items():
    if 'error' not in data:
        print(f"{method}: {data['iterations']} iterations, {data['time']*1000:.3f}ms")

Key Takeaways

| Algorithm        | Convergence Rate   | Memory | Reliability | Best For               |
|------------------|--------------------|--------|-------------|------------------------|
| Bisection        | Linear             | O(1)   | Very High   | Guaranteed convergence |
| Newton-Raphson   | Quadratic          | O(1)   | Medium      | Fast with derivative   |
| Secant Method    | Super-linear       | O(1)   | High        | No derivative needed   |
| Fixed Point      | Linear             | O(1)   | Medium      | Simple iteration       |
| Gradient Descent | Linear–Superlinear | O(n)   | Medium      | High-dimensional       |
| Golden Section   | Linear             | O(1)   | High        | Optimization           |

What's Next?

Explore these advanced topics:

  • **Loop Control (Advanced)**: Exception handling and context managers
  • **Iterating Lists (Advanced)**: Custom iterators and lazy evaluation
  • **Loop Patterns (Advanced)**: Complex algorithms and dynamic programming
  • **Nested Loops (Advanced)**: Big O analysis and matrix operations
  • **Debugging Loops (Advanced)**: Performance profiling and optimization

Master numerical methods and unlock powerful problem-solving techniques!


Resources

Python Docs

Ojasa Mirai

Master AI-powered development skills through structured learning, real projects, and verified credentials. Whether you're upskilling your team or launching your career, we deliver the skills companies actually need.

Learn Deep • Build Real • Verify Skills • Launch Forward

Courses

PythonFastapiReactJSCloud

© 2026 Ojasa Mirai. All rights reserved.

TwitterGitHubLinkedIn