Migrations

Considerations When Removing Fields

Removing fields from Django models requires careful planning to avoid data loss and maintain application stability. This section covers safe field removal strategies, data preservation techniques, and best practices for handling field deprecation in production environments.

The sections below walk through the risks of naive field removal, a multi-phase removal process, tooling for impact analysis and validation, and production rollout practices.

Understanding Field Removal Risks

Data Loss and Application Impact

# Dangerous: Direct field removal
class UnsafeFieldRemoval(models.Model):
    """Example of unsafe field removal.

    The deprecated field was deleted from the model in a single step; the
    auto-generated migration (below) will drop the column and all its data.
    """
    
    # Original model
    title = models.CharField(max_length=200)
    content = models.TextField()
    # deprecated_field = models.CharField(max_length=100)  # Removed directly
    created_at = models.DateTimeField(auto_now_add=True)

# Generated migration (DANGEROUS)
class Migration(migrations.Migration):
    """DANGEROUS example: drops the column (and every stored value) in one step."""

    dependencies = [
        ('blog', '0001_initial'),
    ]
    
    operations = [
        # This will permanently delete data!
        # Reversing the migration re-creates the column, but the values are gone.
        migrations.RemoveField(
            model_name='post',
            name='deprecated_field',
        ),
    ]

# Safe approach: Multi-step field removal
class SafeFieldRemovalProcess:
    """Safe process for removing fields.

    Illustrates a six-step removal: deprecate, make nullable, stop using,
    back up, remove from the model, drop the column. Each nested class is a
    snapshot of the model or migration at that step — they are not meant to
    coexist in a real app.
    """
    
    # Step 1: Mark field as deprecated (no migration needed)
    class PostStep1(models.Model):
        title = models.CharField(max_length=200)
        content = models.TextField()
        # Mark as deprecated in comments and documentation
        deprecated_field = models.CharField(
            max_length=100,
            help_text="DEPRECATED: This field will be removed in v2.0"
        )
        created_at = models.DateTimeField(auto_now_add=True)
        
        class Meta:
            app_label = 'blog'
    
    # Step 2: Make field nullable (allows safe removal later)
    class Migration1(migrations.Migration):
        dependencies = [
            ('blog', '0001_initial'),
        ]
        
        operations = [
            # Nullable first, so the later RemoveField cannot hit NOT NULL issues.
            migrations.AlterField(
                model_name='post',
                name='deprecated_field',
                field=models.CharField(max_length=100, null=True, blank=True),
            ),
        ]
    
    # Step 3: Stop using field in code (no migration)
    class PostStep3(models.Model):
        title = models.CharField(max_length=200)
        content = models.TextField()
        # Field still exists in database but not used in code
        created_at = models.DateTimeField(auto_now_add=True)
        
        class Meta:
            app_label = 'blog'
    
    # Step 4: Create data backup migration
    class Migration2(migrations.Migration):
        dependencies = [
            ('blog', '0002_make_field_nullable'),
        ]
        
        operations = [
            # Backup/restore helpers are the module-level functions defined below.
            migrations.RunPython(
                code=backup_deprecated_field_data,
                reverse_code=restore_deprecated_field_data,
            ),
        ]
    
    # Step 5: Remove field from model and create migration
    class PostFinal(models.Model):
        title = models.CharField(max_length=200)
        content = models.TextField()
        created_at = models.DateTimeField(auto_now_add=True)
        
        class Meta:
            app_label = 'blog'
    
    # Step 6: Final removal migration
    class Migration3(migrations.Migration):
        dependencies = [
            ('blog', '0003_backup_deprecated_data'),
        ]
        
        operations = [
            # Safe now: data is backed up and the column is unused and nullable.
            migrations.RemoveField(
                model_name='post',
                name='deprecated_field',
            ),
        ]

def backup_deprecated_field_data(apps, schema_editor):
    """Copy non-empty ``deprecated_field`` values into a backup table.

    Forward operation for the ``RunPython`` step that runs before the field
    is dropped, so values can be recovered later (see
    ``restore_deprecated_field_data`` for the reverse operation).

    Args:
        apps: Historical app registry supplied by ``RunPython``. Unused here
            because the work is done in raw SQL, but required by the signature.
        schema_editor: Schema editor bound to the database being migrated.
    """
    # Use the schema editor's connection rather than django.db.connection so
    # the backup is written to the same database alias the migration targets.
    with schema_editor.connection.cursor() as cursor:
        # Create the backup table once; IF NOT EXISTS makes re-runs safe.
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS blog_post_deprecated_backup (
                post_id INTEGER PRIMARY KEY,
                deprecated_field_value VARCHAR(100),
                backup_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        
        # Copy only rows that actually carry a value worth preserving.
        cursor.execute("""
            INSERT INTO blog_post_deprecated_backup (post_id, deprecated_field_value)
            SELECT id, deprecated_field 
            FROM blog_post 
            WHERE deprecated_field IS NOT NULL AND deprecated_field != ''
        """)
        
        print(f"Backed up {cursor.rowcount} deprecated field values")

def restore_deprecated_field_data(apps, schema_editor):
    """Restore backed-up values into ``blog_post.deprecated_field``.

    Reverse operation for ``backup_deprecated_field_data``; only called when
    the backup migration is rolled back.

    Args:
        apps: Historical app registry (unused; raw SQL is used instead).
        schema_editor: Schema editor bound to the database being migrated.
    """
    # Use the schema editor's connection so the correct database alias is hit.
    with schema_editor.connection.cursor() as cursor:
        # NOTE(review): ``UPDATE ... FROM`` is PostgreSQL syntax and will not
        # run on SQLite/MySQL as-is — confirm the target backend.
        cursor.execute("""
            UPDATE blog_post 
            SET deprecated_field = backup.deprecated_field_value
            FROM blog_post_deprecated_backup backup
            WHERE blog_post.id = backup.post_id
        """)
        
        print("Restored deprecated field data from backup")

Field Removal Impact Analysis

class FieldRemovalAnalyzer:
    """Analyze impact of field removal.

    Inspects a single model field and reports what would be affected if it
    were removed: field metadata, stored data, textual code references and
    schema dependencies, plus a list of prioritised recommendations.
    """
    
    def __init__(self, app_label, model_name, field_name):
        # Identifies the field to analyze; resolution happens lazily in
        # analyze_removal_impact() so construction never touches the ORM.
        self.app_label = app_label
        self.model_name = model_name
        self.field_name = field_name
    
    def analyze_removal_impact(self):
        """Run the full analysis and return it as a dict.

        Returns:
            dict with keys ``field_info``, ``data_analysis``, ``code_usage``,
            ``dependencies`` and ``recommendations``. Sub-analyses that fail
            record an ``error`` key instead of raising.
        """
        from django.apps import apps
        
        model = apps.get_model(self.app_label, self.model_name)
        
        analysis = {
            'field_info': {},
            'data_analysis': {},
            'code_usage': {},
            'dependencies': {},
            'recommendations': [],
        }
        
        # Field metadata (type, nullability, constraints, relations).
        try:
            field = model._meta.get_field(self.field_name)
            
            analysis['field_info'] = {
                'type': field.__class__.__name__,
                'null': getattr(field, 'null', False),
                'blank': getattr(field, 'blank', False),
                'unique': getattr(field, 'unique', False),
                'indexed': getattr(field, 'db_index', False),
                'has_default': field.has_default(),
                'related_model': getattr(field, 'related_model', None),
            }
            
        except Exception as e:
            # e.g. FieldDoesNotExist — record it instead of aborting.
            analysis['field_info']['error'] = str(e)
        
        analysis['data_analysis'] = self._analyze_field_data(model)
        analysis['code_usage'] = self._analyze_code_usage()
        analysis['dependencies'] = self._analyze_field_dependencies(model)
        analysis['recommendations'] = self._generate_removal_recommendations(analysis)
        
        return analysis
    
    def _analyze_field_data(self, model):
        """Summarise the data stored in the field: counts, samples, top values."""
        
        data_analysis = {
            'total_records': 0,
            'non_null_records': 0,
            'unique_values': 0,
            'sample_values': [],
            'data_distribution': {},
        }
        
        try:
            data_analysis['total_records'] = model.objects.count()
            
            if data_analysis['total_records'] > 0:
                non_null_filter = {f"{self.field_name}__isnull": False}
                data_analysis['non_null_records'] = model.objects.filter(
                    **non_null_filter
                ).count()
                
                # count() keeps the uniqueness tally in the database instead
                # of materialising every distinct value just to len() it.
                unique_values = model.objects.values_list(
                    self.field_name, flat=True
                ).distinct()
                data_analysis['unique_values'] = unique_values.count()
                
                # Small sample of distinct values for human inspection.
                data_analysis['sample_values'] = list(unique_values[:10])
                
                # Top-5 most frequent values.
                if data_analysis['non_null_records'] > 0:
                    from django.db.models import Count
                    
                    distribution = model.objects.values(
                        self.field_name
                    ).annotate(
                        count=Count('id')
                    ).order_by('-count')[:5]
                    
                    data_analysis['data_distribution'] = {
                        item[self.field_name]: item['count']
                        for item in distribution
                    }
        
        except Exception as e:
            data_analysis['error'] = str(e)
        
        return data_analysis
    
    def _analyze_code_usage(self):
        """Scan project .py/.html/.js files for textual references to the field.

        Returns a dict with ``files_using_field`` entries of the form
        ``{'file', 'pattern', 'matches'}``. This is a heuristic text search,
        not a semantic one.
        """
        
        import os
        import re
        from django.conf import settings
        
        usage_analysis = {
            'files_using_field': [],
            'usage_patterns': [],
            'potential_issues': [],
        }
        
        # re.escape guards against field names containing regex metacharacters.
        escaped = re.escape(self.field_name)
        field_patterns = [
            rf'\b{escaped}\b',   # Direct field access
            rf'\.{escaped}',     # Attribute access
            rf'"{escaped}"',     # Double-quoted string references
            rf"'{escaped}'",     # Single-quoted string references
        ]
        # Compile once; each pattern is applied to every scanned file.
        compiled_patterns = [(p, re.compile(p)) for p in field_patterns]
        
        project_root = settings.BASE_DIR
        
        for root, dirs, files in os.walk(project_root):
            # Prune VCS/cache/vendor directories in place so os.walk skips them.
            dirs[:] = [d for d in dirs if d not in ['.git', '__pycache__', 'node_modules']]
            
            for file in files:
                if file.endswith(('.py', '.html', '.js')):
                    file_path = os.path.join(root, file)
                    
                    try:
                        with open(file_path, 'r', encoding='utf-8') as f:
                            content = f.read()
                    except Exception:
                        continue  # Skip files that can't be read
                    
                    for pattern, regex in compiled_patterns:
                        matches = regex.findall(content)
                        if matches:
                            usage_analysis['files_using_field'].append({
                                'file': file_path,
                                'pattern': pattern,
                                'matches': len(matches),
                            })
        
        return usage_analysis
    
    def _analyze_field_dependencies(self, model):
        """Collect schema-level dependencies: relations, indexes, constraints."""
        
        dependencies = {
            'foreign_key_references': [],
            'index_dependencies': [],
            'constraint_dependencies': [],
            'migration_dependencies': [],
        }
        
        try:
            field = model._meta.get_field(self.field_name)
            
            # Forward relation (this field points at another model).
            if getattr(field, 'related_model', None):
                on_delete = getattr(field, 'on_delete', None)
                dependencies['foreign_key_references'].append({
                    'related_model': field.related_model._meta.label,
                    # on_delete may be absent or None; guard before reading
                    # __name__ so the analysis never raises here.
                    'on_delete': on_delete.__name__ if on_delete is not None else None,
                })
            
            # Reverse relations (other models pointing at this field).
            for related_object in model._meta.related_objects:
                if related_object.field.name == self.field_name:
                    dependencies['foreign_key_references'].append({
                        'referencing_model': related_object.related_model._meta.label,
                        'field_name': related_object.field.name,
                    })
            
            # Meta.indexes entries that include this field.
            for index in model._meta.indexes:
                if self.field_name in index.fields:
                    dependencies['index_dependencies'].append({
                        'index_name': index.name,
                        'fields': index.fields,
                    })
            
            # Meta.constraints entries that include this field.
            for constraint in model._meta.constraints:
                if hasattr(constraint, 'fields') and self.field_name in constraint.fields:
                    dependencies['constraint_dependencies'].append({
                        'constraint_name': constraint.name,
                        'type': constraint.__class__.__name__,
                    })
        
        except Exception as e:
            dependencies['error'] = str(e)
        
        return dependencies
    
    def _generate_removal_recommendations(self, analysis):
        """Turn the collected analysis into a prioritised recommendation list."""
        
        recommendations = []
        
        # Data impact: any stored values must be backed up first.
        data_analysis = analysis['data_analysis']
        
        if data_analysis.get('non_null_records', 0) > 0:
            recommendations.append({
                'type': 'data_backup',
                'priority': 'high',
                'message': f"Field contains data in {data_analysis['non_null_records']} records. Create backup before removal."
            })
        
        # Code usage: references must be cleaned up before the column goes.
        code_usage = analysis['code_usage']
        
        if code_usage.get('files_using_field'):
            recommendations.append({
                'type': 'code_cleanup',
                'priority': 'high',
                'message': f"Field is used in {len(code_usage['files_using_field'])} files. Update code before removal."
            })
        
        # Schema dependencies: relations block removal outright.
        dependencies = analysis['dependencies']
        
        if dependencies.get('foreign_key_references'):
            recommendations.append({
                'type': 'dependency_cleanup',
                'priority': 'critical',
                'message': "Field has foreign key dependencies. Handle relationships before removal."
            })
        
        if dependencies.get('index_dependencies'):
            recommendations.append({
                'type': 'index_cleanup',
                'priority': 'medium',
                'message': "Field is part of database indexes. Indexes will be automatically removed."
            })
        
        # Field-level constraints worth calling out.
        field_info = analysis['field_info']
        
        if field_info.get('unique'):
            recommendations.append({
                'type': 'unique_constraint',
                'priority': 'medium',
                'message': "Field has unique constraint. Ensure no application logic depends on uniqueness."
            })
        
        if not field_info.get('null', True):
            recommendations.append({
                'type': 'not_null_constraint',
                'priority': 'high',
                'message': "Field is NOT NULL. Make nullable before removal to avoid migration issues."
            })
        
        return recommendations

Safe Field Removal Strategies

Multi-Phase Removal Process

class MultiPhaseFieldRemoval:
    """Multi-phase approach to safe field removal.

    Each phase is a static method returning the artefact for that phase
    (deprecation notice, checklist) or defining a model/migration snapshot.
    The nested ``Post``/``Migration`` classes are illustrative snapshots,
    not classes intended to coexist in a running app.
    """
    
    @staticmethod
    def phase1_deprecation():
        """Phase 1: Mark field as deprecated"""
        
        # No migration needed - just code changes
        class Post(models.Model):
            title = models.CharField(max_length=200)
            content = models.TextField()
            
            # Mark field as deprecated
            legacy_field = models.CharField(
                max_length=100,
                help_text="DEPRECATED: Will be removed in version 2.0. Use new_field instead.",
                # Add deprecation warning in model
            )
            
            # Add replacement field if needed
            new_field = models.CharField(max_length=100, null=True, blank=True)
            
            created_at = models.DateTimeField(auto_now_add=True)
            
            def save(self, *args, **kwargs):
                # Add deprecation warning
                # Warn at save time so code still writing legacy_field is flagged.
                if self.legacy_field:
                    import warnings
                    warnings.warn(
                        "legacy_field is deprecated and will be removed in v2.0",
                        DeprecationWarning,
                        stacklevel=2
                    )
                
                super().save(*args, **kwargs)
        
        # Update documentation and add warnings
        deprecation_notice = """
        DEPRECATION NOTICE:
        
        The 'legacy_field' in Post model is deprecated and will be removed in v2.0.
        
        Migration path:
        1. Update code to use 'new_field' instead
        2. Migrate existing data from 'legacy_field' to 'new_field'
        3. Remove references to 'legacy_field' in your code
        
        Timeline:
        - v1.5: Deprecation warning added
        - v1.8: Field will be made nullable
        - v2.0: Field will be removed
        """
        
        return deprecation_notice
    
    @staticmethod
    def phase2_make_nullable():
        """Phase 2: Make field nullable"""
        
        class Migration(migrations.Migration):
            dependencies = [
                ('blog', '0001_initial'),
            ]
            
            operations = [
                # Make field nullable to allow safe removal later
                migrations.AlterField(
                    model_name='post',
                    name='legacy_field',
                    field=models.CharField(max_length=100, null=True, blank=True),
                ),
                
                # Add new field if not already present
                migrations.AddField(
                    model_name='post',
                    name='new_field',
                    field=models.CharField(max_length=100, null=True, blank=True),
                ),
                
                # Migrate data from old field to new field
                # (helpers are the module-level functions defined below)
                migrations.RunPython(
                    code=migrate_legacy_to_new_field,
                    reverse_code=migrate_new_to_legacy_field,
                ),
            ]
    
    @staticmethod
    def phase3_stop_using():
        """Phase 3: Stop using field in code"""
        
        # Update model to not use deprecated field
        class Post(models.Model):
            title = models.CharField(max_length=200)
            content = models.TextField()
            
            # Field still exists in database but not used in code
            # legacy_field = models.CharField(max_length=100, null=True, blank=True)
            
            new_field = models.CharField(max_length=100, null=True, blank=True)
            created_at = models.DateTimeField(auto_now_add=True)
        
        # Create code migration checklist
        code_migration_checklist = [
            "Update all model references to use new_field",
            "Update forms to use new_field",
            "Update serializers to use new_field", 
            "Update templates to display new_field",
            "Update admin configuration",
            "Update API endpoints",
            "Update search and filtering logic",
            "Update data exports/imports",
            "Update tests to use new_field",
            "Remove legacy_field from __str__ methods",
        ]
        
        return code_migration_checklist
    
    @staticmethod
    def phase4_backup_and_remove():
        """Phase 4: Backup data and remove field"""
        
        class Migration(migrations.Migration):
            dependencies = [
                ('blog', '0002_make_nullable_and_migrate'),
            ]
            
            operations = [
                # Create backup of field data
                migrations.RunPython(
                    code=create_field_backup,
                    reverse_code=restore_field_from_backup,
                ),
                
                # Remove the field
                migrations.RemoveField(
                    model_name='post',
                    name='legacy_field',
                ),
            ]

def migrate_legacy_to_new_field(apps, schema_editor):
    """Copy values from ``legacy_field`` into ``new_field``.

    Forward data migration: only rows with a legacy value and no new value
    are touched, so re-running is a no-op for already-migrated rows.

    Args:
        apps: Historical app registry supplied by ``RunPython``.
        schema_editor: Schema editor (unused; ORM operations are sufficient).
    """
    Post = apps.get_model('blog', 'Post')
    
    # Only rows that still need migrating.
    posts_to_migrate = Post.objects.filter(
        legacy_field__isnull=False,
        new_field__isnull=True
    )
    
    migrated_count = 0
    
    # iterator() keeps memory bounded on large tables.
    for post in posts_to_migrate.iterator():
        # Transform data if needed
        post.new_field = transform_legacy_data(post.legacy_field)
        # Write only the changed column instead of re-saving the whole row.
        post.save(update_fields=['new_field'])
        migrated_count += 1
    
    print(f"Migrated {migrated_count} posts from legacy_field to new_field")

def migrate_new_to_legacy_field(apps, schema_editor):
    """Reverse migration: migrate data back to legacy field.

    Mirror of ``migrate_legacy_to_new_field``; copies ``new_field`` values
    back into rows whose ``legacy_field`` is empty.

    Args:
        apps: Historical app registry supplied by ``RunPython``.
        schema_editor: Schema editor (unused; ORM operations are sufficient).
    """
    Post = apps.get_model('blog', 'Post')
    
    posts_to_migrate = Post.objects.filter(
        new_field__isnull=False,
        legacy_field__isnull=True
    )
    
    restored_count = 0
    
    # iterator() keeps memory bounded on large tables.
    for post in posts_to_migrate.iterator():
        post.legacy_field = post.new_field
        # Write only the changed column instead of re-saving the whole row.
        post.save(update_fields=['legacy_field'])
        restored_count += 1
    
    # Progress output, consistent with the forward migration.
    print(f"Restored {restored_count} posts from new_field to legacy_field")

def transform_legacy_data(legacy_value):
    """Normalise a legacy value into the new field's format.

    Empty or missing values map to ``None``; anything else is
    whitespace-trimmed and lower-cased.
    """
    # Example transformation: falsy inputs ('' or None) yield None.
    return legacy_value.strip().lower() if legacy_value else None

def create_field_backup(apps, schema_editor):
    """Create backup of field data before removal.

    Snapshots all non-null ``legacy_field`` values into a new table so the
    subsequent ``RemoveField`` is recoverable.

    Args:
        apps: Historical app registry (unused; raw SQL is used instead).
        schema_editor: Schema editor bound to the database being migrated.
    """
    # Use the schema editor's connection so the correct database alias is hit.
    with schema_editor.connection.cursor() as cursor:
        # CURRENT_TIMESTAMP is standard SQL; the previous NOW() is not
        # available on SQLite and would make this migration backend-specific.
        cursor.execute("""
            CREATE TABLE blog_post_legacy_field_backup AS
            SELECT id, legacy_field, CURRENT_TIMESTAMP as backup_date
            FROM blog_post
            WHERE legacy_field IS NOT NULL
        """)
        
        # Log backup statistics
        cursor.execute("SELECT COUNT(*) FROM blog_post_legacy_field_backup")
        backup_count = cursor.fetchone()[0]
        
        print(f"Created backup of {backup_count} legacy field values")

def restore_field_from_backup(apps, schema_editor):
    """Restore field data from backup (for migration reversal).

    Reverse operation for ``create_field_backup``; writes the backed-up
    ``legacy_field`` values back into ``blog_post``.

    Args:
        apps: Historical app registry (unused; raw SQL is used instead).
        schema_editor: Schema editor bound to the database being migrated.
    """
    # Use the schema editor's connection so the correct database alias is hit.
    with schema_editor.connection.cursor() as cursor:
        # NOTE(review): ``UPDATE ... FROM`` is PostgreSQL syntax; it will not
        # run on SQLite/MySQL as-is — confirm the target backend.
        cursor.execute("""
            UPDATE blog_post 
            SET legacy_field = backup.legacy_field
            FROM blog_post_legacy_field_backup backup
            WHERE blog_post.id = backup.id
        """)
        
        print("Restored legacy field data from backup")

Conditional Field Removal

class ConditionalFieldRemoval:
    """Handle conditional field removal based on data and usage.

    Both factory methods return a callable suitable for use as the
    ``code=`` argument of ``migrations.RunPython``.
    """
    
    @staticmethod
    def create_conditional_removal_migration():
        """Create migration that conditionally removes field."""
        
        def conditional_field_removal(apps, schema_editor):
            """Remove field only if safe to do so."""
            # Local imports: the original referenced these names without
            # importing them, which raised NameError at runtime.
            from datetime import timedelta
            from django.utils import timezone
            
            Post = apps.get_model('blog', 'Post')
            
            # Check if field has data
            posts_with_data = Post.objects.filter(
                legacy_field__isnull=False
            ).count()
            
            if posts_with_data > 0:
                print(f"Cannot remove field: {posts_with_data} posts still have data")
                print("Please migrate data before removing field")
                return
            
            # Check if field is still being used (simplified check).
            # NOTE(review): assumes the model has an `updated_at` field — confirm.
            recent_updates = Post.objects.filter(
                updated_at__gte=timezone.now() - timedelta(days=30),
                legacy_field__isnull=False
            ).count()
            
            if recent_updates > 0:
                print(f"Field appears to be in use: {recent_updates} recent updates")
                print("Postponing field removal")
                return
            
            # Safe to proceed with removal
            print("Field is safe to remove - no data and no recent usage")
            
            # The actual column drop belongs in a separate schema migration.
            
        return conditional_field_removal
    
    @staticmethod
    def create_gradual_removal_migration():
        """Create migration that gradually removes field usage."""
        
        def gradual_field_removal(apps, schema_editor):
            """Gradually clear field data in batches, archiving values first."""
            # Local import: the original used timezone without importing it.
            from django.utils import timezone
            
            Post = apps.get_model('blog', 'Post')
            
            # Batch to avoid loading/locking the whole table at once.
            batch_size = 1000
            
            while True:
                # Materialise the batch once so the emptiness check, the loop
                # and the final length check all see the same rows (a sliced
                # queryset would otherwise be re-evaluated).
                posts_to_clean = list(
                    Post.objects.filter(legacy_field__isnull=False)[:batch_size]
                )
                
                if not posts_to_clean:
                    break
                
                # Archive data before removal
                archived_data = []
                
                for post in posts_to_clean:
                    # Empty strings match isnull=False but carry no data worth archiving.
                    if post.legacy_field:
                        archived_data.append({
                            'post_id': post.id,
                            'legacy_value': post.legacy_field,
                            'archived_at': timezone.now()
                        })
                    
                    # Clear the field; write only the changed column.
                    post.legacy_field = None
                    post.save(update_fields=['legacy_field'])
                
                # Store archived data (in production, use proper storage —
                # the cache is lossy and expires).
                if archived_data:
                    from django.core.cache import cache
                    
                    existing_archive = cache.get('legacy_field_archive', [])
                    existing_archive.extend(archived_data)
                    cache.set('legacy_field_archive', existing_archive, timeout=86400*30)
                
                print(f"Cleaned {len(posts_to_clean)} posts, archived {len(archived_data)} values")
                
                # A short batch means we just processed the last rows.
                if len(posts_to_clean) < batch_size:
                    break
        
        return gradual_field_removal

# Field removal validation
class FieldRemovalValidator:
    """Validate field removal safety."""
    
    @staticmethod
    def validate_removal_safety(app_label, model_name, field_name):
        """Validate that field removal is safe.

        Returns:
            dict with ``safe_to_remove`` (bool) plus ``blocking_issues``,
            ``warnings`` and ``recommendations`` lists. Analysis errors are
            reported as blocking issues rather than raised.
        """
        from django.apps import apps
        
        model = apps.get_model(app_label, model_name)
        
        validation_results = {
            'safe_to_remove': True,
            'blocking_issues': [],
            'warnings': [],
            'recommendations': []
        }
        
        try:
            field = model._meta.get_field(field_name)
            
            # Blocking: the column still holds data.
            # (The original guarded this with hasattr(model.objects, 'filter'),
            # which is always true for a manager — dropped as redundant.)
            data_count = model.objects.filter(
                **{f"{field_name}__isnull": False}
            ).count()
            
            if data_count > 0:
                validation_results['safe_to_remove'] = False
                validation_results['blocking_issues'].append(
                    f"Field contains data in {data_count} records"
                )
            
            # Warning: forward relation. Every field object exposes a
            # `related_model` attribute (None for non-relational fields), so
            # test its value, not its presence — the previous hasattr() check
            # flagged every field as a foreign key.
            if getattr(field, 'related_model', None):
                validation_results['warnings'].append(
                    f"Field is a foreign key to {field.related_model._meta.label}"
                )
            
            # Warning: unique constraint.
            if getattr(field, 'unique', False):
                validation_results['warnings'].append(
                    "Field has unique constraint"
                )
            
            # Warning: single-column index.
            if getattr(field, 'db_index', False):
                validation_results['warnings'].append(
                    "Field has database index"
                )
            
            # Blocking: NOT NULL columns must be made nullable first.
            if not getattr(field, 'null', True):
                validation_results['safe_to_remove'] = False
                validation_results['blocking_issues'].append(
                    "Field has NOT NULL constraint - make nullable first"
                )
        
        except Exception as e:
            validation_results['safe_to_remove'] = False
            validation_results['blocking_issues'].append(f"Error analyzing field: {e}")
        
        # Generic remediation steps whenever anything blocks removal.
        if validation_results['blocking_issues']:
            validation_results['recommendations'].extend([
                "Backup field data before removal",
                "Make field nullable if it has NOT NULL constraint",
                "Migrate data to alternative storage if needed",
                "Update all code references before removal"
            ])
        
        return validation_results
    
    @staticmethod
    def create_removal_checklist(app_label, model_name, field_name):
        """Create checklist for field removal.

        Returns:
            dict with ``pre_removal``, ``removal`` and ``post_removal``
            lists of human-readable checklist items.
        """
        checklist = {
            'pre_removal': [
                f"Analyze impact of removing {field_name} from {model_name}",
                "Search codebase for all references to the field",
                "Check if field is used in forms, serializers, or templates",
                "Verify field is not used in database queries",
                "Check if field is referenced in documentation",
                "Backup existing data in the field",
                "Create data migration if field data needs preservation",
                "Make field nullable if it has NOT NULL constraint",
                "Remove field usage from application code",
                "Update tests to not reference the field",
                "Update API documentation if field is exposed"
            ],
            'removal': [
                "Create migration to remove field",
                "Test migration on copy of production data",
                "Verify migration can be reversed if needed",
                "Apply migration in staging environment",
                "Verify application functionality after removal",
                "Check for any remaining references or errors"
            ],
            'post_removal': [
                "Monitor application for errors related to missing field",
                "Verify database performance is not affected",
                "Update database documentation",
                "Clean up any backup tables if no longer needed",
                "Update model documentation",
                "Notify team of successful field removal"
            ]
        }
        
        return checklist

Production Field Removal

Production-Safe Removal Process

class ProductionFieldRemoval:
    """Handle field removal in production environments.

    Provides planning artifacts (removal plan, rollback plan) as plain
    dicts, plus a factory for a monitored RunPython-style migration
    function for removing a deprecated ``legacy_field`` from ``Post``.
    """

    @staticmethod
    def create_production_removal_plan():
        """Return a comprehensive, week-by-week plan for production field removal.

        Returns:
            dict with 'timeline' (week label -> action), 'risk_mitigation'
            (list of safeguards), and 'validation_steps' (list of checks).
        """
        plan = {
            'timeline': {
                'week_1': 'Add deprecation warnings and documentation',
                'week_2': 'Make field nullable and create backup migration',
                'week_3': 'Remove field usage from code',
                'week_4': 'Deploy code changes without field usage',
                'week_5': 'Monitor for any issues',
                'week_6': 'Remove field from model and database'
            },
            'risk_mitigation': [
                'Create full database backup before each step',
                'Test all changes in staging environment first',
                'Have rollback plan ready for each step',
                'Monitor application metrics during deployment',
                'Keep field backup for at least 30 days after removal'
            ],
            'validation_steps': [
                'Verify no application errors after each deployment',
                'Check that all features work without the field',
                'Validate database performance is not affected',
                'Confirm backup and restore procedures work',
                'Test rollback procedures in staging'
            ]
        }

        return plan

    @staticmethod
    def create_monitoring_migration():
        """Return a RunPython-compatible callable that removes field data with monitoring.

        The returned function takes ``(apps, schema_editor)``, validates that
        the ``legacy_field`` on ``blog.Post`` holds no remaining data, logs
        timing and system-resource usage around the operation, and raises
        ``ValueError`` if data is still present.
        """

        def monitored_field_removal(apps, schema_editor):
            """Clear remaining legacy_field data with comprehensive monitoring."""

            import time
            import logging

            logger = logging.getLogger(__name__)

            # Use the historical model so this works at any migration state.
            Post = apps.get_model('blog', 'Post')

            # Pre-removal validation: refuse to proceed while data remains,
            # so nothing is silently destroyed by the subsequent RemoveField.
            logger.info("Starting field removal validation")

            total_posts = Post.objects.count()
            posts_with_data = Post.objects.filter(
                legacy_field__isnull=False
            ).count()

            logger.info(f"Total posts: {total_posts}")
            logger.info(f"Posts with legacy data: {posts_with_data}")

            if posts_with_data > 0:
                logger.error(f"Cannot proceed: {posts_with_data} posts still have data")
                raise ValueError("Field still contains data")

            # Monitor system resources (psutil is third-party; imported
            # lazily so the module loads without it).
            import psutil

            initial_memory = psutil.virtual_memory().percent
            initial_cpu = psutil.cpu_percent()

            logger.info(f"Initial system state - Memory: {initial_memory}%, CPU: {initial_cpu}%")

            start_time = time.time()

            # Defensive clear: null out any rows that still carry data.
            # (The original filtered legacy_field__isnull=True and set it to
            # None — a guaranteed no-op; the filter must target non-null rows.)
            Post.objects.filter(legacy_field__isnull=False).update(legacy_field=None)

            end_time = time.time()

            # Post-removal monitoring
            final_memory = psutil.virtual_memory().percent
            final_cpu = psutil.cpu_percent()

            logger.info(f"Field removal completed in {end_time - start_time:.2f} seconds")
            logger.info(f"Final system state - Memory: {final_memory}%, CPU: {final_cpu}%")

            # Validate that the clear actually left no data behind.
            remaining_data = Post.objects.filter(
                legacy_field__isnull=False
            ).count()

            if remaining_data > 0:
                logger.warning(f"Warning: {remaining_data} posts still have legacy data")
            else:
                logger.info("Field removal validation successful")

        return monitored_field_removal

    @staticmethod
    def create_rollback_plan():
        """Return rollback procedures for each stage of a field removal.

        Returns:
            dict keyed by scenario ('immediate_rollback', 'delayed_rollback',
            'emergency_rollback'), each with 'description', ordered 'steps',
            and a 'time_estimate'.
        """
        rollback_plan = {
            'immediate_rollback': {
                'description': 'Rollback within same deployment window',
                'steps': [
                    'Stop application servers',
                    'Rollback database migration',
                    'Deploy previous code version',
                    'Restart application servers',
                    'Verify functionality'
                ],
                'time_estimate': '15-30 minutes'
            },
            'delayed_rollback': {
                'description': 'Rollback after field has been removed',
                'steps': [
                    'Create new migration to re-add field',
                    'Restore data from backup tables',
                    'Deploy code with field usage restored',
                    'Verify data integrity',
                    'Test all functionality'
                ],
                'time_estimate': '1-2 hours'
            },
            'emergency_rollback': {
                'description': 'Emergency rollback from backup',
                'steps': [
                    'Stop all application servers',
                    'Restore database from backup',
                    'Deploy known good code version',
                    'Restart application servers',
                    'Verify system functionality'
                ],
                'time_estimate': '30 minutes - 2 hours (depending on backup size)'
            }
        }

        return rollback_plan

Removing fields safely requires careful planning, gradual implementation, and comprehensive monitoring. Following these practices ensures data integrity while maintaining application stability throughout the removal process.