detect/yolo11-p2.yaml #60
Replies: 1 comment
-
| 
         Transferring pretrained weights from Ultralytics models

import torch
import torch.nn as nn
from collections import OrderedDict
def transfer_weights_sequential(model1_state_dict, model2, strict_matching=True):
    """
    Transfer weights from a source model into model2, which may have extra layers inserted.

    Walks both models' parameters in ascending (layer-index, param-name) order and greedily
    pairs each source parameter with the next not-yet-matched target parameter that has the
    same parameter name and tensor shape. Target parameters with no compatible source
    (e.g. the newly inserted layers) keep their current values.

    Args:
        model1_state_dict: State dict of the source model. Keys are expected to start with
            a numeric layer index (e.g. "12.conv.weight"); keys whose first dotted
            component is not an integer are ignored instead of crashing the transfer.
        model2: Target model (torch.nn.Module) with inserted layers; its matching
            parameters are overwritten in place via ``load_state_dict``.
        strict_matching: If True (default), a shape mismatch stalls the source cursor until
            a compatible target parameter appears. If False, the mismatched source
            parameter is skipped (with a printed notice) so the transfer can continue.

    Returns:
        model2 with the transferred weights loaded.
    """
    model2_state_dict = model2.state_dict()

    def get_layer_info(state_dict):
        """Return sorted (layer_num, param_name, shape, key) tuples for numeric-prefixed keys."""
        layer_info = []
        for key, tensor in state_dict.items():
            parts = key.split('.')
            if len(parts) >= 2:
                try:
                    # First component must be the sequential layer index.
                    layer_num = int(parts[0])
                except ValueError:
                    # Key without a numeric prefix (e.g. a non-Sequential buffer) — skip
                    # rather than raise, so one odd key can't abort the whole transfer.
                    continue
                layer_info.append((layer_num, parts[1], tensor.shape, key))
        return sorted(layer_info)

    model1_info = get_layer_info(model1_state_dict)
    model2_info = get_layer_info(model2_state_dict)

    # Greedy sequential matching: advance one cursor through model1's parameters,
    # pairing each with the next compatible model2 parameter.
    mapping = {}  # model2 key -> model1 key
    model1_idx = 0

    for _, m2_param, m2_shape, m2_key in model2_info:
        if model1_idx >= len(model1_info):
            break  # Source exhausted; remaining target params keep their init values.
        _, m1_param, m1_shape, m1_key = model1_info[model1_idx]

        # Compatible: same parameter name and identical tensor shape.
        if m1_param == m2_param and m1_shape == m2_shape:
            mapping[m2_key] = m1_key
            model1_idx += 1
        # Non-strict mode: drop the incompatible source parameter and move on.
        elif not strict_matching and m1_param == m2_param:
            print(f"Skipping incompatible layer: {m1_key} {m1_shape} -> {m2_key} {m2_shape}")
            model1_idx += 1

    # Overwrite matched entries; everything else keeps model2's current values.
    new_state_dict = model2_state_dict.copy()
    for m2_key, m1_key in mapping.items():
        new_state_dict[m2_key] = model1_state_dict[m1_key]

    model2.load_state_dict(new_state_dict)

    print(f"Successfully transferred {len(mapping)} parameters")
    print(f"Model 1 had {len(model1_info)} parameters")
    print(f"Model 2 has {len(model2_info)} parameters")

    return model2
from ultralytics import YOLO

# Build the P2 variant from its config and load the official pretrained checkpoint.
target = YOLO("community/cfg/detect/yolo11n-p2.yaml")
source = YOLO("yolo11n.pt")

# Copy every compatible parameter from the pretrained model into the P2 variant;
# the inserted P2-scale layers keep their random initialization.
target.model.model = transfer_weights_sequential(source.model.model.state_dict(), target.model.model)

# Persist the warm-started checkpoint and use it as the starting point for training.
target.save("yolo11n-p2.pt")
  
Beta Was this translation helpful? Give feedback.
                  
                    0 replies
                  
                
            
  
    Sign up for free
    to join this conversation on GitHub.
    Already have an account?
    Sign in to comment
  
        
    
Uh oh!
There was an error while loading. Please reload this page.
-
Discussion for
detect/yolo11-p2.yaml📊 Metadata
👤 author: @Y-T-G
🎯 task: detect
🔑 keywords:
cnn,yolo11,small-object📝 description: YOLO11 object detection model with an extra P2 scale for small object detection.
⚡ flops:
-
n: 11.4-
s: 29.7-
m: 88.9-
l: 113.6-
x: 245.1🔢 parameters:
-
n: 2,740,032-
s: 9,625,968-
m: 20,591,120-
l: 26,116,368-
x: 57,849,456💻 min_version: 8.3.0
If you found this config helpful, consider upvoting this discussion! 🔼
Beta Was this translation helpful? Give feedback.
All reactions