# BACKBONE Weights Documentation
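
Each parameter and buffer of the backbone (a ResNet-50-style network) is stored as an individual `.pt` tensor file, named after the parameter with dots replaced by underscores. As a minimal loading sketch (the `weights/` directory name and `map_location` are illustrative assumptions, not prescribed by this listing):

```python
# Load a single exported tensor and confirm its documented shape/dtype.
# Assumes the .pt files sit in a local "weights/" directory.
import torch

w = torch.load("weights/conv1_weight.pt", map_location="cpu")
print(w.shape)  # torch.Size([64, 3, 7, 7])
print(w.dtype)  # torch.float32
```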
## conv1.weight
- Shape: torch.Size([64, 3, 7, 7])
- Type: torch.float32
- File: conv1_weight.pt
## bn1.weight
- Shape: torch.Size([64])
- Type: torch.float32
- File: bn1_weight.pt
## bn1.bias
- Shape: torch.Size([64])
- Type: torch.float32
- File: bn1_bias.pt
## bn1.running_mean
- Shape: torch.Size([64])
- Type: torch.float32
- File: bn1_running_mean.pt
## bn1.running_var
- Shape: torch.Size([64])
- Type: torch.float32
- File: bn1_running_var.pt
## bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: bn1_num_batches_tracked.pt
## layer1.0.conv1.weight
- Shape: torch.Size([64, 64, 1, 1])
- Type: torch.float32
- File: layer1_0_conv1_weight.pt
## layer1.0.bn1.weight
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn1_weight.pt
## layer1.0.bn1.bias
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn1_bias.pt
## layer1.0.bn1.running_mean
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn1_running_mean.pt
## layer1.0.bn1.running_var
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn1_running_var.pt
## layer1.0.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_0_bn1_num_batches_tracked.pt
## layer1.0.conv2.weight
- Shape: torch.Size([64, 64, 3, 3])
- Type: torch.float32
- File: layer1_0_conv2_weight.pt
## layer1.0.bn2.weight
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn2_weight.pt
## layer1.0.bn2.bias
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn2_bias.pt
## layer1.0.bn2.running_mean
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn2_running_mean.pt
## layer1.0.bn2.running_var
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_0_bn2_running_var.pt
## layer1.0.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_0_bn2_num_batches_tracked.pt
## layer1.0.conv3.weight
- Shape: torch.Size([256, 64, 1, 1])
- Type: torch.float32
- File: layer1_0_conv3_weight.pt
## layer1.0.bn3.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_bn3_weight.pt
## layer1.0.bn3.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_bn3_bias.pt
## layer1.0.bn3.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_bn3_running_mean.pt
## layer1.0.bn3.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_bn3_running_var.pt
## layer1.0.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_0_bn3_num_batches_tracked.pt
## layer1.0.downsample.0.weight
- Shape: torch.Size([256, 64, 1, 1])
- Type: torch.float32
- File: layer1_0_downsample_0_weight.pt
## layer1.0.downsample.1.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_downsample_1_weight.pt
## layer1.0.downsample.1.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_downsample_1_bias.pt
## layer1.0.downsample.1.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_downsample_1_running_mean.pt
## layer1.0.downsample.1.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_0_downsample_1_running_var.pt
## layer1.0.downsample.1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_0_downsample_1_num_batches_tracked.pt
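
The `layer1.0.*` entries above describe a bottleneck residual block: a 1x1 convolution reducing to 64 channels, a 3x3 convolution at 64 channels, a 1x1 convolution expanding to 256 channels, plus a `downsample` branch that projects the 64-channel input to 256 channels so the residual addition matches. A sketch of the block these shapes imply (standard torchvision-style bottleneck; class and variable names here are illustrative, not part of this export):

```python
import torch
import torch.nn as nn

class Bottleneck(nn.Module):
    """Bottleneck block matching the layer1.0 shapes documented above."""
    def __init__(self, in_ch=64, mid_ch=64, out_ch=256, stride=1):
        super().__init__()
        self.conv1 = nn.Conv2d(in_ch, mid_ch, kernel_size=1, bias=False)   # [64, 64, 1, 1]
        self.bn1 = nn.BatchNorm2d(mid_ch)
        self.conv2 = nn.Conv2d(mid_ch, mid_ch, kernel_size=3, stride=stride,
                               padding=1, bias=False)                      # [64, 64, 3, 3]
        self.bn2 = nn.BatchNorm2d(mid_ch)
        self.conv3 = nn.Conv2d(mid_ch, out_ch, kernel_size=1, bias=False)  # [256, 64, 1, 1]
        self.bn3 = nn.BatchNorm2d(out_ch)
        self.relu = nn.ReLU(inplace=True)
        # downsample.0 / downsample.1: project the shortcut to 256 channels
        self.downsample = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=1, stride=stride, bias=False),  # [256, 64, 1, 1]
            nn.BatchNorm2d(out_ch),
        )

    def forward(self, x):
        identity = self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        return self.relu(out + identity)
```

Subsequent blocks in each layer (e.g. `layer1.1`, `layer1.2`) omit the `downsample` branch because their input already has the expanded channel count.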
## layer1.1.conv1.weight
- Shape: torch.Size([64, 256, 1, 1])
- Type: torch.float32
- File: layer1_1_conv1_weight.pt
## layer1.1.bn1.weight
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn1_weight.pt
## layer1.1.bn1.bias
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn1_bias.pt
## layer1.1.bn1.running_mean
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn1_running_mean.pt
## layer1.1.bn1.running_var
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn1_running_var.pt
## layer1.1.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_1_bn1_num_batches_tracked.pt
## layer1.1.conv2.weight
- Shape: torch.Size([64, 64, 3, 3])
- Type: torch.float32
- File: layer1_1_conv2_weight.pt
## layer1.1.bn2.weight
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn2_weight.pt
## layer1.1.bn2.bias
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn2_bias.pt
## layer1.1.bn2.running_mean
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn2_running_mean.pt
## layer1.1.bn2.running_var
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_1_bn2_running_var.pt
## layer1.1.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_1_bn2_num_batches_tracked.pt
## layer1.1.conv3.weight
- Shape: torch.Size([256, 64, 1, 1])
- Type: torch.float32
- File: layer1_1_conv3_weight.pt
## layer1.1.bn3.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_1_bn3_weight.pt
## layer1.1.bn3.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_1_bn3_bias.pt
## layer1.1.bn3.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_1_bn3_running_mean.pt
## layer1.1.bn3.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_1_bn3_running_var.pt
## layer1.1.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_1_bn3_num_batches_tracked.pt
## layer1.2.conv1.weight
- Shape: torch.Size([64, 256, 1, 1])
- Type: torch.float32
- File: layer1_2_conv1_weight.pt
## layer1.2.bn1.weight
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn1_weight.pt
## layer1.2.bn1.bias
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn1_bias.pt
## layer1.2.bn1.running_mean
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn1_running_mean.pt
## layer1.2.bn1.running_var
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn1_running_var.pt
## layer1.2.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_2_bn1_num_batches_tracked.pt
## layer1.2.conv2.weight
- Shape: torch.Size([64, 64, 3, 3])
- Type: torch.float32
- File: layer1_2_conv2_weight.pt
## layer1.2.bn2.weight
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn2_weight.pt
## layer1.2.bn2.bias
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn2_bias.pt
## layer1.2.bn2.running_mean
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn2_running_mean.pt
## layer1.2.bn2.running_var
- Shape: torch.Size([64])
- Type: torch.float32
- File: layer1_2_bn2_running_var.pt
## layer1.2.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_2_bn2_num_batches_tracked.pt
## layer1.2.conv3.weight
- Shape: torch.Size([256, 64, 1, 1])
- Type: torch.float32
- File: layer1_2_conv3_weight.pt
## layer1.2.bn3.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_2_bn3_weight.pt
## layer1.2.bn3.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_2_bn3_bias.pt
## layer1.2.bn3.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_2_bn3_running_mean.pt
## layer1.2.bn3.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer1_2_bn3_running_var.pt
## layer1.2.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer1_2_bn3_num_batches_tracked.pt
## layer2.0.conv1.weight
- Shape: torch.Size([128, 256, 1, 1])
- Type: torch.float32
- File: layer2_0_conv1_weight.pt
## layer2.0.bn1.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn1_weight.pt
## layer2.0.bn1.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn1_bias.pt
## layer2.0.bn1.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn1_running_mean.pt
## layer2.0.bn1.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn1_running_var.pt
## layer2.0.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_0_bn1_num_batches_tracked.pt
## layer2.0.conv2.weight
- Shape: torch.Size([128, 128, 3, 3])
- Type: torch.float32
- File: layer2_0_conv2_weight.pt
## layer2.0.bn2.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn2_weight.pt
## layer2.0.bn2.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn2_bias.pt
## layer2.0.bn2.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn2_running_mean.pt
## layer2.0.bn2.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_0_bn2_running_var.pt
## layer2.0.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_0_bn2_num_batches_tracked.pt
## layer2.0.conv3.weight
- Shape: torch.Size([512, 128, 1, 1])
- Type: torch.float32
- File: layer2_0_conv3_weight.pt
## layer2.0.bn3.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_bn3_weight.pt
## layer2.0.bn3.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_bn3_bias.pt
## layer2.0.bn3.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_bn3_running_mean.pt
## layer2.0.bn3.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_bn3_running_var.pt
## layer2.0.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_0_bn3_num_batches_tracked.pt
## layer2.0.downsample.0.weight
- Shape: torch.Size([512, 256, 1, 1])
- Type: torch.float32
- File: layer2_0_downsample_0_weight.pt
## layer2.0.downsample.1.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_downsample_1_weight.pt
## layer2.0.downsample.1.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_downsample_1_bias.pt
## layer2.0.downsample.1.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_downsample_1_running_mean.pt
## layer2.0.downsample.1.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_0_downsample_1_running_var.pt
## layer2.0.downsample.1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_0_downsample_1_num_batches_tracked.pt
## layer2.1.conv1.weight
- Shape: torch.Size([128, 512, 1, 1])
- Type: torch.float32
- File: layer2_1_conv1_weight.pt
## layer2.1.bn1.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn1_weight.pt
## layer2.1.bn1.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn1_bias.pt
## layer2.1.bn1.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn1_running_mean.pt
## layer2.1.bn1.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn1_running_var.pt
## layer2.1.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_1_bn1_num_batches_tracked.pt
## layer2.1.conv2.weight
- Shape: torch.Size([128, 128, 3, 3])
- Type: torch.float32
- File: layer2_1_conv2_weight.pt
## layer2.1.bn2.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn2_weight.pt
## layer2.1.bn2.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn2_bias.pt
## layer2.1.bn2.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn2_running_mean.pt
## layer2.1.bn2.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_1_bn2_running_var.pt
## layer2.1.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_1_bn2_num_batches_tracked.pt
## layer2.1.conv3.weight
- Shape: torch.Size([512, 128, 1, 1])
- Type: torch.float32
- File: layer2_1_conv3_weight.pt
## layer2.1.bn3.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_1_bn3_weight.pt
## layer2.1.bn3.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_1_bn3_bias.pt
## layer2.1.bn3.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_1_bn3_running_mean.pt
## layer2.1.bn3.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_1_bn3_running_var.pt
## layer2.1.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_1_bn3_num_batches_tracked.pt
## layer2.2.conv1.weight
- Shape: torch.Size([128, 512, 1, 1])
- Type: torch.float32
- File: layer2_2_conv1_weight.pt
## layer2.2.bn1.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn1_weight.pt
## layer2.2.bn1.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn1_bias.pt
## layer2.2.bn1.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn1_running_mean.pt
## layer2.2.bn1.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn1_running_var.pt
## layer2.2.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_2_bn1_num_batches_tracked.pt
## layer2.2.conv2.weight
- Shape: torch.Size([128, 128, 3, 3])
- Type: torch.float32
- File: layer2_2_conv2_weight.pt
## layer2.2.bn2.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn2_weight.pt
## layer2.2.bn2.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn2_bias.pt
## layer2.2.bn2.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn2_running_mean.pt
## layer2.2.bn2.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_2_bn2_running_var.pt
## layer2.2.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_2_bn2_num_batches_tracked.pt
## layer2.2.conv3.weight
- Shape: torch.Size([512, 128, 1, 1])
- Type: torch.float32
- File: layer2_2_conv3_weight.pt
## layer2.2.bn3.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_2_bn3_weight.pt
## layer2.2.bn3.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_2_bn3_bias.pt
## layer2.2.bn3.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_2_bn3_running_mean.pt
## layer2.2.bn3.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_2_bn3_running_var.pt
## layer2.2.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_2_bn3_num_batches_tracked.pt
## layer2.3.conv1.weight
- Shape: torch.Size([128, 512, 1, 1])
- Type: torch.float32
- File: layer2_3_conv1_weight.pt
## layer2.3.bn1.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn1_weight.pt
## layer2.3.bn1.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn1_bias.pt
## layer2.3.bn1.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn1_running_mean.pt
## layer2.3.bn1.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn1_running_var.pt
## layer2.3.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_3_bn1_num_batches_tracked.pt
## layer2.3.conv2.weight
- Shape: torch.Size([128, 128, 3, 3])
- Type: torch.float32
- File: layer2_3_conv2_weight.pt
## layer2.3.bn2.weight
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn2_weight.pt
## layer2.3.bn2.bias
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn2_bias.pt
## layer2.3.bn2.running_mean
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn2_running_mean.pt
## layer2.3.bn2.running_var
- Shape: torch.Size([128])
- Type: torch.float32
- File: layer2_3_bn2_running_var.pt
## layer2.3.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_3_bn2_num_batches_tracked.pt
## layer2.3.conv3.weight
- Shape: torch.Size([512, 128, 1, 1])
- Type: torch.float32
- File: layer2_3_conv3_weight.pt
## layer2.3.bn3.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_3_bn3_weight.pt
## layer2.3.bn3.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_3_bn3_bias.pt
## layer2.3.bn3.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_3_bn3_running_mean.pt
## layer2.3.bn3.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer2_3_bn3_running_var.pt
## layer2.3.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer2_3_bn3_num_batches_tracked.pt
## layer3.0.conv1.weight
- Shape: torch.Size([256, 512, 1, 1])
- Type: torch.float32
- File: layer3_0_conv1_weight.pt
## layer3.0.bn1.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn1_weight.pt
## layer3.0.bn1.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn1_bias.pt
## layer3.0.bn1.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn1_running_mean.pt
## layer3.0.bn1.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn1_running_var.pt
## layer3.0.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_0_bn1_num_batches_tracked.pt
## layer3.0.conv2.weight
- Shape: torch.Size([256, 256, 3, 3])
- Type: torch.float32
- File: layer3_0_conv2_weight.pt
## layer3.0.bn2.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn2_weight.pt
## layer3.0.bn2.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn2_bias.pt
## layer3.0.bn2.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn2_running_mean.pt
## layer3.0.bn2.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_0_bn2_running_var.pt
## layer3.0.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_0_bn2_num_batches_tracked.pt
## layer3.0.conv3.weight
- Shape: torch.Size([1024, 256, 1, 1])
- Type: torch.float32
- File: layer3_0_conv3_weight.pt
## layer3.0.bn3.weight
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_bn3_weight.pt
## layer3.0.bn3.bias
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_bn3_bias.pt
## layer3.0.bn3.running_mean
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_bn3_running_mean.pt
## layer3.0.bn3.running_var
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_bn3_running_var.pt
## layer3.0.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_0_bn3_num_batches_tracked.pt
## layer3.0.downsample.0.weight
- Shape: torch.Size([1024, 512, 1, 1])
- Type: torch.float32
- File: layer3_0_downsample_0_weight.pt
## layer3.0.downsample.1.weight
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_downsample_1_weight.pt
## layer3.0.downsample.1.bias
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_downsample_1_bias.pt
## layer3.0.downsample.1.running_mean
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_downsample_1_running_mean.pt
## layer3.0.downsample.1.running_var
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_0_downsample_1_running_var.pt
## layer3.0.downsample.1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_0_downsample_1_num_batches_tracked.pt
## layer3.1.conv1.weight
- Shape: torch.Size([256, 1024, 1, 1])
- Type: torch.float32
- File: layer3_1_conv1_weight.pt
## layer3.1.bn1.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn1_weight.pt
## layer3.1.bn1.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn1_bias.pt
## layer3.1.bn1.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn1_running_mean.pt
## layer3.1.bn1.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn1_running_var.pt
## layer3.1.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_1_bn1_num_batches_tracked.pt
## layer3.1.conv2.weight
- Shape: torch.Size([256, 256, 3, 3])
- Type: torch.float32
- File: layer3_1_conv2_weight.pt
## layer3.1.bn2.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn2_weight.pt
## layer3.1.bn2.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn2_bias.pt
## layer3.1.bn2.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn2_running_mean.pt
## layer3.1.bn2.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_1_bn2_running_var.pt
## layer3.1.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_1_bn2_num_batches_tracked.pt
## layer3.1.conv3.weight
- Shape: torch.Size([1024, 256, 1, 1])
- Type: torch.float32
- File: layer3_1_conv3_weight.pt
## layer3.1.bn3.weight
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_1_bn3_weight.pt
## layer3.1.bn3.bias
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_1_bn3_bias.pt
## layer3.1.bn3.running_mean
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_1_bn3_running_mean.pt
## layer3.1.bn3.running_var
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_1_bn3_running_var.pt
## layer3.1.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_1_bn3_num_batches_tracked.pt
## layer3.2.conv1.weight
- Shape: torch.Size([256, 1024, 1, 1])
- Type: torch.float32
- File: layer3_2_conv1_weight.pt
## layer3.2.bn1.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn1_weight.pt
## layer3.2.bn1.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn1_bias.pt
## layer3.2.bn1.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn1_running_mean.pt
## layer3.2.bn1.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn1_running_var.pt
## layer3.2.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_2_bn1_num_batches_tracked.pt
## layer3.2.conv2.weight
- Shape: torch.Size([256, 256, 3, 3])
- Type: torch.float32
- File: layer3_2_conv2_weight.pt
## layer3.2.bn2.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn2_weight.pt
## layer3.2.bn2.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn2_bias.pt
## layer3.2.bn2.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn2_running_mean.pt
## layer3.2.bn2.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_2_bn2_running_var.pt
## layer3.2.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_2_bn2_num_batches_tracked.pt
## layer3.2.conv3.weight
- Shape: torch.Size([1024, 256, 1, 1])
- Type: torch.float32
- File: layer3_2_conv3_weight.pt
## layer3.2.bn3.weight
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_2_bn3_weight.pt
## layer3.2.bn3.bias
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_2_bn3_bias.pt
## layer3.2.bn3.running_mean
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_2_bn3_running_mean.pt
## layer3.2.bn3.running_var
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_2_bn3_running_var.pt
## layer3.2.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_2_bn3_num_batches_tracked.pt
## layer3.3.conv1.weight
- Shape: torch.Size([256, 1024, 1, 1])
- Type: torch.float32
- File: layer3_3_conv1_weight.pt
## layer3.3.bn1.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn1_weight.pt
## layer3.3.bn1.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn1_bias.pt
## layer3.3.bn1.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn1_running_mean.pt
## layer3.3.bn1.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn1_running_var.pt
## layer3.3.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_3_bn1_num_batches_tracked.pt
## layer3.3.conv2.weight
- Shape: torch.Size([256, 256, 3, 3])
- Type: torch.float32
- File: layer3_3_conv2_weight.pt
## layer3.3.bn2.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn2_weight.pt
## layer3.3.bn2.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn2_bias.pt
## layer3.3.bn2.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn2_running_mean.pt
## layer3.3.bn2.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_3_bn2_running_var.pt
## layer3.3.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_3_bn2_num_batches_tracked.pt
## layer3.3.conv3.weight
- Shape: torch.Size([1024, 256, 1, 1])
- Type: torch.float32
- File: layer3_3_conv3_weight.pt
## layer3.3.bn3.weight
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_3_bn3_weight.pt
## layer3.3.bn3.bias
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_3_bn3_bias.pt
## layer3.3.bn3.running_mean
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_3_bn3_running_mean.pt
## layer3.3.bn3.running_var
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_3_bn3_running_var.pt
## layer3.3.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_3_bn3_num_batches_tracked.pt
## layer3.4.conv1.weight
- Shape: torch.Size([256, 1024, 1, 1])
- Type: torch.float32
- File: layer3_4_conv1_weight.pt
## layer3.4.bn1.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn1_weight.pt
## layer3.4.bn1.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn1_bias.pt
## layer3.4.bn1.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn1_running_mean.pt
## layer3.4.bn1.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn1_running_var.pt
## layer3.4.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_4_bn1_num_batches_tracked.pt
## layer3.4.conv2.weight
- Shape: torch.Size([256, 256, 3, 3])
- Type: torch.float32
- File: layer3_4_conv2_weight.pt
## layer3.4.bn2.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn2_weight.pt
## layer3.4.bn2.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn2_bias.pt
## layer3.4.bn2.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn2_running_mean.pt
## layer3.4.bn2.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_4_bn2_running_var.pt
## layer3.4.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_4_bn2_num_batches_tracked.pt
## layer3.4.conv3.weight
- Shape: torch.Size([1024, 256, 1, 1])
- Type: torch.float32
- File: layer3_4_conv3_weight.pt
## layer3.4.bn3.weight
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_4_bn3_weight.pt
## layer3.4.bn3.bias
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_4_bn3_bias.pt
## layer3.4.bn3.running_mean
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_4_bn3_running_mean.pt
## layer3.4.bn3.running_var
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_4_bn3_running_var.pt
## layer3.4.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_4_bn3_num_batches_tracked.pt
## layer3.5.conv1.weight
- Shape: torch.Size([256, 1024, 1, 1])
- Type: torch.float32
- File: layer3_5_conv1_weight.pt
## layer3.5.bn1.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn1_weight.pt
## layer3.5.bn1.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn1_bias.pt
## layer3.5.bn1.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn1_running_mean.pt
## layer3.5.bn1.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn1_running_var.pt
## layer3.5.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_5_bn1_num_batches_tracked.pt
## layer3.5.conv2.weight
- Shape: torch.Size([256, 256, 3, 3])
- Type: torch.float32
- File: layer3_5_conv2_weight.pt
## layer3.5.bn2.weight
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn2_weight.pt
## layer3.5.bn2.bias
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn2_bias.pt
## layer3.5.bn2.running_mean
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn2_running_mean.pt
## layer3.5.bn2.running_var
- Shape: torch.Size([256])
- Type: torch.float32
- File: layer3_5_bn2_running_var.pt
## layer3.5.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_5_bn2_num_batches_tracked.pt
## layer3.5.conv3.weight
- Shape: torch.Size([1024, 256, 1, 1])
- Type: torch.float32
- File: layer3_5_conv3_weight.pt
## layer3.5.bn3.weight
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_5_bn3_weight.pt
## layer3.5.bn3.bias
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_5_bn3_bias.pt
## layer3.5.bn3.running_mean
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_5_bn3_running_mean.pt
## layer3.5.bn3.running_var
- Shape: torch.Size([1024])
- Type: torch.float32
- File: layer3_5_bn3_running_var.pt
## layer3.5.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer3_5_bn3_num_batches_tracked.pt
## layer4.0.conv1.weight
- Shape: torch.Size([512, 1024, 1, 1])
- Type: torch.float32
- File: layer4_0_conv1_weight.pt
## layer4.0.bn1.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn1_weight.pt
## layer4.0.bn1.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn1_bias.pt
## layer4.0.bn1.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn1_running_mean.pt
## layer4.0.bn1.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn1_running_var.pt
## layer4.0.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_0_bn1_num_batches_tracked.pt
## layer4.0.conv2.weight
- Shape: torch.Size([512, 512, 3, 3])
- Type: torch.float32
- File: layer4_0_conv2_weight.pt
## layer4.0.bn2.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn2_weight.pt
## layer4.0.bn2.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn2_bias.pt
## layer4.0.bn2.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn2_running_mean.pt
## layer4.0.bn2.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_0_bn2_running_var.pt
## layer4.0.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_0_bn2_num_batches_tracked.pt
## layer4.0.conv3.weight
- Shape: torch.Size([2048, 512, 1, 1])
- Type: torch.float32
- File: layer4_0_conv3_weight.pt
## layer4.0.bn3.weight
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_bn3_weight.pt
## layer4.0.bn3.bias
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_bn3_bias.pt
## layer4.0.bn3.running_mean
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_bn3_running_mean.pt
## layer4.0.bn3.running_var
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_bn3_running_var.pt
## layer4.0.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_0_bn3_num_batches_tracked.pt
## layer4.0.downsample.0.weight
- Shape: torch.Size([2048, 1024, 1, 1])
- Type: torch.float32
- File: layer4_0_downsample_0_weight.pt
## layer4.0.downsample.1.weight
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_downsample_1_weight.pt
## layer4.0.downsample.1.bias
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_downsample_1_bias.pt
## layer4.0.downsample.1.running_mean
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_downsample_1_running_mean.pt
## layer4.0.downsample.1.running_var
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_0_downsample_1_running_var.pt
## layer4.0.downsample.1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_0_downsample_1_num_batches_tracked.pt
## layer4.1.conv1.weight
- Shape: torch.Size([512, 2048, 1, 1])
- Type: torch.float32
- File: layer4_1_conv1_weight.pt
## layer4.1.bn1.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn1_weight.pt
## layer4.1.bn1.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn1_bias.pt
## layer4.1.bn1.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn1_running_mean.pt
## layer4.1.bn1.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn1_running_var.pt
## layer4.1.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_1_bn1_num_batches_tracked.pt
## layer4.1.conv2.weight
- Shape: torch.Size([512, 512, 3, 3])
- Type: torch.float32
- File: layer4_1_conv2_weight.pt
## layer4.1.bn2.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn2_weight.pt
## layer4.1.bn2.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn2_bias.pt
## layer4.1.bn2.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn2_running_mean.pt
## layer4.1.bn2.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_1_bn2_running_var.pt
## layer4.1.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_1_bn2_num_batches_tracked.pt
## layer4.1.conv3.weight
- Shape: torch.Size([2048, 512, 1, 1])
- Type: torch.float32
- File: layer4_1_conv3_weight.pt
## layer4.1.bn3.weight
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_1_bn3_weight.pt
## layer4.1.bn3.bias
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_1_bn3_bias.pt
## layer4.1.bn3.running_mean
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_1_bn3_running_mean.pt
## layer4.1.bn3.running_var
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_1_bn3_running_var.pt
## layer4.1.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_1_bn3_num_batches_tracked.pt
## layer4.2.conv1.weight
- Shape: torch.Size([512, 2048, 1, 1])
- Type: torch.float32
- File: layer4_2_conv1_weight.pt
## layer4.2.bn1.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn1_weight.pt
## layer4.2.bn1.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn1_bias.pt
## layer4.2.bn1.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn1_running_mean.pt
## layer4.2.bn1.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn1_running_var.pt
## layer4.2.bn1.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_2_bn1_num_batches_tracked.pt
## layer4.2.conv2.weight
- Shape: torch.Size([512, 512, 3, 3])
- Type: torch.float32
- File: layer4_2_conv2_weight.pt
## layer4.2.bn2.weight
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn2_weight.pt
## layer4.2.bn2.bias
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn2_bias.pt
## layer4.2.bn2.running_mean
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn2_running_mean.pt
## layer4.2.bn2.running_var
- Shape: torch.Size([512])
- Type: torch.float32
- File: layer4_2_bn2_running_var.pt
## layer4.2.bn2.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_2_bn2_num_batches_tracked.pt
## layer4.2.conv3.weight
- Shape: torch.Size([2048, 512, 1, 1])
- Type: torch.float32
- File: layer4_2_conv3_weight.pt
## layer4.2.bn3.weight
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_2_bn3_weight.pt
## layer4.2.bn3.bias
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_2_bn3_bias.pt
## layer4.2.bn3.running_mean
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_2_bn3_running_mean.pt
## layer4.2.bn3.running_var
- Shape: torch.Size([2048])
- Type: torch.float32
- File: layer4_2_bn3_running_var.pt
## layer4.2.bn3.num_batches_tracked
- Shape: torch.Size([])
- Type: torch.int64
- File: layer4_2_bn3_num_batches_tracked.pt
## fc.weight
- Shape: torch.Size([1000, 2048])
- Type: torch.float32
- File: fc_weight.pt
## fc.bias
- Shape: torch.Size([1000])
- Type: torch.float32
- File: fc_bias.pt
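
The per-parameter files can be reassembled into a complete state dict by reversing the naming convention: for every key in the model's `state_dict()`, replace dots with underscores to obtain the file name. A hedged sketch, assuming the files live in a `weights/` directory and the architecture is torchvision's `resnet50` (which matches every shape documented above, including `fc`):

```python
# Rebuild the full state_dict from the individual .pt files and load it
# into torchvision's resnet50. "weights/" is an assumed local directory.
from pathlib import Path

import torch
from torchvision.models import resnet50

weights_dir = Path("weights")
model = resnet50()  # randomly initialized; every entry is overwritten below

state_dict = {
    name: torch.load(weights_dir / f"{name.replace('.', '_')}.pt", map_location="cpu")
    for name in model.state_dict().keys()
}
model.load_state_dict(state_dict)
model.eval()
```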