1 file changed: +2 / -2 lines changed
lines changed Original file line number Diff line number Diff line change @@ -522,7 +522,7 @@ class SwinTransformer(nn.Module):
522
522
mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.0.
523
523
dropout (float): Dropout rate. Default: 0.0.
524
524
attention_dropout (float): Attention dropout rate. Default: 0.0.
525
- stochastic_depth_prob (float): Stochastic depth rate. Default: 0.0.
525
+ stochastic_depth_prob (float): Stochastic depth rate. Default: 0.1.
526
526
num_classes (int): Number of classes for classification head. Default: 1000.
527
527
block (nn.Module, optional): SwinTransformer Block. Default: None.
528
528
norm_layer (nn.Module, optional): Normalization layer. Default: None.
@@ -539,7 +539,7 @@ def __init__(
539
539
mlp_ratio : float = 4.0 ,
540
540
dropout : float = 0.0 ,
541
541
attention_dropout : float = 0.0 ,
542
- stochastic_depth_prob : float = 0.0 ,
542
+ stochastic_depth_prob : float = 0.1 ,
543
543
num_classes : int = 1000 ,
544
544
block : Callable [..., nn .Module ] = SwinTransformerBlock ,
545
545
norm_layer : Optional [Callable [..., nn .Module ]] = None ,
You can’t perform that action at this time.
0 commit comments