# 8-layer Transformer encoder
# Two stacked Transformer encoders with identical hyperparameters
# (8 encoder layers each, d_model=32, 4 heads, FFN width 128, max sequence 512).
# The shared config dict keeps the two constructions in sync.
shared_tf_config = dict(
    d_model=32,
    nhead=4,
    num_encoder_layers=8,
    dim_feedforward=128,
    max_len=512,
)
self.transformer = TransformerModel(**shared_tf_config)
self.transformer2 = TransformerModel(**shared_tf_config)
# UniRepLKNet backbone taking 2-channel input and producing a 128-dim output.
# NOTE(review): flag semantics (deploy, with_cp, attempt_use_lk_impl) follow the
# unireplknet project's defaults — confirm against that module's docstrings.
base_width = 48  # stage widths: base, 2x, 4x, 4x
self.BigConv1 = unireplknet.UniRepLKNet(
    in_chans=2,
    num_classes=128,
    depths=(3, 3, 27, 3),
    dims=(base_width, base_width * 2, base_width * 4, base_width * 4),
    drop_path_rate=0.15,
    layer_scale_init_value=1e-6,
    head_init_scale=1.0,
    kernel_sizes=None,  # None -> presumably the architecture's default kernels
    deploy=False,
    with_cp=False,
    init_cfg=None,
    attempt_use_lk_impl=False,
    use_sync_bn=False,
)
# Tags: False, 48, notes (记录), self, model tuning (炼丹), init, num, 128
# From: https://www.cnblogs.com/FrostyForest/p/17932382.html