===== Printed LDM Model Structure =====
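The module tree below is the structure printed for the loaded LDM (latent diffusion) model. The denoiser is a DiT with 12 DiTBlocks at hidden size 768, embedding an 8-dimensional latent input and conditioning on timesteps through a TimestepEmbedder plus per-block adaLN modulation; the VAE's encoder and decoder are each built on an 8-layer TransformerEncoder at hidden size 512, with separate decoder heads for atom types, fractional coordinates, and lattice parameters.
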
<code>
LDMModule(
  (denoiser): DiT(
    (x_embedder): Linear(in_features=8, out_features=768, bias=True)
    (t_embedder): TimestepEmbedder(
      (mlp): Sequential(
        (0): Linear(in_features=256, out_features=768, bias=True)
        (1): SiLU()
        (2): Linear(in_features=768, out_features=768, bias=True)
      )
    )
    (blocks): ModuleList(
      (0-11): 12 x DiTBlock(
        (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=False)
        (attn): MultiheadAttention(
          (out_proj): NonDynamicallyQuantizableLinear(in_features=768, out_features=768, bias=True)
        )
        (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=False)
        (mlp): Mlp(
          (fc1): Linear(in_features=768, out_features=3072, bias=True)
          (act): GELU(approximate='tanh')
          (drop1): Dropout(p=0, inplace=False)
          (norm): Identity()
          (fc2): Linear(in_features=3072, out_features=768, bias=True)
          (drop2): Dropout(p=0, inplace=False)
        )
        (adaLN_modulation): Sequential(
          (0): SiLU()
          (1): Linear(in_features=768, out_features=4608, bias=True)
        )
      )
    )
    (final_layer): FinalLayer(
      (norm_final): LayerNorm((768,), eps=1e-06, elementwise_affine=False)
      (linear): Linear(in_features=768, out_features=16, bias=True)
      (adaLN_modulation): Sequential(
        (0): SiLU()
        (1): Linear(in_features=768, out_features=1536, bias=True)
      )
    )
  )
  (vae): VAEModule(
    (encoder): TransformerEncoder(
      (atom_type_embedder): Embedding(100, 512)
      (lattices_embedder): Sequential(
        (0): Linear(in_features=9, out_features=512, bias=False)
        (1): SiLU()
        (2): Linear(in_features=512, out_features=512, bias=True)
      )
      (frac_coords_embedder): Sequential(
        (0): Linear(in_features=3, out_features=512, bias=False)
        (1): SiLU()
        (2): Linear(in_features=512, out_features=512, bias=True)
      )
      (transformer): TransformerEncoder(
        (layers): ModuleList(
          (0-7): 8 x TransformerEncoderLayer(
            (self_attn): MultiheadAttention(
              (out_proj): NonDynamicallyQuantizableLinear(in_features=512, out_features=512, bias=True)
            )
            (linear1): Linear(in_features=512, out_features=2048, bias=True)
            (dropout): Dropout(p=0.0, inplace=False)
            (linear2): Linear(in_features=2048, out_features=512, bias=True)
            (norm1): LayerNorm((512,), eps=1e-05, elementwise_affine=True)
            (norm2): LayerNorm((512,), eps=1e-05, elementwise_affine=True)
            (dropout1): Dropout(p=0.0, inplace=False)
            (dropout2): Dropout(p=0.0, inplace=False)
            (activation): GELU(approximate='tanh')
          )
        )
        (norm): LayerNorm((512,), eps=1e-05, elementwise_affine=True)
      )
    )
    (decoder): TransformerDecoder(
      (transformer): TransformerEncoder(
        (layers): ModuleList(
          (0-7): 8 x TransformerEncoderLayer(
            (self_attn): MultiheadAttention(
              (out_proj): NonDynamicallyQuantizableLinear(in_features=512, out_features=512, bias=True)
            )
            (linear1): Linear(in_features=512, out_features=2048, bias=True)
            (dropout): Dropout(p=0.0, inplace=False)
            (linear2): Linear(in_features=2048, out_features=512, bias=True)
            (norm1): LayerNorm((512,), eps=1e-05, elementwise_affine=True)
            (norm2): LayerNorm((512,), eps=1e-05, elementwise_affine=True)
            (dropout1): Dropout(p=0.0, inplace=False)
            (dropout2): Dropout(p=0.0, inplace=False)
            (activation): GELU(approximate='tanh')
          )
        )
        (norm): LayerNorm((512,), eps=1e-05, elementwise_affine=True)
      )
      (atom_types_head): Linear(in_features=512, out_features=100, bias=True)
      (frac_coords_head): Linear(in_features=512, out_features=3, bias=False)
      (lattice_head): Linear(in_features=512, out_features=6, bias=False)
    )
    (quant_conv): Linear(in_features=512, out_features=16, bias=False)
    (post_quant_conv): Linear(in_features=8, out_features=512, bias=False)
  )
)
</code>
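
As a minimal sketch of how a printout like the one above can be reproduced and summarized: the snippet below assumes a loaded ''LDMModule'' instance bound to a hypothetical variable ''model'' (for example, the object produced by the loading steps earlier on this page). ''print(model)'' emits the module tree, and standard PyTorch calls break the parameter count down by top-level sub-module.

<code python>
# Minimal sketch (assumption): `model` is an already-loaded LDMModule instance,
# e.g. the object obtained from the loading code earlier on this page.
import torch

def count_params(module: torch.nn.Module) -> int:
    """Total number of parameters contained in a module."""
    return sum(p.numel() for p in module.parameters())

# Printing any torch.nn.Module yields a module tree like the one shown above.
print(model)

# Parameter breakdown by top-level sub-module (here: denoiser vs. vae).
for name, child in model.named_children():
    print(f"{name:>10s}: {count_params(child):,d} parameters")

print(f"{'total':>10s}: {count_params(model):,d} parameters")
</code>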