put LoRA back (SAT); not yet runnable

zR 2025-01-02 11:48:18 +08:00
parent a88c1ede69
commit b080c6a010
3 changed files with 32 additions and 18 deletions

.gitignore

@@ -22,3 +22,4 @@ venv
 **/results
 **/*.mp4
 **/validation_set
+CogVideo-1.0


@@ -90,27 +90,37 @@ class SATVideoDiffusionEngine(nn.Module):
         self.no_cond_log = no_cond_log
         self.device = args.device
+        # put lora add here

     def disable_untrainable_params(self):
         total_trainable = 0
-        for n, p in self.named_parameters():
-            if p.requires_grad == False:
-                continue
-            flag = False
-            for prefix in self.not_trainable_prefixes:
-                if n.startswith(prefix) or prefix == "all":
-                    flag = True
-                    break
-            lora_prefix = ["matrix_A", "matrix_B"]
-            for prefix in lora_prefix:
-                if prefix in n:
-                    flag = False
-                    break
-            if flag:
-                p.requires_grad_(False)
-            else:
-                total_trainable += p.numel()
+        if self.lora_train:
+            for n, p in self.named_parameters():
+                if p.requires_grad == False:
+                    continue
+                if 'lora_layer' not in n:
+                    p.lr_scale = 0
+                else:
+                    total_trainable += p.numel()
+        else:
+            for n, p in self.named_parameters():
+                if p.requires_grad == False:
+                    continue
+                flag = False
+                for prefix in self.not_trainable_prefixes:
+                    if n.startswith(prefix) or prefix == "all":
+                        flag = True
+                        break
+                lora_prefix = ['matrix_A', 'matrix_B']
+                for prefix in lora_prefix:
+                    if prefix in n:
+                        flag = False
+                        break
+                if flag:
+                    p.requires_grad_(False)
+                else:
+                    total_trainable += p.numel()
         print_rank0("***** Total trainable parameters: " + str(total_trainable) + " *****")


@@ -31,6 +31,7 @@ class ImagePatchEmbeddingMixin(BaseMixin):
     def word_embedding_forward(self, input_ids, **kwargs):
         images = kwargs["images"]  # (b,t,c,h,w)
         emb = rearrange(images, "b t c h w -> b (t h w) c")
+        # emb = rearrange(images, "b c t h w -> b (t h w) c")
         emb = rearrange(
             emb,
             "b (t o h p w q) c -> b (t h w) (c o p q)",
@@ -810,7 +811,9 @@ class DiffusionTransformer(BaseModel):
             ),
             reinit=True,
         )
+        if "lora_config" in module_configs:
+            lora_config = module_configs["lora_config"]
+            self.add_mixin("lora", instantiate_from_config(lora_config, layer_num=self.num_layers), reinit=True)
         return

     def forward(self, x, timesteps=None, context=None, y=None, **kwargs):
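This block wires an optional LoRA mixin into the transformer whenever the model config carries a lora_config entry. A hypothetical example of such an entry, following the target/params layout that instantiate_from_config resolves in sgm-style codebases (the target path and the rank parameter name are assumptions, not shown in the diff):

lora_config = {
    "target": "sat.lora.LoraMixin",  # assumed module path, not confirmed by the diff
    "params": {
        "r": 128,                    # assumed name for the LoRA rank
    },
}
# With layer_num injected as in the diff:
# self.add_mixin("lora", instantiate_from_config(lora_config, layer_num=self.num_layers), reinit=True)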