# Step 1: Create the environment and prepare the pretrained models by following the README.md.
# The codebase is based on https://github.com/GuyTevet/motion-diffusion-model

# Step 2: We use DECA to obtain FLAME parameters for the corresponding videos in the CelebV-Text dataset. Next, we prepare the CelebV-Text dataset following the structure of the HumanML3D repository on GitHub and place it in the dataset folder. A small subset is provided to help readers understand the structure; the full CelebV-Text dataset is coming soon.

# Step 3: Train the MotionDM and EmotionDM models using the following commands:

# Train MotionDM: a diffusion model over the "motion" data class (FLAME pose,
# per the step-4 note below) on the CelebV-Text dataset, pinned to GPU 0.
#   --lambda_vel 0.5        weight of the velocity loss term
#   --layers 1 --latent_dim 64
#                           transformer-encoder size (1 layer, 64-dim latent)
#   --smooth_method mean --window_size 3
#                           mean smoothing over a 3-frame window
#                           (presumably applied to the training targets --
#                           confirm in train.train_mdm)
#   --num_steps 600000      total training steps; the checkpoint name used in
#                           step 4 (model000600000.pt) matches this value
# Logs go to TensorBoard; checkpoints are written under --save_dir.
CUDA_VISIBLE_DEVICES=0 python -m train.train_mdm \
    --save_dir save/my_celebv_trans_enc_motion_vel05_layer1_dim64_smooth_ws3 \
    --dataset CelebVText --train_platform_type TensorboardPlatform --lambda_vel 0.5 \
    --diffusion_type CelebVText_MDM  --layers 1 --latent_dim 64 --data_class motion \
    --num_steps 600000 --smooth_method mean --window_size 3

# Train EmotionDM: the same architecture and schedule as the motion model
# above, but on the "emotion" data class (FLAME expression, per the step-4
# note below) and with the velocity loss disabled (--lambda_vel 0).
# Checkpoints are written under --save_dir; the step-4 command loads
# model000600000.pt from this directory.
CUDA_VISIBLE_DEVICES=0 python -m train.train_mdm \
    --save_dir save/my_celebv_trans_enc_emotion_vel0_layer1_dim64_smooth_ws3 \
    --dataset CelebVText --train_platform_type TensorboardPlatform --lambda_vel 0 \
    --diffusion_type CelebVText_MDM  --layers 1 --latent_dim 64 --data_class emotion \
    --num_steps 600000 --smooth_method mean --window_size 3

# Step 4: Generate motion (FLAME pose) and emotion (FLAME expression) sequences using the trained models:

# Sample from both trained checkpoints (600k-step models from step 3) for
# every prompt in the batch text file.
#   --guidance_param 7.5    classifier-free guidance scale
#   --motion_length 8       generated sequence length (presumably seconds --
#                           confirm in sample.generate_celebvtext_batch)
# NOTE(review): both model paths are passed but --data_class is "motion";
# verify whether this flag selects one output or is ignored here.
CUDA_VISIBLE_DEVICES=0 python -m sample.generate_celebvtext_batch \
    --motion_model_path save/my_celebv_trans_enc_motion_vel05_layer1_dim64_smooth_ws3/model000600000.pt \
    --emotion_model_path save/my_celebv_trans_enc_emotion_vel0_layer1_dim64_smooth_ws3/model000600000.pt \
    --motion_length 8 --data_class motion --input_text ./assets/celebv_text_prompts_batch.txt --guidance_param 7.5

# Step 5: Use the generated FLAME parameters to construct the FLAME sequence and obtain the renderings for the FLAME2Video stage. The code for this step will be available soon and can be referenced from the official DECA implementation on GitHub.

