conda create -n unilact python=3.10 -y
conda activate unilact
git clone https://github.com/manishgovind/uniact-vla.git
cd UniLACT
pip install -r requirements.txt
export PROJECT_UNILACT_ROOT=/path/to/UniLACT

UniLACT training consists of three stages:
- Stage 1: Unified latent action learning (UniLARN)
- Stage 2: Unified latent pretraining
- Stage 3: Action fine-tuning
Training is driven by YAML configs under
unilact/configs/train/ and configs under unilarn/configs/train.
# (Update this command to your UniLARN entrypoint/config if different)
cd ${PROJECT_UNILACT_ROOT}/unilarn
accelerate launch --main_process_port <master_port> train_unilarn.py --config_path "${PROJECT_UNILACT_ROOT}/unilarn/configs/train/train_unilarn_on_calvin.yaml"

# (Update this command to your pretraining config if different)
cd ${PROJECT_UNILACT_ROOT}/unilact/train
accelerate launch --main_process_port <master_port> train_unilact.py --config_path "${PROJECT_UNILACT_ROOT}/unilact/configs/train/pretrain_unilact_on_calvin.yaml"

cd ${PROJECT_UNILACT_ROOT}/unilact/train
accelerate launch --main_process_port <master_port> train_unilact.py --config_path "${PROJECT_UNILACT_ROOT}/unilact/configs/train/finetune_unilact_on_calvin.yaml"

Install the CALVIN benchmark in the same conda environment (unilact) by following the official CALVIN repository instructions.
conda activate unilact
export PROJECT_UNILACT_ROOT=/path/to/UniLACT
cd ${PROJECT_UNILACT_ROOT}/scripts
bash evaluate_unilact_on_calvin.sh

- Training Data preparation
- Support for OXE-pretraining
- Release pretrained and finetuned model checkpoints
This project builds on top of Moto and CALVIN. We thank the authors for their open-sourced work.
If you find our work useful, please cite:
@article{govind2026unilactdepthawarergblatent,
title= {UniLACT: Depth-Aware RGB Latent Action Learning for Vision-Language-Action Models},
author= {Manish Kumar Govind and Dominick Reilly and Pu Wang and Srijan Das},
journal={arXiv preprint arXiv:2602.20231},
year={2026}
}