Official implementation of ICCV 2025 paper Representation Shift: Unifying Token Compression with FlashAttention.
conda create -n rep_shift python=3.10
conda activate rep_shift
conda env update -n rep_shift -f rep_shift.yml
python main.py --data_path /path/to/data --eval --model deit_base --batch-size-eval 200 --use_flash True --drop_r "[0.2,0,0,0.2,0,0,0.2,0,0,0,0,0]"
@inproceedings{choi2025representation,
title={Representation Shift: Unifying Token Compression with FlashAttention},
author={Choi, Joonmyung and Lee, Sanghyeok and Ko, Byungoh and Kim, Eunseo and Kil, Jihyung and Kim, Hyunwoo J.},
booktitle={Proceedings of the IEEE/CVF International Conference on Computer Vision},
year={2025}
}
