diff --git a/configs/Kinetics/SLOWFAST_4x16_R50.yaml b/configs/Kinetics/SLOWFAST_4x16_R50.yaml index 9eeadf5..dafcaf2 100755 --- a/configs/Kinetics/SLOWFAST_4x16_R50.yaml +++ b/configs/Kinetics/SLOWFAST_4x16_R50.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 10 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 32 diff --git a/configs/Kinetics/SLOWFAST_8x8_R101.yaml b/configs/Kinetics/SLOWFAST_8x8_R101.yaml index 72142be..49fc836 100755 --- a/configs/Kinetics/SLOWFAST_8x8_R101.yaml +++ b/configs/Kinetics/SLOWFAST_8x8_R101.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 10 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 32 diff --git a/configs/Kinetics/SLOWFAST_8x8_R50.yaml b/configs/Kinetics/SLOWFAST_8x8_R50.yaml index 0922851..75b4e18 100755 --- a/configs/Kinetics/SLOWFAST_8x8_R50.yaml +++ b/configs/Kinetics/SLOWFAST_8x8_R50.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 10 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 32 diff --git a/configs/Kinetics/TimeSformer_divST_16x16_448.yaml b/configs/Kinetics/TimeSformer_divST_16x16_448.yaml index de4b4ff..c601604 100755 --- a/configs/Kinetics/TimeSformer_divST_16x16_448.yaml +++ b/configs/Kinetics/TimeSformer_divST_16x16_448.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 5 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 16 diff --git a/configs/Kinetics/TimeSformer_divST_8x32_224.yaml b/configs/Kinetics/TimeSformer_divST_8x32_224.yaml index 3c0e65c..b326a5c 100755 --- a/configs/Kinetics/TimeSformer_divST_8x32_224.yaml +++ b/configs/Kinetics/TimeSformer_divST_8x32_224.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 5 
CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 8 diff --git a/configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml b/configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml index f494b97..85b264f 100755 --- a/configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml +++ b/configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 5 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 8 diff --git a/configs/Kinetics/TimeSformer_divST_96x4_224.yaml b/configs/Kinetics/TimeSformer_divST_96x4_224.yaml index 976d550..f92a0b9 100755 --- a/configs/Kinetics/TimeSformer_divST_96x4_224.yaml +++ b/configs/Kinetics/TimeSformer_divST_96x4_224.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 5 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 96 diff --git a/configs/Kinetics/TimeSformer_jointST_8x32_224.yaml b/configs/Kinetics/TimeSformer_jointST_8x32_224.yaml index ea48c27..0d9a123 100755 --- a/configs/Kinetics/TimeSformer_jointST_8x32_224.yaml +++ b/configs/Kinetics/TimeSformer_jointST_8x32_224.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 5 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ NUM_FRAMES: 8 diff --git a/configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml b/configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml index b6c3e7b..358223d 100755 --- a/configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml +++ b/configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml @@ -5,6 +5,8 @@ TRAIN: EVAL_PERIOD: 5 CHECKPOINT_PERIOD: 5 AUTO_RESUME: True + CHECKPOINT_FILE_PATH: /path/to/checkpoint + FINETUNE: False DATA: PATH_TO_DATA_DIR: /path/to/kinetics/ 
NUM_FRAMES: 8 diff --git a/tools/train_net.py b/tools/train_net.py index 9149e0a..bf7ccea 100755 --- a/tools/train_net.py +++ b/tools/train_net.py @@ -415,7 +415,7 @@ def train(cfg): optimizer = optim.construct_optimizer(model, cfg) # Load a checkpoint to resume training if applicable. - if not cfg.TRAIN.FINETUNE: + if not cfg.TRAIN.FINETUNE: start_epoch = cu.load_train_checkpoint(cfg, model, optimizer) else: start_epoch = 0