From e1b48d5b2a363c1da7dbc2bd2704f97e6ef8fd76 Mon Sep 17 00:00:00 2001 From: Tianlong Ai <50650583+AI-Tianlong@users.noreply.github.com> Date: Mon, 29 Jan 2024 17:37:31 +0800 Subject: [PATCH] [New Configs] Add mmseg/configs folder & Support loveda, potsdam, schedules, default_runtime new configs (#3542) # [New Configs] Add mmseg/configs folder & Support loveda, potsdam, schedules, default_runtime new configs - As the title , the new configs path is mmseg/configs/ - The configs files for the dataset have been tested. - The purpose of this PR is to enable other community members migrating to the new config to reference the new configs files for schedules and default runtime. Hoping for a quick merge~~~. - Details of this task can be found at: https://github.com/AI-Tianlong/mmseg-new-config ![image](https://github.com/AI-Tianlong/mmseg-new-config/assets/50650583/04d40057-ff2c-492c-be44-52c6d34d3676) --- mmseg/configs/_base_/datasets/loveda.py | 79 ++++++++++++++++++ mmseg/configs/_base_/datasets/potsdam.py | 81 +++++++++++++++++++ mmseg/configs/_base_/default_runtime.py | 22 +++++ .../configs/_base_/schedules/schedule_160k.py | 43 ++++++++++ .../configs/_base_/schedules/schedule_20k.py | 36 +++++++++ .../configs/_base_/schedules/schedule_240k.py | 34 ++++++++ .../configs/_base_/schedules/schedule_25k.py | 43 ++++++++++ .../configs/_base_/schedules/schedule_320k.py | 36 +++++++++ .../configs/_base_/schedules/schedule_40k.py | 34 ++++++++ .../configs/_base_/schedules/schedule_80k.py | 42 ++++++++++ 10 files changed, 450 insertions(+) create mode 100644 mmseg/configs/_base_/datasets/loveda.py create mode 100644 mmseg/configs/_base_/datasets/potsdam.py create mode 100644 mmseg/configs/_base_/default_runtime.py create mode 100644 mmseg/configs/_base_/schedules/schedule_160k.py create mode 100644 mmseg/configs/_base_/schedules/schedule_20k.py create mode 100644 mmseg/configs/_base_/schedules/schedule_240k.py create mode 100644 mmseg/configs/_base_/schedules/schedule_25k.py 
create mode 100644 mmseg/configs/_base_/schedules/schedule_320k.py create mode 100644 mmseg/configs/_base_/schedules/schedule_40k.py create mode 100644 mmseg/configs/_base_/schedules/schedule_80k.py diff --git a/mmseg/configs/_base_/datasets/loveda.py b/mmseg/configs/_base_/datasets/loveda.py new file mode 100644 index 0000000000..eb3d358fe4 --- /dev/null +++ b/mmseg/configs/_base_/datasets/loveda.py @@ -0,0 +1,79 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmcv.transforms.loading import LoadImageFromFile +from mmcv.transforms.processing import (RandomFlip, RandomResize, Resize, + TestTimeAug) +from mmengine.dataset.sampler import DefaultSampler, InfiniteSampler + +from mmseg.datasets.loveda import LoveDADataset +from mmseg.datasets.transforms.formatting import PackSegInputs +from mmseg.datasets.transforms.loading import LoadAnnotations +from mmseg.datasets.transforms.transforms import (PhotoMetricDistortion, + RandomCrop) +from mmseg.evaluation import IoUMetric + +# dataset settings +dataset_type = LoveDADataset +data_root = 'data/loveDA' +crop_size = (512, 512) +train_pipeline = [ + dict(type=LoadImageFromFile), + dict(type=LoadAnnotations, reduce_zero_label=True), + dict( + type=RandomResize, + scale=(2048, 512), + ratio_range=(0.5, 2.0), + keep_ratio=True), + dict(type=RandomCrop, crop_size=crop_size, cat_max_ratio=0.75), + dict(type=RandomFlip, prob=0.5), + dict(type=PhotoMetricDistortion), + dict(type=PackSegInputs) +] +test_pipeline = [ + dict(type=LoadImageFromFile), + dict(type=Resize, scale=(1024, 1024), keep_ratio=True), + # add loading annotation after ``Resize`` because ground truth + # does not need to do resize data transform + dict(type=LoadAnnotations, reduce_zero_label=True), + dict(type=PackSegInputs) +] +img_ratios = [0.5, 0.75, 1.0, 1.25, 1.5, 1.75] +tta_pipeline = [ + dict(type=LoadImageFromFile, backend_args=None), + dict( + type=TestTimeAug, + transforms=[[ + dict(type=Resize, scale_factor=r, keep_ratio=True) + for r in 
img_ratios + ], + [ + dict(type=RandomFlip, prob=0., direction='horizontal'), + dict(type=RandomFlip, prob=1., direction='horizontal') + ], [dict(type=LoadAnnotations)], + [dict(type=PackSegInputs)]]) +] +train_dataloader = dict( + batch_size=2, + num_workers=12, + persistent_workers=True, + sampler=dict(type=InfiniteSampler, shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_prefix=dict( + img_path='img_dir/train', seg_map_path='ann_dir/train'), + pipeline=train_pipeline)) + +val_dataloader = dict( + batch_size=1, + num_workers=4, + persistent_workers=True, + sampler=dict(type=DefaultSampler, shuffle=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_prefix=dict(img_path='img_dir/val', seg_map_path='ann_dir/val'), + pipeline=test_pipeline)) + +test_dataloader = val_dataloader +val_evaluator = dict(type=IoUMetric, iou_metrics=['mIoU']) +test_evaluator = val_evaluator diff --git a/mmseg/configs/_base_/datasets/potsdam.py b/mmseg/configs/_base_/datasets/potsdam.py new file mode 100644 index 0000000000..33a4ebfd8f --- /dev/null +++ b/mmseg/configs/_base_/datasets/potsdam.py @@ -0,0 +1,81 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmcv.transforms.loading import LoadImageFromFile +from mmcv.transforms.processing import (RandomFlip, RandomResize, Resize, + TestTimeAug) +from mmengine.dataset.sampler import DefaultSampler, InfiniteSampler + +from mmseg.datasets.potsdam import PotsdamDataset +from mmseg.datasets.transforms.formatting import PackSegInputs +from mmseg.datasets.transforms.loading import LoadAnnotations +from mmseg.datasets.transforms.transforms import (PhotoMetricDistortion, + RandomCrop) +from mmseg.evaluation import IoUMetric + +# dataset settings +dataset_type = PotsdamDataset +data_root = 'data/potsdam' +crop_size = (512, 512) +train_pipeline = [ + dict(type=LoadImageFromFile), + dict(type=LoadAnnotations, reduce_zero_label=True), + dict( + type=RandomResize, + scale=(512, 512), + ratio_range=(0.5, 2.0), + keep_ratio=True), + dict(type=RandomCrop, crop_size=crop_size, cat_max_ratio=0.75), + dict(type=RandomFlip, prob=0.5), + dict(type=PhotoMetricDistortion), + dict(type=PackSegInputs) +] +test_pipeline = [ + dict(type=LoadImageFromFile), + dict(type=Resize, scale=(512, 512), keep_ratio=True), + # add loading annotation after ``Resize`` because ground truth + # does not need to do resize data transform + dict(type=LoadAnnotations, reduce_zero_label=True), + dict(type=PackSegInputs) +] +img_ratios = [0.5, 0.75, 1.0, 1.25, 1.5, 1.75] +tta_pipeline = [ + dict(type=LoadImageFromFile, backend_args=None), + dict( + type=TestTimeAug, + transforms=[[ + dict(type=Resize, scale_factor=r, keep_ratio=True) + for r in img_ratios + ], + [ + dict(type=RandomFlip, prob=0., direction='horizontal'), + dict(type=RandomFlip, prob=1., direction='horizontal') + ], [dict(type=LoadAnnotations)], + [dict(type=PackSegInputs)]]) +] + +train_dataloader = dict( + batch_size=2, + num_workers=4, + persistent_workers=True, + sampler=dict(type=InfiniteSampler, shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_prefix=dict( + img_path='img_dir/train', 
seg_map_path='ann_dir/train'), + pipeline=train_pipeline)) + +val_dataloader = dict( + batch_size=1, + num_workers=4, + persistent_workers=True, + sampler=dict(type=DefaultSampler, shuffle=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_prefix=dict(img_path='img_dir/val', seg_map_path='ann_dir/val'), + pipeline=test_pipeline)) +test_dataloader = val_dataloader + +val_evaluator = dict( + type=IoUMetric, iou_metrics=['mIoU']) # 'mDice', 'mFscore' +test_evaluator = val_evaluator diff --git a/mmseg/configs/_base_/default_runtime.py b/mmseg/configs/_base_/default_runtime.py new file mode 100644 index 0000000000..c90502096b --- /dev/null +++ b/mmseg/configs/_base_/default_runtime.py @@ -0,0 +1,22 @@ +# Copyright (c) OpenMMLab. All rights reserved. + +from mmengine.visualization import LocalVisBackend + +from mmseg.models import SegTTAModel +from mmseg.visualization import SegLocalVisualizer + +env_cfg = dict( + cudnn_benchmark=False, + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + dist_cfg=dict(backend='nccl'), +) +vis_backends = [dict(type=LocalVisBackend)] +visualizer = dict( + type=SegLocalVisualizer, vis_backends=vis_backends, name='visualizer') +log_processor = dict(by_epoch=False) +log_level = 'INFO' +load_from = None +resume = False + +tta_model = dict(type=SegTTAModel) +default_scope = None diff --git a/mmseg/configs/_base_/schedules/schedule_160k.py b/mmseg/configs/_base_/schedules/schedule_160k.py new file mode 100644 index 0000000000..294d6ee3f5 --- /dev/null +++ b/mmseg/configs/_base_/schedules/schedule_160k.py @@ -0,0 +1,43 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim.optimizer.optimizer_wrapper import OptimWrapper +from mmengine.optim.scheduler.lr_scheduler import PolyLR +from mmengine.runner.loops import IterBasedTrainLoop, TestLoop, ValLoop +from torch.optim.sgd import SGD + +from mmseg.engine import SegVisualizationHook + +# optimizer +optimizer = dict( + type=SGD, + lr=0.01, + momentum=0.9, + weight_decay=0.0005 +) + +optim_wrapper = dict(type=OptimWrapper, optimizer=optimizer, clip_grad=None) + +# learning policy +param_scheduler = [ + dict( + type=PolyLR, + eta_min=1e-4, + power=0.9, + begin=0, + end=160000, + by_epoch=False) +] +# training schedule for 160k + +train_cfg = dict(type=IterBasedTrainLoop, max_iters=160000, val_interval=8000) +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) + +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50, log_metric_by_epoch=False), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, by_epoch=False, interval=8000), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=SegVisualizationHook)) diff --git a/mmseg/configs/_base_/schedules/schedule_20k.py b/mmseg/configs/_base_/schedules/schedule_20k.py new file mode 100644 index 0000000000..255300a1f4 --- /dev/null +++ b/mmseg/configs/_base_/schedules/schedule_20k.py @@ -0,0 +1,36 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim.optimizer.optimizer_wrapper import OptimWrapper +from mmengine.optim.scheduler.lr_scheduler import PolyLR +from mmengine.runner.loops import IterBasedTrainLoop, TestLoop, ValLoop +from torch.optim.sgd import SGD + +from mmseg.engine import SegVisualizationHook + +# optimizer +optimizer = dict(type=SGD, lr=0.01, momentum=0.9, weight_decay=0.0005) +optim_wrapper = dict(type=OptimWrapper, optimizer=optimizer, clip_grad=None) + +# learning policy +param_scheduler = [ + dict( + type=PolyLR, + eta_min=1e-4, + power=0.9, + begin=0, + end=20000, + by_epoch=False) +] +# training schedule for 20k +train_cfg = dict(type=IterBasedTrainLoop, max_iters=20000, val_interval=2000) +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) + +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50, log_metric_by_epoch=False), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, by_epoch=False, interval=2000), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=SegVisualizationHook)) diff --git a/mmseg/configs/_base_/schedules/schedule_240k.py b/mmseg/configs/_base_/schedules/schedule_240k.py new file mode 100644 index 0000000000..cf9e5d3c94 --- /dev/null +++ b/mmseg/configs/_base_/schedules/schedule_240k.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim.optimizer.optimizer_wrapper import OptimWrapper +from mmengine.optim.scheduler.lr_scheduler import PolyLR +from mmengine.runner.loops import IterBasedTrainLoop, TestLoop, ValLoop +# from mmengine.runner.loops import EpochBasedTrainLoop +from torch.optim.sgd import SGD + +from mmseg.engine import SegVisualizationHook + +optimizer = dict(type=SGD, lr=0.01, momentum=0.9, weight_decay=0.0005) +optim_wrapper = dict(type=OptimWrapper, optimizer=optimizer, clip_grad=None) +# learning policy +param_scheduler = [ + dict( + type=PolyLR, + eta_min=1e-4, + power=0.9, + begin=0, + end=240000, + by_epoch=False) +] +# training schedule for 240k +train_cfg = dict(type=IterBasedTrainLoop, max_iters=240000, val_interval=24000) +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50, log_metric_by_epoch=False), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, by_epoch=False, interval=24000), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=SegVisualizationHook)) diff --git a/mmseg/configs/_base_/schedules/schedule_25k.py b/mmseg/configs/_base_/schedules/schedule_25k.py new file mode 100644 index 0000000000..8a3ebf405e --- /dev/null +++ b/mmseg/configs/_base_/schedules/schedule_25k.py @@ -0,0 +1,43 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim.optimizer.optimizer_wrapper import OptimWrapper +from mmengine.optim.scheduler.lr_scheduler import ConstantLR, LinearLR +from mmengine.runner.loops import IterBasedTrainLoop, TestLoop, ValLoop +# from mmengine.runner.loops import EpochBasedTrainLoop +from torch.optim.adamw import AdamW + +from mmseg.engine import SegVisualizationHook +from mmseg.engine.schedulers import PolyLRRatio + +# optimizer +optimizer = dict(type=AdamW, lr=0.01, weight_decay=0.1) + +optim_wrapper = dict(type=OptimWrapper, optimizer=optimizer, clip_grad=None) +# learning policy + +# learning policy +param_scheduler = [ + dict(type=LinearLR, start_factor=3e-2, begin=0, end=12000, by_epoch=False), + dict( + type=PolyLRRatio, + eta_min_ratio=3e-2, + power=0.9, + begin=12000, + end=24000, + by_epoch=False), + dict(type=ConstantLR, by_epoch=False, factor=1, begin=24000, end=25000) +] + +# training schedule for 25k +train_cfg = dict(type=IterBasedTrainLoop, max_iters=25000, val_interval=1000) +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) + +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50, log_metric_by_epoch=False), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, by_epoch=False, interval=1000), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=SegVisualizationHook)) diff --git a/mmseg/configs/_base_/schedules/schedule_320k.py b/mmseg/configs/_base_/schedules/schedule_320k.py new file mode 100644 index 0000000000..dae323ec48 --- /dev/null +++ b/mmseg/configs/_base_/schedules/schedule_320k.py @@ -0,0 +1,36 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim.optimizer.optimizer_wrapper import OptimWrapper +from mmengine.optim.scheduler.lr_scheduler import PolyLR +from mmengine.runner.loops import IterBasedTrainLoop, TestLoop, ValLoop +# from mmengine.runner.loops import EpochBasedTrainLoop +from torch.optim.sgd import SGD + +from mmseg.engine import SegVisualizationHook + +# optimizer +optimizer = dict(type=SGD, lr=0.01, momentum=0.9, weight_decay=0.0005) +optim_wrapper = dict(type=OptimWrapper, optimizer=optimizer, clip_grad=None) + +# learning policy +param_scheduler = [ + dict( + type=PolyLR, + eta_min=1e-4, + power=0.9, + begin=0, + end=320000, + by_epoch=False) +] +# training schedule for 320k +train_cfg = dict(type=IterBasedTrainLoop, max_iters=320000, val_interval=32000) +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50, log_metric_by_epoch=False), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, by_epoch=False, interval=32000), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=SegVisualizationHook)) diff --git a/mmseg/configs/_base_/schedules/schedule_40k.py b/mmseg/configs/_base_/schedules/schedule_40k.py new file mode 100644 index 0000000000..b4b2ea42b5 --- /dev/null +++ b/mmseg/configs/_base_/schedules/schedule_40k.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim.optimizer.optimizer_wrapper import OptimWrapper +from mmengine.optim.scheduler.lr_scheduler import PolyLR +from mmengine.runner.loops import IterBasedTrainLoop, TestLoop, ValLoop +from torch.optim.sgd import SGD + +from mmseg.engine import SegVisualizationHook + +# optimizer +optimizer = dict(type=SGD, lr=0.01, momentum=0.9, weight_decay=0.0005) +optim_wrapper = dict(type=OptimWrapper, optimizer=optimizer, clip_grad=None) + +param_scheduler = [ + dict( + type=PolyLR, + eta_min=1e-4, + power=0.9, + begin=0, + end=40000, + by_epoch=False) +] +# training schedule for 40k +train_cfg = dict(type=IterBasedTrainLoop, max_iters=40000, val_interval=4000) +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50, log_metric_by_epoch=False), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, by_epoch=False, interval=4000), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=SegVisualizationHook)) diff --git a/mmseg/configs/_base_/schedules/schedule_80k.py b/mmseg/configs/_base_/schedules/schedule_80k.py new file mode 100644 index 0000000000..3e711ca891 --- /dev/null +++ b/mmseg/configs/_base_/schedules/schedule_80k.py @@ -0,0 +1,42 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim.optimizer.optimizer_wrapper import OptimWrapper +from mmengine.optim.scheduler.lr_scheduler import PolyLR +from mmengine.runner.loops import IterBasedTrainLoop, TestLoop, ValLoop +from torch.optim.sgd import SGD + +from mmseg.engine import SegVisualizationHook + +# optimizer +optimizer = dict( + type=SGD, + lr=0.01, + momentum=0.9, + weight_decay=0.0005 +) + +optim_wrapper = dict(type=OptimWrapper, optimizer=optimizer, clip_grad=None) + +# learning policy +param_scheduler = [ + dict( + type=PolyLR, + eta_min=1e-4, + power=0.9, + begin=0, + end=80000, + by_epoch=False) +] +# training schedule for 80k +train_cfg = dict(type=IterBasedTrainLoop, max_iters=80000, val_interval=8000) +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) + +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50, log_metric_by_epoch=False), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, by_epoch=False, interval=8000), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=SegVisualizationHook))