Use official datasets

Branch: own
Bobholamovic · 3 years ago
parent 1932543c37
commit edd2351874
21 changed files (lines changed in parentheses):

 1. paddlers/utils/download.py (11)
 2. tutorials/train/change_detection/bit.py (4)
 3. tutorials/train/change_detection/cdnet.py (4)
 4. tutorials/train/change_detection/dsamnet.py (4)
 5. tutorials/train/change_detection/dsifn.py (4)
 6. tutorials/train/change_detection/fc_ef.py (4)
 7. tutorials/train/change_detection/fc_siam_conc.py (4)
 8. tutorials/train/change_detection/fc_siam_diff.py (4)
 9. tutorials/train/change_detection/snunet.py (4)
10. tutorials/train/change_detection/stanet.py (4)
11. tutorials/train/classification/condensenetv2_b_rs_mul.py (52)
12. tutorials/train/classification/hrnet.py (6)
13. tutorials/train/classification/mobilenetv3.py (6)
14. tutorials/train/classification/resnet50_vd.py (6)
15. tutorials/train/object_detection/faster_rcnn.py (15)
16. tutorials/train/object_detection/ppyolo.py (16)
17. tutorials/train/object_detection/ppyolotiny.py (16)
18. tutorials/train/object_detection/ppyolov2.py (18)
19. tutorials/train/object_detection/yolov3.py (16)
20. tutorials/train/semantic_segmentation/deeplabv3p.py (14)
21. tutorials/train/semantic_segmentation/unet.py (14)

paddlers/utils/download.py
@@ -106,10 +106,11 @@ def download(url, path, md5sum=None):
         if total_size:
             download_size = 0
             current_time = time.time()
-            for chunk in tqdm.tqdm(
+            pb = tqdm.tqdm(
                 req.iter_content(chunk_size=1024),
                 total=(int(total_size) + 1023) // 1024,
-                unit='KB'):
+                unit='KB')
+            for chunk in pb:
                 f.write(chunk)
                 download_size += 1024
                 if download_size % 524288 == 0:
@@ -120,9 +121,9 @@ def download(url, path, md5sum=None):
                     speed = int(524288 / (time.time() - current_time + 0.01)
                                 / 1024.0)
                     current_time = time.time()
-                    logging.debug(
-                        "Downloading: TotalSize={}M, DownloadSize={}M, Speed={}KB/s"
-                        .format(total_size_m, download_size_m, speed))
+                    pb.set_description(
+                        "Downloading: TotalSize={}M, DownloadedSize={}M"
+                        .format(total_size_m, download_size_m))
         else:
             for chunk in req.iter_content(chunk_size=1024):
                 if chunk:
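For context, the change above keeps a handle to the tqdm progress bar and refreshes its label with set_description() instead of emitting periodic logging.debug() messages, so the size counters appear on the bar itself. Below is a minimal, self-contained sketch of that pattern; the chunk generator and sizes are illustrative stand-ins, not PaddleRS code.

# Minimal sketch of the tqdm pattern used above: keep a handle to the bar
# and update its label with set_description() while consuming chunks.
import tqdm

total_size = 4 * 1024 * 1024  # pretend the download is 4 MB
chunks = (b"\0" * 1024 for _ in range(total_size // 1024))  # stand-in for req.iter_content()

pb = tqdm.tqdm(chunks, total=(total_size + 1023) // 1024, unit='KB')
download_size = 0
for chunk in pb:
    download_size += len(chunk)
    if download_size % 524288 == 0:  # refresh the label every 512 KB
        total_size_m = round(total_size / 1024.0 / 1024.0, 2)
        download_size_m = round(download_size / 1024.0 / 1024.0, 2)
        pb.set_description("Downloading: TotalSize={}M, DownloadedSize={}M"
                           .format(total_size_m, download_size_m))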

tutorials/train/change_detection/bit.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/bit/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order
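The remaining tutorial changes below follow the same pattern: the third-party mirror URL is replaced with the dataset hosted at paddlers.bj.bcebos.com, downloaded and unpacked under ./data/. As a hedged aside that is not part of this commit, a caller who wants to avoid re-downloading on repeated runs could wrap the call in the same os.path.exists() guard the object-detection tutorials used before this change; the directory name below is inferred from the file-list paths in these scripts.

# Hypothetical guard, not part of the commit: skip the download when the
# decompressed AirChange data already sits under ./data/airchange/.
import os

import paddlers as pdrs

DATA_DIR = './data/airchange/'
if not os.path.exists(DATA_DIR):
    pdrs.utils.download_and_decompress(
        'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')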

tutorials/train/change_detection/cdnet.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/cdnet/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/change_detection/dsamnet.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/dsamnet/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/change_detection/dsifn.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/dsifn/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/change_detection/fc_ef.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/fc_ef/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/change_detection/fc_siam_conc.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/fc_siam_conc/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/change_detection/fc_siam_diff.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/fc_siam_diff/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/change_detection/snunet.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/snunet/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/change_detection/stanet.py
@@ -16,8 +16,8 @@ EVAL_FILE_LIST_PATH = './data/airchange/eval.txt'
 EXP_DIR = './output/stanet/'
 
 # Download and decompress the AirChange dataset
-airchange_dataset = 'http://mplab.sztaki.hu/~bcsaba/test/SZTAKI_AirChange_Benchmark.zip'
-pdrs.utils.download_and_decompress(airchange_dataset, path=DATA_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/airchange.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/classification/condensenetv2_b_rs_mul.py (deleted)
@@ -1,52 +0,0 @@
-import paddlers as pdrs
-from paddlers import transforms as T
-
-# Define the transforms for training and validation
-train_transforms = T.Compose([
-    # Read the image
-    T.DecodeImg(),
-    T.SelectBand([5, 10, 15, 20, 25]),  # for test
-    T.Resize(target_size=224),
-    T.RandomHorizontalFlip(),
-    T.Normalize(
-        mean=[0.5, 0.5, 0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5, 0.5, 0.5]),
-    T.ArrangeClassifier('train')
-])
-
-eval_transforms = T.Compose([
-    T.DecodeImg(), T.SelectBand([5, 10, 15, 20, 25]), T.Resize(target_size=224),
-    T.Normalize(
-        mean=[0.5, 0.5, 0.5, 0.5, 0.5],
-        std=[0.5, 0.5, 0.5, 0.5, 0.5]), T.ArrangeClassifier('eval')
-])
-
-# Define the datasets used for training and validation
-train_dataset = pdrs.datasets.ClasDataset(
-    data_dir='tutorials/train/classification/DataSet',
-    file_list='tutorials/train/classification/DataSet/train_list.txt',
-    label_list='tutorials/train/classification/DataSet/label_list.txt',
-    transforms=train_transforms,
-    num_workers=0,
-    shuffle=True)
-
-eval_dataset = pdrs.datasets.ClasDataset(
-    data_dir='tutorials/train/classification/DataSet',
-    file_list='tutorials/train/classification/DataSet/val_list.txt',
-    label_list='tutorials/train/classification/DataSet/label_list.txt',
-    transforms=eval_transforms,
-    num_workers=0,
-    shuffle=False)
-
-# Initialize the model
-num_classes = len(train_dataset.labels)
-model = pdrs.tasks.CondenseNetV2_b(in_channels=5, num_classes=num_classes)
-
-# Run training
-model.train(
-    num_epochs=100,
-    pretrain_weights=None,
-    train_dataset=train_dataset,
-    train_batch_size=4,
-    eval_dataset=eval_dataset,
-    learning_rate=3e-4,
-    save_dir='output/condensenetv2_b')

tutorials/train/classification/hrnet.py
@@ -6,8 +6,6 @@
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/ucmerced/'
 # Dataset directory
 DATA_DIR = './data/ucmerced/'
 # Path of the training set `file_list` file
@@ -20,8 +18,8 @@ LABEL_LIST_PATH = './data/ucmerced/labels.txt'
 EXP_DIR = './output/hrnet/'
 
 # Download and decompress the UC Merced dataset
-ucmerced_dataset = 'http://weegee.vision.ucmerced.edu/datasets/UCMerced_LandUse.zip'
-pdrs.utils.download_and_decompress(ucmerced_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/ucmerced.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/classification/mobilenetv3.py
@@ -6,8 +6,6 @@
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/ucmerced/'
 # Dataset directory
 DATA_DIR = './data/ucmerced/'
 # Path of the training set `file_list` file
@@ -20,8 +18,8 @@ LABEL_LIST_PATH = './data/ucmerced/labels.txt'
 EXP_DIR = './output/mobilenetv3/'
 
 # Download and decompress the UC Merced dataset
-ucmerced_dataset = 'http://weegee.vision.ucmerced.edu/datasets/UCMerced_LandUse.zip'
-pdrs.utils.download_and_decompress(ucmerced_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/ucmerced.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/classification/resnet50_vd.py
@@ -6,8 +6,6 @@
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/ucmerced/'
 # Dataset directory
 DATA_DIR = './data/ucmerced/'
 # Path of the training set `file_list` file
@@ -20,8 +18,8 @@ LABEL_LIST_PATH = './data/ucmerced/labels.txt'
 EXP_DIR = './output/resnet50_vd/'
 
 # Download and decompress the UC Merced dataset
-ucmerced_dataset = 'http://weegee.vision.ucmerced.edu/datasets/UCMerced_LandUse.zip'
-pdrs.utils.download_and_decompress(ucmerced_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/ucmerced.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/object_detection/faster_rcnn.py
@@ -8,23 +8,20 @@ import os
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/sarship/'
 # Dataset directory
-DATA_DIR = './data/sarship/sar_ship_1/'
+DATA_DIR = './data/sarship/'
 # Path of the training set `file_list` file
-TRAIN_FILE_LIST_PATH = './data/sarship/sar_ship_1/train.txt'
+TRAIN_FILE_LIST_PATH = './data/sarship/train.txt'
 # Path of the validation set `file_list` file
-EVAL_FILE_LIST_PATH = './data/sarship/sar_ship_1/valid.txt'
+EVAL_FILE_LIST_PATH = './data/sarship/eval.txt'
 # Path of the dataset class information file
-LABEL_LIST_PATH = './data/sarship/sar_ship_1/labels.txt'
+LABEL_LIST_PATH = './data/sarship/labels.txt'
 # Experiment directory, where output model weights and results are saved
 EXP_DIR = './output/faster_rcnn/'
 
 # Download and decompress the SAR ship detection dataset
-sarship_dataset = 'https://paddleseg.bj.bcebos.com/dataset/sar_ship_1.tar.gz'
-if not os.path.exists(DATA_DIR):
-    pdrs.utils.download_and_decompress(sarship_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/sarship.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/object_detection/ppyolo.py
@@ -8,24 +8,20 @@ import os
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/sarship/'
 # Dataset directory
-DATA_DIR = './data/sarship/sar_ship_1/'
+DATA_DIR = './data/sarship/'
 # Path of the training set `file_list` file
-TRAIN_FILE_LIST_PATH = './data/sarship/sar_ship_1/train.txt'
+TRAIN_FILE_LIST_PATH = './data/sarship/train.txt'
 # Path of the validation set `file_list` file
-EVAL_FILE_LIST_PATH = './data/sarship/sar_ship_1/valid.txt'
+EVAL_FILE_LIST_PATH = './data/sarship/eval.txt'
 # Path of the dataset class information file
-LABEL_LIST_PATH = './data/sarship/sar_ship_1/labels.txt'
+LABEL_LIST_PATH = './data/sarship/labels.txt'
 # Experiment directory, where output model weights and results are saved
 EXP_DIR = './output/ppyolo/'
 
 # Download and decompress the SAR ship detection dataset
-# Do not download again if the directory already exists
-sarship_dataset = 'https://paddleseg.bj.bcebos.com/dataset/sar_ship_1.tar.gz'
-if not os.path.exists(DATA_DIR):
-    pdrs.utils.download_and_decompress(sarship_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/sarship.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/object_detection/ppyolotiny.py
@@ -8,24 +8,20 @@ import os
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/sarship/'
 # Dataset directory
-DATA_DIR = './data/sarship/sar_ship_1/'
+DATA_DIR = './data/sarship/'
 # Path of the training set `file_list` file
-TRAIN_FILE_LIST_PATH = './data/sarship/sar_ship_1/train.txt'
+TRAIN_FILE_LIST_PATH = './data/sarship/train.txt'
 # Path of the validation set `file_list` file
-EVAL_FILE_LIST_PATH = './data/sarship/sar_ship_1/valid.txt'
+EVAL_FILE_LIST_PATH = './data/sarship/eval.txt'
 # Path of the dataset class information file
-LABEL_LIST_PATH = './data/sarship/sar_ship_1/labels.txt'
+LABEL_LIST_PATH = './data/sarship/labels.txt'
 # Experiment directory, where output model weights and results are saved
 EXP_DIR = './output/ppyolotiny/'
 
 # Download and decompress the SAR ship detection dataset
-# Do not download again if the directory already exists
-sarship_dataset = 'https://paddleseg.bj.bcebos.com/dataset/sar_ship_1.tar.gz'
-if not os.path.exists(DATA_DIR):
-    pdrs.utils.download_and_decompress(sarship_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/sarship.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/object_detection/ppyolov2.py
@@ -8,24 +8,20 @@ import os
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/sarship/'
 # Dataset directory
-DATA_DIR = './data/sarship/sar_ship_1/'
+DATA_DIR = './data/sarship/'
 # Path of the training set `file_list` file
-TRAIN_FILE_LIST_PATH = './data/sarship/sar_ship_1/train.txt'
+TRAIN_FILE_LIST_PATH = './data/sarship/train.txt'
 # Path of the validation set `file_list` file
-EVAL_FILE_LIST_PATH = './data/sarship/sar_ship_1/valid.txt'
+EVAL_FILE_LIST_PATH = './data/sarship/eval.txt'
 # Path of the dataset class information file
-LABEL_LIST_PATH = './data/sarship/sar_ship_1/labels.txt'
+LABEL_LIST_PATH = './data/sarship/labels.txt'
 # Experiment directory, where output model weights and results are saved
-EXP_DIR = './output/ppyolov2/'
+EXP_DIR = './output/ppyolo2/'
 
 # Download and decompress the SAR ship detection dataset
-# Do not download again if the directory already exists
-sarship_dataset = 'https://paddleseg.bj.bcebos.com/dataset/sar_ship_1.tar.gz'
-if not os.path.exists(DATA_DIR):
-    pdrs.utils.download_and_decompress(sarship_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/sarship.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/object_detection/yolov3.py
@@ -8,24 +8,20 @@ import os
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/sarship/'
 # Dataset directory
-DATA_DIR = './data/sarship/sar_ship_1/'
+DATA_DIR = './data/sarship/'
 # Path of the training set `file_list` file
-TRAIN_FILE_LIST_PATH = './data/sarship/sar_ship_1/train.txt'
+TRAIN_FILE_LIST_PATH = './data/sarship/train.txt'
 # Path of the validation set `file_list` file
-EVAL_FILE_LIST_PATH = './data/sarship/sar_ship_1/valid.txt'
+EVAL_FILE_LIST_PATH = './data/sarship/eval.txt'
 # Path of the dataset class information file
-LABEL_LIST_PATH = './data/sarship/sar_ship_1/labels.txt'
+LABEL_LIST_PATH = './data/sarship/labels.txt'
 # Experiment directory, where output model weights and results are saved
 EXP_DIR = './output/yolov3/'
 
 # Download and decompress the SAR ship detection dataset
-# Do not download again if the directory already exists
-sarship_dataset = 'https://paddleseg.bj.bcebos.com/dataset/sar_ship_1.tar.gz'
-if not os.path.exists(DATA_DIR):
-    pdrs.utils.download_and_decompress(sarship_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/sarship.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/semantic_segmentation/deeplabv3p.py
@@ -6,16 +6,14 @@
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/rsseg/'
 # Dataset directory
-DATA_DIR = './data/rsseg/remote_sensing_seg/'
+DATA_DIR = './data/rsseg/'
 # Path of the training set `file_list` file
-TRAIN_FILE_LIST_PATH = './data/rsseg/remote_sensing_seg/train.txt'
+TRAIN_FILE_LIST_PATH = './data/rsseg/train.txt'
 # Path of the validation set `file_list` file
-EVAL_FILE_LIST_PATH = './data/rsseg/remote_sensing_seg/val.txt'
+EVAL_FILE_LIST_PATH = './data/rsseg/val.txt'
 # Path of the dataset class information file
-LABEL_LIST_PATH = './data/rsseg/remote_sensing_seg/labels.txt'
+LABEL_LIST_PATH = './data/rsseg/labels.txt'
 # Experiment directory, where output model weights and results are saved
 EXP_DIR = './output/deeplabv3p/'
 
@@ -23,8 +21,8 @@ EXP_DIR = './output/deeplabv3p/'
 NUM_BANDS = 10
 
 # Download and decompress the multispectral land cover classification dataset
-seg_dataset = 'https://paddleseg.bj.bcebos.com/dataset/remote_sensing_seg.zip'
-pdrs.utils.download_and_decompress(seg_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/rsseg.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order

tutorials/train/semantic_segmentation/unet.py
@@ -6,16 +6,14 @@
 import paddlers as pdrs
 from paddlers import transforms as T
 
-# Directory for storing downloaded files
-DOWNLOAD_DIR = './data/rsseg/'
 # Dataset directory
-DATA_DIR = './data/rsseg/remote_sensing_seg/'
+DATA_DIR = './data/rsseg/'
 # Path of the training set `file_list` file
-TRAIN_FILE_LIST_PATH = './data/rsseg/remote_sensing_seg/train.txt'
+TRAIN_FILE_LIST_PATH = './data/rsseg/train.txt'
 # Path of the validation set `file_list` file
-EVAL_FILE_LIST_PATH = './data/rsseg/remote_sensing_seg/val.txt'
+EVAL_FILE_LIST_PATH = './data/rsseg/val.txt'
 # Path of the dataset class information file
-LABEL_LIST_PATH = './data/rsseg/remote_sensing_seg/labels.txt'
+LABEL_LIST_PATH = './data/rsseg/labels.txt'
 # Experiment directory, where output model weights and results are saved
 EXP_DIR = './output/unet/'
 
@@ -23,8 +21,8 @@ EXP_DIR = './output/unet/'
 NUM_BANDS = 10
 
 # Download and decompress the multispectral land cover classification dataset
-seg_dataset = 'https://paddleseg.bj.bcebos.com/dataset/remote_sensing_seg.zip'
-pdrs.utils.download_and_decompress(seg_dataset, path=DOWNLOAD_DIR)
+pdrs.utils.download_and_decompress(
+    'https://paddlers.bj.bcebos.com/datasets/rsseg.zip', path='./data/')
 
 # Define the data transforms used for training and validation (data augmentation, preprocessing, etc.)
 # Use Compose to combine multiple transforms; the transforms in Compose are executed sequentially in the given order
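As a hedged sanity check that is not part of the tutorials: after download_and_decompress() has run, the flattened file lists referenced above should sit directly under ./data/rsseg/, which can be verified before constructing the datasets. The snippet below is illustrative only.

# Quick check (illustrative only) that the flattened rsseg layout is in place.
import os

for p in ('./data/rsseg/train.txt', './data/rsseg/val.txt',
          './data/rsseg/labels.txt'):
    assert os.path.exists(p), '{} is missing; re-run the dataset download'.format(p)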
