_preprocess and _postprocess as public methods

Bobholamovic 2 years ago
parent c3acca5a9a
commit cdc9d93884
Changed files:
  1. paddlers/deploy/predictor.py (12 lines changed)
  2. paddlers/tasks/change_detector.py (12 lines changed)
  3. paddlers/tasks/classifier.py (18 lines changed)
  4. paddlers/tasks/object_detector.py (8 lines changed)
  5. paddlers/tasks/segmenter.py (12 lines changed)
  6. test_tipc/infer.py (12 lines changed)
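
The change itself is mechanical: each trainer class exposes its test-time hooks under public names, and the two inference wrappers (paddlers/deploy/predictor.py and test_tipc/infer.py) are updated to call them. A minimal before/after sketch, where model, images, and transforms stand in for any trained task model and its test inputs (the call signature follows the hunks below):

    def run_preprocess(model, images, transforms):
        # Old (protected) spelling, removed by this commit:
        #   return model._preprocess(images, transforms, to_tensor=False)
        # New (public) spelling:
        return model.preprocess(images, transforms, to_tensor=False)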

paddlers/deploy/predictor.py

@@ -146,7 +146,7 @@ class Predictor(object):
         return predictor
 
     def preprocess(self, images, transforms):
-        preprocessed_samples = self._model._preprocess(
+        preprocessed_samples = self._model.preprocess(
             images, transforms, to_tensor=False)
         if self._model.model_type == 'classifier':
             preprocessed_samples = {'image': preprocessed_samples[0]}
@@ -172,12 +172,12 @@
     def postprocess(self, net_outputs, topk=1, ori_shape=None, transforms=None):
         if self._model.model_type == 'classifier':
             true_topk = min(self._model.num_classes, topk)
-            if self._model._postprocess is None:
+            if self._model.postprocess is None:
                 self._model.build_postprocess_from_labels(topk)
-            # XXX: Convert ndarray to tensor as self._model._postprocess requires
+            # XXX: Convert ndarray to tensor as self._model.postprocess requires
             assert len(net_outputs) == 1
             net_outputs = paddle.to_tensor(net_outputs[0])
-            outputs = self._model._postprocess(net_outputs)
+            outputs = self._model.postprocess(net_outputs)
             class_ids = map(itemgetter('class_ids'), outputs)
             scores = map(itemgetter('scores'), outputs)
             label_names = map(itemgetter('label_names'), outputs)
@@ -187,7 +187,7 @@
                 'label_names_map': n,
             } for l, s, n in zip(class_ids, scores, label_names)]
         elif self._model.model_type in ('segmenter', 'change_detector'):
-            label_map, score_map = self._model._postprocess(
+            label_map, score_map = self._model.postprocess(
                 net_outputs,
                 batch_origin_shape=ori_shape,
                 transforms=transforms.transforms)
@@ -200,7 +200,7 @@
                 k: v
                 for k, v in zip(['bbox', 'bbox_num', 'mask'], net_outputs)
             }
-            preds = self._model._postprocess(net_outputs)
+            preds = self._model.postprocess(net_outputs)
         else:
             logging.error(
                 "Invalid model type {}.".format(self._model.model_type),

paddlers/tasks/change_detector.py

@@ -111,10 +111,10 @@ class BaseChangeDetector(BaseModel):
         if mode == 'test':
             origin_shape = inputs[2]
             if self.status == 'Infer':
-                label_map_list, score_map_list = self._postprocess(
+                label_map_list, score_map_list = self.postprocess(
                     net_out, origin_shape, transforms=inputs[3])
             else:
-                logit_list = self._postprocess(
+                logit_list = self.postprocess(
                     logit, origin_shape, transforms=inputs[3])
                 label_map_list = []
                 score_map_list = []
@@ -142,7 +142,7 @@
                 raise ValueError("Expected label.ndim == 4 but got {}".format(
                     label.ndim))
             origin_shape = [label.shape[-2:]]
-            pred = self._postprocess(
+            pred = self.postprocess(
                 pred, origin_shape, transforms=inputs[3])[0]  # NCHW
             intersect_area, pred_area, label_area = paddleseg.utils.metrics.calculate_area(
                 pred, label, self.num_classes)
@@ -553,7 +553,7 @@
             images = [img_file]
         else:
             images = img_file
-        batch_im1, batch_im2, batch_origin_shape = self._preprocess(
+        batch_im1, batch_im2, batch_origin_shape = self.preprocess(
             images, transforms, self.model_type)
         self.net.eval()
         data = (batch_im1, batch_im2, batch_origin_shape, transforms.transforms)
@@ -664,7 +664,7 @@
         dst_data = None
         print("GeoTiff saved in {}.".format(save_file))
 
-    def _preprocess(self, images, transforms, to_tensor=True):
+    def preprocess(self, images, transforms, to_tensor=True):
         self._check_transforms(transforms, 'test')
         batch_im1, batch_im2 = list(), list()
         batch_ori_shape = list()
@@ -736,7 +736,7 @@
             batch_restore_list.append(restore_list)
         return batch_restore_list
 
-    def _postprocess(self, batch_pred, batch_origin_shape, transforms):
+    def postprocess(self, batch_pred, batch_origin_shape, transforms):
         batch_restore_list = BaseChangeDetector.get_transforms_shape_info(
             batch_origin_shape, transforms)
         if isinstance(batch_pred, (tuple, list)) and self.status == 'Infer':
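
With the hooks public, the bi-temporal preprocessing can also be driven outside of predict(). A sketch that assumes, as in predict() above, that a change detector consumes pairs of image paths, and that a model loaded with paddlers.tasks.load_model() carries its test_transforms; checkpoint and file names are hypothetical:

    import paddlers as pdrs

    model = pdrs.tasks.load_model('output/bit/best_model')   # hypothetical checkpoint
    batch_im1, batch_im2, batch_ori_shape = model.preprocess(
        [('t1.tif', 't2.tif')], model.test_transforms, to_tensor=False)
    # Given the raw network output, the public hook restores the original sizes:
    #   restored = model.postprocess(
    #       net_out, batch_ori_shape, transforms=model.test_transforms.transforms)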

paddlers/tasks/classifier.py

@@ -61,7 +61,7 @@ class BaseClassifier(BaseModel):
         self.metrics = None
         self.losses = None
         self.labels = None
-        self._postprocess = None
+        self.postprocess = None
         if params.get('with_net', True):
             params.pop('with_net', None)
             self.net = self.build_net(**params)
@@ -121,13 +121,12 @@
         net_out = net(inputs[0])
 
         if mode == 'test':
-            return self._postprocess(net_out)
+            return self.postprocess(net_out)
 
         outputs = OrderedDict()
         label = paddle.to_tensor(inputs[1], dtype="int64")
 
         if mode == 'eval':
-            # print(self._postprocess(net_out)[0]) # for test
             label = paddle.unsqueeze(label, axis=-1)
             metric_dict = self.metrics(net_out, label)
             outputs['top1'] = metric_dict["top1"]
@@ -176,13 +175,13 @@
         label_dict = dict()
         for i, label in enumerate(self.labels):
             label_dict[i] = label
-        self._postprocess = build_postprocess({
+        self.postprocess = build_postprocess({
             "name": "Topk",
             "topk": topk,
             "class_id_map_file": None
         })
         # Add class_id_map from model.yml
-        self._postprocess.class_id_map = label_dict
+        self.postprocess.class_id_map = label_dict
 
     def train(self,
               num_epochs,
@@ -247,8 +246,7 @@
         if self.losses is None:
             self.losses = self.default_loss()
         self.metrics = self.default_metric()
-        self._postprocess = self.default_postprocess(train_dataset.label_list)
-        # print(self._postprocess.class_id_map)
+        self.postprocess = self.default_postprocess(train_dataset.label_list)
 
         if optimizer is None:
             num_steps_each_epoch = train_dataset.num_samples // train_batch_size
@@ -454,12 +452,12 @@
             images = [img_file]
         else:
             images = img_file
-        batch_im, batch_origin_shape = self._preprocess(images, transforms,
+        batch_im, batch_origin_shape = self.preprocess(images, transforms,
                                                         self.model_type)
         self.net.eval()
         data = (batch_im, batch_origin_shape, transforms.transforms)
-        if self._postprocess is None:
+        if self.postprocess is None:
             self.build_postprocess_from_labels()
 
         outputs = self.run(self.net, data, 'test')
@@ -480,7 +478,7 @@
         }
         return prediction
 
-    def _preprocess(self, images, transforms, to_tensor=True):
+    def preprocess(self, images, transforms, to_tensor=True):
         self._check_transforms(transforms, 'test')
         batch_im = list()
         batch_ori_shape = list()
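
One subtlety in this file: postprocess is an attribute rather than a method. It starts out as None and is later filled with a Topk callable, either by default_postprocess() during train() or lazily by build_postprocess_from_labels(), as the hunks above show. A small sketch of the lazy path, with a hypothetical checkpoint path and load_model() assumed as before:

    import paddlers as pdrs

    model = pdrs.tasks.load_model('output/hrnet/best_model')   # hypothetical classifier checkpoint
    if model.postprocess is None:
        model.build_postprocess_from_labels(5)   # build a top-5 Topk from model.labels
    print(model.postprocess.class_id_map)        # id -> label name mapping

Because the other task classes define postprocess as a plain method, the is-None check only makes sense under the model_type == 'classifier' guard, which is exactly how predictor.py and test_tipc/infer.py use it.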

paddlers/tasks/object_detector.py

@@ -580,16 +580,16 @@ class BaseDetector(BaseModel):
         else:
             images = img_file
 
-        batch_samples = self._preprocess(images, transforms)
+        batch_samples = self.preprocess(images, transforms)
         self.net.eval()
         outputs = self.run(self.net, batch_samples, 'test')
-        prediction = self._postprocess(outputs)
+        prediction = self.postprocess(outputs)
 
         if isinstance(img_file, (str, np.ndarray)):
             prediction = prediction[0]
 
         return prediction
 
-    def _preprocess(self, images, transforms, to_tensor=True):
+    def preprocess(self, images, transforms, to_tensor=True):
         self._check_transforms(transforms, 'test')
         batch_samples = list()
         for im in images:
@@ -606,7 +606,7 @@
         return batch_samples
 
-    def _postprocess(self, batch_pred):
+    def postprocess(self, batch_pred):
         infer_result = {}
         if 'bbox' in batch_pred:
             bboxes = batch_pred['bbox']
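
The detector pair is the leanest of the four: preprocess() builds the sample dicts and postprocess() consumes the network output directly, exactly as the predict() hunk above does. A sketch of the same call chain driven outside predict(), with a hypothetical checkpoint and image path and the usual load_model()/test_transforms assumptions:

    import paddlers as pdrs

    model = pdrs.tasks.load_model('output/ppyolo/best_model')   # hypothetical detector checkpoint
    model.net.eval()
    batch_samples = model.preprocess(['demo.tif'], model.test_transforms)  # to_tensor defaults to True
    outputs = model.run(model.net, batch_samples, 'test')                  # forward pass, as in predict()
    preds = model.postprocess(outputs)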

paddlers/tasks/segmenter.py

@@ -110,10 +110,10 @@ class BaseSegmenter(BaseModel):
         if mode == 'test':
             origin_shape = inputs[1]
             if self.status == 'Infer':
-                label_map_list, score_map_list = self._postprocess(
+                label_map_list, score_map_list = self.postprocess(
                     net_out, origin_shape, transforms=inputs[2])
             else:
-                logit_list = self._postprocess(
+                logit_list = self.postprocess(
                     logit, origin_shape, transforms=inputs[2])
                 label_map_list = []
                 score_map_list = []
@@ -141,7 +141,7 @@
                 raise ValueError("Expected label.ndim == 4 but got {}".format(
                     label.ndim))
             origin_shape = [label.shape[-2:]]
-            pred = self._postprocess(
+            pred = self.postprocess(
                 pred, origin_shape, transforms=inputs[2])[0]  # NCHW
             intersect_area, pred_area, label_area = paddleseg.utils.metrics.calculate_area(
                 pred, label, self.num_classes)
@@ -526,7 +526,7 @@
             images = [img_file]
         else:
             images = img_file
-        batch_im, batch_origin_shape = self._preprocess(images, transforms,
+        batch_im, batch_origin_shape = self.preprocess(images, transforms,
                                                         self.model_type)
         self.net.eval()
         data = (batch_im, batch_origin_shape, transforms.transforms)
@@ -631,7 +631,7 @@
         dst_data = None
         print("GeoTiff saved in {}.".format(save_file))
 
-    def _preprocess(self, images, transforms, to_tensor=True):
+    def preprocess(self, images, transforms, to_tensor=True):
         self._check_transforms(transforms, 'test')
         batch_im = list()
         batch_ori_shape = list()
@@ -698,7 +698,7 @@
             batch_restore_list.append(restore_list)
         return batch_restore_list
 
-    def _postprocess(self, batch_pred, batch_origin_shape, transforms):
+    def postprocess(self, batch_pred, batch_origin_shape, transforms):
         batch_restore_list = BaseSegmenter.get_transforms_shape_info(
             batch_origin_shape, transforms)
         if isinstance(batch_pred, (tuple, list)) and self.status == 'Infer':
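
As the last hunk shows, the segmenter's postprocess() leans on the static get_transforms_shape_info() helper to undo the test transforms per sample. A sketch of that relationship, under the same load_model()/test_transforms assumptions as above, with hypothetical paths and the forward pass omitted:

    import paddlers as pdrs

    model = pdrs.tasks.load_model('output/deeplabv3p/best_model')   # hypothetical checkpoint
    batch_im, batch_ori_shape = model.preprocess(
        ['demo.tif'], model.test_transforms, to_tensor=False)
    # The same per-sample restore info that postprocess() computes internally:
    restore_list = model.get_transforms_shape_info(
        batch_ori_shape, model.test_transforms.transforms)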

test_tipc/infer.py

@@ -141,7 +141,7 @@ class TIPCPredictor(object):
         return config
 
     def preprocess(self, images, transforms):
-        preprocessed_samples = self._model._preprocess(
+        preprocessed_samples = self._model.preprocess(
             images, transforms, to_tensor=False)
         if self._model.model_type == 'classifier':
             preprocessed_samples = {'image': preprocessed_samples[0]}
@@ -167,12 +167,12 @@
     def postprocess(self, net_outputs, topk=1, ori_shape=None, transforms=None):
         if self._model.model_type == 'classifier':
             true_topk = min(self._model.num_classes, topk)
-            if self._model._postprocess is None:
+            if self._model.postprocess is None:
                 self._model.build_postprocess_from_labels(topk)
-            # XXX: Convert ndarray to tensor as self._model._postprocess requires
+            # XXX: Convert ndarray to tensor as self._model.postprocess requires
            assert len(net_outputs) == 1
             net_outputs = paddle.to_tensor(net_outputs[0])
-            outputs = self._model._postprocess(net_outputs)
+            outputs = self._model.postprocess(net_outputs)
             class_ids = map(itemgetter('class_ids'), outputs)
             scores = map(itemgetter('scores'), outputs)
             label_names = map(itemgetter('label_names'), outputs)
@@ -182,7 +182,7 @@
                 'label_names_map': n,
             } for l, s, n in zip(class_ids, scores, label_names)]
         elif self._model.model_type in ('segmenter', 'change_detector'):
-            label_map, score_map = self._model._postprocess(
+            label_map, score_map = self._model.postprocess(
                 net_outputs,
                 batch_origin_shape=ori_shape,
                 transforms=transforms.transforms)
@@ -195,7 +195,7 @@
                 k: v
                 for k, v in zip(['bbox', 'bbox_num', 'mask'], net_outputs)
             }
-            preds = self._model._postprocess(net_outputs)
+            preds = self._model.postprocess(net_outputs)
         else:
             logging.error(
                 "Invalid model type {}.".format(self._model.model_type),
