Update style

own
Bobholamovic 3 years ago
parent 9a15ed9043
commit 670519bf26
  1. 12
      paddlers/tasks/change_detector.py
  2. 2
      paddlers/tasks/classifier.py
  3. 2
      paddlers/tasks/object_detector.py
  4. 10
      paddlers/tasks/segmenter.py
  5. 8
      paddlers/transforms/batch_operators.py
  6. 34
      paddlers/transforms/operators.py
  7. 23
      tests/testing_utils.py

@@ -501,7 +501,7 @@ class BaseChangeDetector(BaseModel):
Do inference. Do inference.
Args: Args:
Args: Args:
img_file(List[tuple], Tuple[str or np.ndarray]): img_file (list[tuple] | tuple[str | np.ndarray]):
Tuple of image paths or decoded image data for bi-temporal images, which also could constitute a list, Tuple of image paths or decoded image data for bi-temporal images, which also could constitute a list,
meaning all image pairs to be predicted as a mini-batch. meaning all image pairs to be predicted as a mini-batch.
transforms(paddlers.transforms.Compose or None, optional): transforms(paddlers.transforms.Compose or None, optional):
@@ -556,14 +556,14 @@ class BaseChangeDetector(BaseModel):
Do inference. Do inference.
Args: Args:
Args: Args:
img_file(List[str]): img_file(list[str]):
List of image paths. List of image paths.
save_dir(str): save_dir(str):
Directory that contains saved geotiff file. Directory that contains saved geotiff file.
block_size(List[int] or Tuple[int], int): block_size(list[int] | tuple[int] | int, optional):
The size of block. Size of block.
overlap(List[int] or Tuple[int], int): overlap(list[int] | tuple[int] | int, optional):
The overlap between two blocks. Defaults to 36. Overlap between two blocks. Defaults to 36.
transforms(paddlers.transforms.Compose or None, optional): transforms(paddlers.transforms.Compose or None, optional):
Transforms for inputs. If None, the transforms for evaluation process will be used. Defaults to None. Transforms for inputs. If None, the transforms for evaluation process will be used. Defaults to None.
""" """

@@ -410,7 +410,7 @@ class BaseClassifier(BaseModel):
Do inference. Do inference.
Args: Args:
Args: Args:
img_file(List[np.ndarray or str], str or np.ndarray): img_file(list[np.ndarray | str] | str | np.ndarray):
Image path or decoded image data, which also could constitute a list, meaning all images to be Image path or decoded image data, which also could constitute a list, meaning all images to be
predicted as a mini-batch. predicted as a mini-batch.
transforms(paddlers.transforms.Compose or None, optional): transforms(paddlers.transforms.Compose or None, optional):

@@ -511,7 +511,7 @@ class BaseDetector(BaseModel):
""" """
Do inference. Do inference.
Args: Args:
img_file(List[np.ndarray or str], str or np.ndarray): img_file(list[np.ndarray | str] | str | np.ndarray):
Image path or decoded image data, which also could constitute a list,meaning all images to be Image path or decoded image data, which also could constitute a list,meaning all images to be
predicted as a mini-batch. predicted as a mini-batch.
transforms(paddlers.transforms.Compose or None, optional): transforms(paddlers.transforms.Compose or None, optional):

@@ -478,7 +478,7 @@ class BaseSegmenter(BaseModel):
Do inference. Do inference.
Args: Args:
Args: Args:
img_file(List[np.ndarray or str], str or np.ndarray): img_file(list[np.ndarray | str] | str | np.ndarray):
Image path or decoded image data, which also could constitute a list,meaning all images to be Image path or decoded image data, which also could constitute a list,meaning all images to be
predicted as a mini-batch. predicted as a mini-batch.
transforms(paddlers.transforms.Compose or None, optional): transforms(paddlers.transforms.Compose or None, optional):
@@ -533,10 +533,10 @@ class BaseSegmenter(BaseModel):
Image path. Image path.
save_dir(str): save_dir(str):
Directory that contains saved geotiff file. Directory that contains saved geotiff file.
block_size(List[int] or Tuple[int], int): block_size(list[int] | tuple[int] | int):
The size of block. Size of block.
overlap(List[int] or Tuple[int], int): overlap(list[int] | tuple[int] | int, optional):
The overlap between two blocks. Defaults to 36. Overlap between two blocks. Defaults to 36.
transforms(paddlers.transforms.Compose or None, optional): transforms(paddlers.transforms.Compose or None, optional):
Transforms for inputs. If None, the transforms for evaluation process will be used. Defaults to None. Transforms for inputs. If None, the transforms for evaluation process will be used. Defaults to None.
""" """

@@ -74,7 +74,7 @@ class BatchRandomResize(Transform):
Attention: If interp is 'RANDOM', the interpolation method will be chose randomly. Attention: If interp is 'RANDOM', the interpolation method will be chose randomly.
Args: Args:
target_sizes (List[int], List[list or tuple] or Tuple[list or tuple]): target_sizes (list[int] | list[list | tuple] | tuple[list | tuple]):
Multiple target sizes, each target size is an int or list/tuple of length 2. Multiple target sizes, each target size is an int or list/tuple of length 2.
interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional): interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional):
Interpolation method of resize. Defaults to 'LINEAR'. Interpolation method of resize. Defaults to 'LINEAR'.
@@ -93,7 +93,7 @@ class BatchRandomResize(Transform):
interp_dict.keys())) interp_dict.keys()))
self.interp = interp self.interp = interp
assert isinstance(target_sizes, list), \ assert isinstance(target_sizes, list), \
"target_size must be List" "target_size must be a list."
for i, item in enumerate(target_sizes): for i, item in enumerate(target_sizes):
if isinstance(item, int): if isinstance(item, int):
target_sizes[i] = (item, item) target_sizes[i] = (item, item)
@@ -113,7 +113,7 @@ class BatchRandomResizeByShort(Transform):
Attention: If interp is 'RANDOM', the interpolation method will be chose randomly. Attention: If interp is 'RANDOM', the interpolation method will be chose randomly.
Args: Args:
short_sizes (List[int], Tuple[int]): Target sizes of the shorter side of the image(s). short_sizes (list[int] | tuple[int]): Target sizes of the shorter side of the image(s).
max_size (int, optional): The upper bound of longer side of the image(s). max_size (int, optional): The upper bound of longer side of the image(s).
If max_size is -1, no upper bound is applied. Defaults to -1. If max_size is -1, no upper bound is applied. Defaults to -1.
interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional): interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional):
@@ -134,7 +134,7 @@ class BatchRandomResizeByShort(Transform):
interp_dict.keys())) interp_dict.keys()))
self.interp = interp self.interp = interp
assert isinstance(short_sizes, list), \ assert isinstance(short_sizes, list), \
"short_sizes must be List" "short_sizes must be a list."
self.short_sizes = short_sizes self.short_sizes = short_sizes
self.max_size = max_size self.max_size = max_size

@@ -250,7 +250,7 @@ class Compose(Transform):
All input images are in Height-Width-Channel ([H, W, C]) format. All input images are in Height-Width-Channel ([H, W, C]) format.
Args: Args:
transforms (List[paddlers.transforms.Transform]): List of data preprocess or augmentations. transforms (list[paddlers.transforms.Transform]): List of data preprocess or augmentations.
Raises: Raises:
TypeError: Invalid type of transforms. TypeError: Invalid type of transforms.
ValueError: Invalid length of transforms. ValueError: Invalid length of transforms.
@@ -260,7 +260,7 @@ class Compose(Transform):
super(Compose, self).__init__() super(Compose, self).__init__()
if not isinstance(transforms, list): if not isinstance(transforms, list):
raise TypeError( raise TypeError(
'Type of transforms is invalid. Must be List, but received is {}' 'Type of transforms is invalid. Must be a list, but received is {}'
.format(type(transforms))) .format(type(transforms)))
if len(transforms) < 1: if len(transforms) < 1:
raise ValueError( raise ValueError(
@@ -308,7 +308,7 @@ class Resize(Transform):
Attention: If interp is 'RANDOM', the interpolation method will be chose randomly. Attention: If interp is 'RANDOM', the interpolation method will be chose randomly.
Args: Args:
target_size (int, List[int] or Tuple[int]): Target size. If int, the height and width share the same target_size. target_size (int, list[int] | tuple[int]): Target size. If int, the height and width share the same target_size.
Otherwise, target_size represents [target height, target width]. Otherwise, target_size represents [target height, target width].
interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional): interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional):
Interpolation method of resize. Defaults to 'LINEAR'. Interpolation method of resize. Defaults to 'LINEAR'.
@@ -427,7 +427,7 @@ class RandomResize(Transform):
Attention: If interp is 'RANDOM', the interpolation method will be chose randomly. Attention: If interp is 'RANDOM', the interpolation method will be chose randomly.
Args: Args:
target_sizes (List[int], List[list or tuple] or Tuple[list or tuple]): target_sizes (list[int] | list[list | tuple] | tuple[list | tuple]):
Multiple target sizes, each target size is an int or list/tuple. Multiple target sizes, each target size is an int or list/tuple.
interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional): interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional):
Interpolation method of resize. Defaults to 'LINEAR'. Interpolation method of resize. Defaults to 'LINEAR'.
@@ -447,7 +447,7 @@ class RandomResize(Transform):
interp_dict.keys())) interp_dict.keys()))
self.interp = interp self.interp = interp
assert isinstance(target_sizes, list), \ assert isinstance(target_sizes, list), \
"target_size must be List" "target_size must be a list."
for i, item in enumerate(target_sizes): for i, item in enumerate(target_sizes):
if isinstance(item, int): if isinstance(item, int):
target_sizes[i] = (item, item) target_sizes[i] = (item, item)
@@ -507,7 +507,7 @@ class RandomResizeByShort(Transform):
Attention: If interp is 'RANDOM', the interpolation method will be chose randomly. Attention: If interp is 'RANDOM', the interpolation method will be chose randomly.
Args: Args:
short_sizes (List[int]): Target size of the shorter side of the image(s). short_sizes (list[int]): Target size of the shorter side of the image(s).
max_size (int, optional): The upper bound of longer side of the image(s). If max_size is -1, no upper bound is applied. Defaults to -1. max_size (int, optional): The upper bound of longer side of the image(s). If max_size is -1, no upper bound is applied. Defaults to -1.
interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional): Interpolation method of resize. Defaults to 'LINEAR'. interp ({'NEAREST', 'LINEAR', 'CUBIC', 'AREA', 'LANCZOS4', 'RANDOM'}, optional): Interpolation method of resize. Defaults to 'LINEAR'.
@@ -526,7 +526,7 @@ class RandomResizeByShort(Transform):
interp_dict.keys())) interp_dict.keys()))
self.interp = interp self.interp = interp
assert isinstance(short_sizes, list), \ assert isinstance(short_sizes, list), \
"short_sizes must be List" "short_sizes must be a list."
self.short_sizes = short_sizes self.short_sizes = short_sizes
self.max_size = max_size self.max_size = max_size
@@ -818,16 +818,16 @@ class RandomVerticalFlip(Transform):
class Normalize(Transform): class Normalize(Transform):
""" """
Apply min-max normalization to the image(s) in input. Apply normalization to the input image(s). The normalization steps are:
1. im = (im - min_value) * 1 / (max_value - min_value) 1. im = (im - min_value) * 1 / (max_value - min_value)
2. im = im - mean 2. im = im - mean
3. im = im / std 3. im = im / std
Args: Args:
mean(List[float] or Tuple[float], optional): Mean of input image(s). Defaults to [0.485, 0.456, 0.406]. mean(list[float] | tuple[float], optional): Mean of input image(s). Defaults to [0.485, 0.456, 0.406].
std(List[float] or Tuple[float], optional): Standard deviation of input image(s). Defaults to [0.229, 0.224, 0.225]. std(list[float] | tuple[float], optional): Standard deviation of input image(s). Defaults to [0.229, 0.224, 0.225].
min_val(List[float] or Tuple[float], optional): Minimum value of input image(s). Defaults to [0, 0, 0, ]. min_val(list[float] | tuple[float], optional): Minimum value of input image(s). Defaults to [0, 0, 0, ].
max_val(List[float] or Tuple[float], optional): Max value of input image(s). Defaults to [255., 255., 255.]. max_val(list[float] | tuple[float], optional): Max value of input image(s). Defaults to [255., 255., 255.].
""" """
def __init__(self, def __init__(self,
@@ -917,12 +917,12 @@ class RandomCrop(Transform):
4. Resize the cropped area to crop_size by crop_size. 4. Resize the cropped area to crop_size by crop_size.
Args: Args:
crop_size(int, List[int] or Tuple[int]): Target size of the cropped area. If None, the cropped area will not be crop_size(int, list[int] | tuple[int]): Target size of the cropped area. If None, the cropped area will not be
resized. Defaults to None. resized. Defaults to None.
aspect_ratio (List[float], optional): Aspect ratio of cropped region in [min, max] format. Defaults to [.5, 2.]. aspect_ratio (list[float], optional): Aspect ratio of cropped region in [min, max] format. Defaults to [.5, 2.].
thresholds (List[float], optional): Iou thresholds to decide a valid bbox crop. thresholds (list[float], optional): Iou thresholds to decide a valid bbox crop.
Defaults to [.0, .1, .3, .5, .7, .9]. Defaults to [.0, .1, .3, .5, .7, .9].
scaling (List[float], optional): Ratio between the cropped region and the original image in [min, max] format. scaling (list[float], optional): Ratio between the cropped region and the original image in [min, max] format.
Defaults to [.3, 1.]. Defaults to [.3, 1.].
num_attempts (int, optional): The number of tries before giving up. Defaults to 50. num_attempts (int, optional): The number of tries before giving up. Defaults to 50.
allow_no_crop (bool, optional): Whether returning without doing crop is allowed. Defaults to True. allow_no_crop (bool, optional): Whether returning without doing crop is allowed. Defaults to True.
@@ -1140,7 +1140,7 @@ class RandomExpand(Transform):
Args: Args:
upper_ratio(float, optional): The maximum ratio to which the original image is expanded. Defaults to 4.. upper_ratio(float, optional): The maximum ratio to which the original image is expanded. Defaults to 4..
prob(float, optional): The probability of apply expanding. Defaults to .5. prob(float, optional): The probability of apply expanding. Defaults to .5.
im_padding_value(List[float] or Tuple[float], optional): RGB filling value for the image. Defaults to (127.5, 127.5, 127.5). im_padding_value(list[float] | tuple[float], optional): RGB filling value for the image. Defaults to (127.5, 127.5, 127.5).
label_padding_value(int, optional): Filling value for the mask. Defaults to 255. label_padding_value(int, optional): Filling value for the mask. Defaults to 255.
See Also: See Also:

@@ -58,9 +58,10 @@ class _CommonTestNamespace:
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
''' """
Set the decorators for all test function Set the decorators for all test function
''' """
for key, value in cls.__dict__.items(): for key, value in cls.__dict__.items():
if key.startswith('test'): if key.startswith('test'):
decorator_func_list = ["_test_places"] decorator_func_list = ["_test_places"]
@@ -72,9 +73,9 @@ class _CommonTestNamespace:
setattr(cls, key, value) setattr(cls, key, value)
def _catch_warnings(func): def _catch_warnings(func):
''' """
Catch the warnings and treat them as errors for each test. Catch the warnings and treat them as errors for each test.
''' """
def wrapper(self, *args, **kwargs): def wrapper(self, *args, **kwargs):
with warnings.catch_warnings(record=True) as w: with warnings.catch_warnings(record=True) as w:
@@ -90,9 +91,9 @@ class _CommonTestNamespace:
return wrapper return wrapper
def _test_places(func): def _test_places(func):
''' """
Setting the running place for each test. Setting the running place for each test.
''' """
def wrapper(self, *args, **kwargs): def wrapper(self, *args, **kwargs):
places = self.places places = self.places
@@ -150,7 +151,7 @@ class _CommonTestNamespace:
expected_result, expected_result,
rtol=1.e-5, rtol=1.e-5,
atol=1.e-8): atol=1.e-8):
''' """
Check whether result and expected result are equal, including shape. Check whether result and expected result are equal, including shape.
Args: Args:
@@ -162,7 +163,8 @@ class _CommonTestNamespace:
relative tolerance, default 1.e-5. relative tolerance, default 1.e-5.
atol: float atol: float
absolute tolerance, default 1.e-8 absolute tolerance, default 1.e-8
''' """
self._check_output_impl(result, expected_result, rtol, atol) self._check_output_impl(result, expected_result, rtol, atol)
def check_output_not_equal(self, def check_output_not_equal(self,
@@ -170,7 +172,7 @@ class _CommonTestNamespace:
expected_result, expected_result,
rtol=1.e-5, rtol=1.e-5,
atol=1.e-8): atol=1.e-8):
''' """
Check whether result and expected result are not equal, including shape. Check whether result and expected result are not equal, including shape.
Args: Args:
@@ -182,7 +184,8 @@ class _CommonTestNamespace:
relative tolerance, default 1.e-5. relative tolerance, default 1.e-5.
atol: float atol: float
absolute tolerance, default 1.e-8 absolute tolerance, default 1.e-8
''' """
self._check_output_impl( self._check_output_impl(
result, expected_result, rtol, atol, equal=False) result, expected_result, rtol, atol, equal=False)

Loading…
Cancel
Save