parent
0ac304641b
commit
5f353e6c51
20 changed files with 765 additions and 16 deletions
@ -0,0 +1,20 @@ |
||||
class Attach(object):
    """Decorator factory that registers objects as attributes of a target.

    Typical usage::

        attach = Attach.to(some_module)

        @attach
        class Foo: ...
    """

    def __init__(self, dst):
        self.dst = dst

    def __call__(self, obj, name=None):
        # Default to the object's own __name__ (works for functions/classes).
        attr_name = obj.__name__ if name is None else name
        if hasattr(self.dst, attr_name):
            existing = getattr(self.dst, attr_name)
            raise RuntimeError(
                f"{self.dst} already has the attribute {attr_name}, which is {existing}."
            )
        setattr(self.dst, attr_name, obj)
        # Keep the destination's public-API list in sync, if it declares one.
        if hasattr(self.dst, '__all__'):
            self.dst.__all__.append(attr_name)
        return obj

    @staticmethod
    def to(dst):
        """Return an `Attach` instance bound to `dst`."""
        return Attach(dst)
@ -0,0 +1,253 @@ |
||||
#!/usr/bin/env python |
||||
|
||||
import argparse |
||||
import os.path as osp |
||||
from collections.abc import Mapping |
||||
|
||||
import yaml |
||||
|
||||
|
||||
def _chain_maps(*maps):
    """Merge `maps` into a single dict; earlier maps take precedence.

    Nested mappings are merged recursively, mirroring the lookup
    semantics of `collections.ChainMap` but producing a plain dict.
    """
    merged = dict()
    for key in set().union(*maps):
        candidates = [m[key] for m in maps if key in m]
        first = candidates[0]
        # A mapping value is merged level by level; anything else wins outright.
        merged[key] = (_chain_maps(*candidates)
                       if isinstance(first, Mapping) else first)
    return merged
||||
|
||||
|
||||
def read_config(config_path):
    """Load a YAML config file; an empty document yields an empty dict."""
    with open(config_path, 'r', encoding='utf-8') as fp:
        content = yaml.safe_load(fp)
    # yaml.safe_load returns None for an empty file.
    return content or {}
||||
|
||||
|
||||
def parse_configs(cfg_path, inherit=True):
    """Read the config at `cfg_path`, optionally resolving `_base_` chains.

    When `inherit` is True, each config may name a parent file under the
    `_base_` key; parents are read transitively and merged with
    `_chain_maps` (child settings override parent settings).

    Args:
        cfg_path (str): Path of the top-level YAML config.
        inherit (bool): Whether to follow `_base_` references.

    Returns:
        dict: The merged configuration.
    """
    if not inherit:
        return read_config(cfg_path)
    cfgs = [read_config(cfg_path)]
    curr_path = cfg_path
    while cfgs[-1].get('_base_'):
        base_path = cfgs[-1].pop('_base_')
        # Resolve relative to the file that declares `_base_`, not the
        # top-level config (the original always used the top-level file's
        # directory, breaking inheritance chains that span directories).
        curr_path = osp.normpath(osp.join(osp.dirname(curr_path), base_path))
        cfgs.append(read_config(curr_path))
    return _chain_maps(*cfgs)
||||
|
||||
|
||||
def _cfg2args(cfg, parser, prefix=''):
    """Register every entry of the (nested) config `cfg` as a CLI option.

    Nested dicts and `CfgNode`s are flattened into dotted option names
    (``parent.child``). Returns ``(parser, node_keys)`` where `node_keys`
    is the set of dotted keys whose parsed values must be re-wrapped as
    `CfgNode`s afterwards (see `_args2cfg`).
    """
    node_keys = set()
    for k, v in cfg.items():
        opt = prefix + k
        if isinstance(v, list):
            if len(v) == 0:
                # Element type unknown for an empty list; accept anything.
                parser.add_argument(
                    '--' + opt, type=object, nargs='*', default=v)
            else:
                # Only apply to homogeneous lists
                if isinstance(v[0], CfgNode):
                    node_keys.add(opt)
                parser.add_argument(
                    '--' + opt, type=type(v[0]), nargs='*', default=v)
        elif isinstance(v, dict):
            # Recursively parse a dict
            _, new_node_keys = _cfg2args(v, parser, opt + '.')
            node_keys.update(new_node_keys)
        elif isinstance(v, CfgNode):
            # A node contributes its own key plus its flattened fields.
            node_keys.add(opt)
            _, new_node_keys = _cfg2args(v.to_dict(), parser, opt + '.')
            node_keys.update(new_node_keys)
        elif isinstance(v, bool):
            # NOTE(review): with store_true a True default cannot be turned
            # off from the command line — confirm this is intended.
            parser.add_argument('--' + opt, action='store_true', default=v)
        else:
            parser.add_argument('--' + opt, type=type(v), default=v)
    return parser, node_keys
||||
|
||||
|
||||
def _args2cfg(cfg, args, node_keys):
    """Fold parsed command-line `args` back into the nested dict `cfg`.

    Dotted option names (e.g. ``a.b.c``) are expanded into nested dicts.
    After all values are placed, every key in `node_keys` is re-wrapped
    as a `CfgNode` (or a list of them) so downstream code can build the
    corresponding objects.

    Returns the mutated `cfg`.
    """
    # First pass: place every parsed value at its (possibly nested) slot.
    for key, value in vars(args).items():
        container, leaf = _descend(cfg, key)
        container[leaf] = value

    # Second pass: wrap node-typed values back into CfgNode instances.
    for key in node_keys:
        container, leaf = _descend(cfg, key)
        value = container[leaf]
        container[leaf] = ([CfgNode(v) for v in value]
                           if isinstance(value, list) else CfgNode(value))

    return cfg


def _descend(cfg, key):
    """Walk (creating nested dicts as needed) along dotted `key`.

    Returns ``(parent_dict, leaf_key)`` so the caller can read or write
    the final slot. Shared by both passes of `_args2cfg`, which
    previously duplicated this traversal verbatim.
    """
    container = cfg
    pos = key.find('.')
    while pos != -1:
        container = container.setdefault(key[:pos], {})
        key = key[pos + 1:]
        pos = key.find('.')
    return container, key
||||
|
||||
|
||||
def parse_args(*args, **kwargs):
    """Build the full argument parser and parse `args`.

    If ``--config`` points to an existing YAML file, options found in
    the config are registered as additional command-line arguments (so
    they can be overridden on the command line) and the merged result is
    returned as a nested config dict via `_args2cfg`.

    Raises:
        FileNotFoundError: If ``--config`` is given but does not exist.
    """
    cfg_parser = argparse.ArgumentParser(add_help=False)
    cfg_parser.add_argument('--config', type=str, default='')
    cfg_parser.add_argument('--inherit_off', action='store_true')
    # Honor an explicit argv passed by the caller; previously this always
    # read sys.argv, ignoring `args`/`kwargs`.
    cfg_args = cfg_parser.parse_known_args(*args, **kwargs)[0]
    cfg_path = cfg_args.config
    inherit_on = not cfg_args.inherit_off

    # Main parser (re-parses --config/--inherit_off so they show in --help)
    parser = argparse.ArgumentParser(
        conflict_handler='resolve', parents=[cfg_parser])
    # Global settings
    parser.add_argument('cmd', choices=['train', 'eval'])
    parser.add_argument('task', choices=['cd', 'clas', 'det', 'seg'])

    # Data
    parser.add_argument('--datasets', type=dict, default={})
    parser.add_argument('--transforms', type=dict, default={})
    parser.add_argument('--download_on', action='store_true')
    parser.add_argument('--download_url', type=str, default='')
    parser.add_argument('--download_path', type=str, default='./')

    # Optimizer
    parser.add_argument('--optimizer', type=dict, default={})

    # Training related
    parser.add_argument('--num_epochs', type=int, default=100)
    parser.add_argument('--train_batch_size', type=int, default=8)
    parser.add_argument('--save_interval_epochs', type=int, default=1)
    parser.add_argument('--log_interval_steps', type=int, default=1)
    parser.add_argument('--save_dir', default='../exp/')
    parser.add_argument('--learning_rate', type=float, default=0.01)
    parser.add_argument('--early_stop', action='store_true')
    parser.add_argument('--early_stop_patience', type=int, default=5)
    parser.add_argument('--use_vdl', action='store_true')
    parser.add_argument('--resume_checkpoint', type=str)
    parser.add_argument('--train', type=dict, default={})

    # Loss
    parser.add_argument('--losses', type=dict, nargs='+', default={})

    # Model
    parser.add_argument('--model', type=dict, default={})

    if osp.exists(cfg_path):
        cfg = parse_configs(cfg_path, inherit_on)
        parser, node_keys = _cfg2args(cfg, parser, '')
        # Longest keys first so nested nodes are wrapped before parents.
        node_keys = sorted(node_keys, reverse=True)
        args = parser.parse_args(*args, **kwargs)
        return _args2cfg(dict(), args, node_keys)
    elif cfg_path != '':
        # An explicitly requested config that does not exist is an error;
        # include the offending path (the original raised a bare exception).
        raise FileNotFoundError(cfg_path)
    else:
        # Fixed: the original called parser.parse_args() here, silently
        # dropping the caller-supplied argv.
        args = parser.parse_args(*args, **kwargs)
        return _args2cfg(dict(), args, set())
||||
|
||||
|
||||
class _CfgNodeMeta(yaml.YAMLObjectMetaclass):
    """Metaclass making `CfgNode(...)` idempotent: a node passes through."""

    def __call__(cls, obj):
        # Re-wrapping an existing node returns it unchanged; anything else
        # goes through normal construction.
        return obj if isinstance(obj, CfgNode) else super(
            _CfgNodeMeta, cls).__call__(obj)
||||
|
||||
|
||||
class CfgNode(yaml.YAMLObject, metaclass=_CfgNodeMeta):
    """A YAML-serializable description of an object to construct.

    A node is written in YAML with the ``!Node`` tag and carries:
      * ``type``:   name of the class/callable to instantiate.
      * ``args``:   positional args (list) or keyword args (dict); entries
                    may themselves be `CfgNode`s and are built recursively.
      * ``module``: optional dotted path resolved against `ctx` to locate
                    the module that owns ``type``.

    Thanks to `_CfgNodeMeta`, ``CfgNode(existing_node)`` returns the node
    unchanged.
    """

    yaml_tag = u'!Node'
    yaml_loader = yaml.SafeLoader
    # By default use a lexical scope
    ctx = globals()

    def __init__(self, dict_):
        super().__init__()
        # 'type' is mandatory; 'args' and 'module' are optional.
        self.type = dict_['type']
        self.args = dict_.get('args', [])
        self.module = dict_.get('module', '')

    @classmethod
    def set_context(cls, ctx):
        """Replace the namespace used to resolve `module` paths.

        Returns the previous context so callers can restore it.
        """
        # TODO: Implement dynamic scope with inspect.stack()
        old_ctx = cls.ctx
        cls.ctx = ctx
        return old_ctx

    def build_object(self, mod=None):
        """Instantiate ``self.type`` looked up on `mod` (or on the module
        resolved from ``self.module``), building nested nodes in ``args``
        first.

        Raises:
            NotImplementedError: If ``self.args`` is neither list nor dict.
        """
        if mod is None:
            mod = self._get_module(self.module)
        cls = getattr(mod, self.type)
        if isinstance(self.args, list):
            # Positional arguments.
            args = build_objects(self.args)
            obj = cls(*args)
        elif isinstance(self.args, dict):
            # Keyword arguments.
            args = build_objects(self.args)
            obj = cls(**args)
        else:
            raise NotImplementedError
        return obj

    def _get_module(self, s):
        """Resolve dotted path `s` against ``self.ctx``.

        Returns None when `s` is empty (caller then relies on an explicit
        `mod` argument).
        """
        mod = None
        while s:
            idx = s.find('.')
            if idx == -1:
                next_ = s
                s = ''
            else:
                next_ = s[:idx]
                s = s[idx + 1:]
            if mod is None:
                # First path component comes from the context namespace.
                mod = self.ctx[next_]
            else:
                # Subsequent components are attribute lookups.
                mod = getattr(mod, next_)
        return mod

    @staticmethod
    def build_objects(cfg, mod=None):
        """Recursively build objects from `cfg`.

        `cfg` may be a `CfgNode`, a list or dict of them (nested to any
        depth), or a plain value, which is returned unchanged.
        """
        if isinstance(cfg, list):
            return [CfgNode.build_objects(c, mod=mod) for c in cfg]
        elif isinstance(cfg, CfgNode):
            return cfg.build_object(mod=mod)
        elif isinstance(cfg, dict):
            return {
                k: CfgNode.build_objects(
                    v, mod=mod)
                for k, v in cfg.items()
            }
        else:
            # Leaf value: returned as-is.
            return cfg

    def __repr__(self):
        return f"(type={self.type}, args={self.args}, module={self.module or ' '})"

    @classmethod
    def from_yaml(cls, loader, node):
        """PyYAML constructor hook for the ``!Node`` tag."""
        map_ = loader.construct_mapping(node)
        return cls(map_)

    def items(self):
        # Mapping-style view so dict(self) and _cfg2args can treat a node
        # like a dict of its three fields.
        yield from [('type', self.type), ('args', self.args),
                    ('module', self.module)]

    def to_dict(self):
        """Return a plain-dict copy of this node's fields."""
        return dict(self.items())
||||
|
||||
|
||||
def build_objects(cfg, mod=None):
    """Module-level convenience alias for `CfgNode.build_objects`."""
    return CfgNode.build_objects(cfg, mod=mod)
@ -0,0 +1,6 @@ |
||||
_base_: ./levircd.yaml |
||||
|
||||
save_dir: ./exp/bit/ |
||||
|
||||
model: !Node |
||||
type: BIT |
@ -0,0 +1,12 @@ |
||||
_base_: ../levircd.yaml |
||||
|
||||
save_dir: ./exp/custom_model/iter2_gamma01/ |
||||
|
||||
model: !Node |
||||
type: IterativeBIT |
||||
args: |
||||
num_iters: 2 |
||||
gamma: 0.1 |
||||
num_classes: 2 |
||||
bit_kwargs: |
||||
in_channels: 4 |
@ -0,0 +1,12 @@ |
||||
_base_: ../levircd.yaml |
||||
|
||||
save_dir: ./exp/custom_model/iter2_gamma02/ |
||||
|
||||
model: !Node |
||||
type: IterativeBIT |
||||
args: |
||||
num_iters: 2 |
||||
gamma: 0.2 |
||||
num_classes: 2 |
||||
bit_kwargs: |
||||
in_channels: 4 |
@ -0,0 +1,12 @@ |
||||
_base_: ../levircd.yaml |
||||
|
||||
save_dir: ./exp/custom_model/iter2_gamma05/ |
||||
|
||||
model: !Node |
||||
type: IterativeBIT |
||||
args: |
||||
num_iters: 2 |
||||
gamma: 0.5 |
||||
num_classes: 2 |
||||
bit_kwargs: |
||||
in_channels: 4 |
@ -0,0 +1,12 @@ |
||||
_base_: ../levircd.yaml |
||||
|
||||
save_dir: ./exp/custom_model/iter3_gamma01/ |
||||
|
||||
model: !Node |
||||
type: IterativeBIT |
||||
args: |
||||
num_iters: 3 |
||||
gamma: 0.1 |
||||
num_classes: 2 |
||||
bit_kwargs: |
||||
in_channels: 4 |
@ -0,0 +1,12 @@ |
||||
_base_: ../levircd.yaml |
||||
|
||||
save_dir: ./exp/custom_model/iter3_gamma02/ |
||||
|
||||
model: !Node |
||||
type: IterativeBIT |
||||
args: |
||||
num_iters: 3 |
||||
gamma: 0.2 |
||||
num_classes: 2 |
||||
bit_kwargs: |
||||
in_channels: 4 |
@ -0,0 +1,12 @@ |
||||
_base_: ../levircd.yaml |
||||
|
||||
save_dir: ./exp/custom_model/iter3_gamma05/ |
||||
|
||||
model: !Node |
||||
type: IterativeBIT |
||||
args: |
||||
num_iters: 3 |
||||
gamma: 0.5 |
||||
num_classes: 2 |
||||
bit_kwargs: |
||||
in_channels: 4 |
@ -0,0 +1,12 @@ |
||||
_base_: ../levircd.yaml |
||||
|
||||
save_dir: ./exp/custom_model/iter3_gamma10/ |
||||
|
||||
model: !Node |
||||
type: IterativeBIT |
||||
args: |
||||
num_iters: 3 |
||||
gamma: 1.0 |
||||
num_classes: 2 |
||||
bit_kwargs: |
||||
in_channels: 4 |
@ -0,0 +1,74 @@ |
||||
# Basic configurations of LEVIR-CD dataset |
||||
|
||||
datasets: |
||||
train: !Node |
||||
type: CDDataset |
||||
args: |
||||
data_dir: ./data/levircd/ |
||||
file_list: ./data/levircd/train.txt |
||||
label_list: null |
||||
num_workers: 2 |
||||
shuffle: True |
||||
with_seg_labels: False |
||||
binarize_labels: True |
||||
eval: !Node |
||||
type: CDDataset |
||||
args: |
||||
data_dir: ./data/levircd/ |
||||
file_list: ./data/levircd/val.txt |
||||
label_list: null |
||||
num_workers: 0 |
||||
shuffle: False |
||||
with_seg_labels: False |
||||
binarize_labels: True |
||||
transforms: |
||||
train: |
||||
- !Node |
||||
type: DecodeImg |
||||
- !Node |
||||
type: RandomFlipOrRotate |
||||
args: |
||||
probs: [0.35, 0.35] |
||||
probsf: [0.5, 0.5, 0, 0, 0] |
||||
probsr: [0.33, 0.34, 0.33] |
||||
- !Node |
||||
type: Normalize |
||||
args: |
||||
mean: [0.5, 0.5, 0.5] |
||||
std: [0.5, 0.5, 0.5] |
||||
- !Node |
||||
type: ArrangeChangeDetector |
||||
args: ['train'] |
||||
eval: |
||||
- !Node |
||||
type: DecodeImg |
||||
- !Node |
||||
type: Normalize |
||||
args: |
||||
mean: [0.5, 0.5, 0.5] |
||||
std: [0.5, 0.5, 0.5] |
||||
- !Node |
||||
type: ArrangeChangeDetector |
||||
args: ['eval'] |
||||
download_on: False |
||||
|
||||
num_epochs: 40 |
||||
train_batch_size: 8 |
||||
optimizer: !Node |
||||
type: Adam |
||||
args: |
||||
learning_rate: !Node |
||||
type: StepDecay |
||||
module: paddle.optimizer.lr |
||||
args: |
||||
learning_rate: 0.002 |
||||
step_size: 30 |
||||
gamma: 0.2 |
||||
save_interval_epochs: 10 |
||||
log_interval_steps: 500 |
||||
save_dir: ./exp/ |
||||
learning_rate: 0.002 |
||||
early_stop: False |
||||
early_stop_patience: 5 |
||||
use_vdl: True |
||||
resume_checkpoint: '' |
@ -0,0 +1,58 @@ |
||||
import paddle |
||||
import paddle.nn as nn |
||||
import paddle.nn.functional as F |
||||
import paddlers |
||||
from paddlers.rs_models.cd import BIT |
||||
from attach_tools import Attach |
||||
|
||||
attach = Attach.to(paddlers.rs_models.cd) |
||||
|
||||
|
||||
@attach
class IterativeBIT(nn.Layer):
    """BIT change-detection network applied iteratively.

    At each iteration the previous change-probability map (scaled by
    `gamma`) is concatenated to both input images as an extra channel, so
    the wrapped `BIT` must be built with ``in_channels`` equal to the
    image channels plus one (the configs use ``in_channels: 4``).

    Args:
        num_iters (int): Number of refinement iterations; must be > 0.
        gamma (float): Scale applied to the change probability when
            constructing the rate map.
        num_classes (int): Number of output classes; the rate-map logic
            requires exactly 2.
        bit_kwargs (dict|None): Extra keyword arguments forwarded to
            `BIT`; must not contain 'num_classes'.
    """

    def __init__(self, num_iters=1, gamma=0.1, num_classes=2, bit_kwargs=None):
        super().__init__()

        if num_iters <= 0:
            raise ValueError(
                f"`num_iters` should have positive value, but got {num_iters}.")

        self.num_iters = num_iters
        self.gamma = gamma

        if bit_kwargs is None:
            bit_kwargs = dict()

        if 'num_classes' in bit_kwargs:
            raise KeyError("'num_classes' should not be set in `bit_kwargs`.")
        bit_kwargs['num_classes'] = num_classes

        self.bit = BIT(**bit_kwargs)

    def forward(self, t1, t2):
        # Start from an all-zero rate map (no prior change evidence).
        rate_map = self._init_rate_map(t1.shape)

        for it in range(self.num_iters):
            # Construct inputs
            x1 = self._constr_iter_input(t1, rate_map)
            x2 = self._constr_iter_input(t2, rate_map)
            # Get logits
            logits_list = self.bit(x1, x2)
            # Construct rate map
            prob_map = F.softmax(logits_list[0], axis=1)
            rate_map = self._constr_rate_map(prob_map)

        # Logits of the final iteration.
        return logits_list

    def _constr_iter_input(self, im, rate_map):
        # Append the rate map as an extra channel. Fixed: the original
        # wrote `paddle.concat([im.rate_map], axis=1)` — an attribute-
        # access typo that raises AttributeError at run time instead of
        # concatenating the image with the rate map.
        return paddle.concat([im, rate_map], axis=1)

    def _init_rate_map(self, im_shape):
        # One-channel zero map matching the input's batch and spatial dims.
        b, _, h, w = im_shape
        return paddle.zeros((b, 1, h, w))

    def _constr_rate_map(self, prob_map):
        if prob_map.shape[1] != 2:
            raise ValueError(
                f"`prob_map.shape[1]` must be 2, but got {prob_map.shape[1]}.")
        # Channel 1 is the 'changed' probability; damp it by gamma.
        return (prob_map[:, 1:2] * self.gamma)
@ -0,0 +1,29 @@ |
||||
import paddlers |
||||
from paddlers.tasks.change_detector import BaseChangeDetector |
||||
|
||||
from attach_tools import Attach |
||||
|
||||
attach = Attach.to(paddlers.tasks.change_detector) |
||||
|
||||
|
||||
@attach
class IterativeBIT(BaseChangeDetector):
    """Trainer-side wrapper exposing `IterativeBIT` through the task API."""

    def __init__(self,
                 num_classes=2,
                 use_mixed_loss=False,
                 losses=None,
                 num_iters=1,
                 gamma=0.1,
                 bit_kwargs=None,
                 **params):
        # Fold the model-specific options into the kwargs forwarded to the
        # network builder.
        model_args = {
            'num_iters': num_iters,
            'gamma': gamma,
            'bit_kwargs': bit_kwargs
        }
        params.update(model_args)
        super().__init__(
            model_name='IterativeBIT',
            num_classes=num_classes,
            use_mixed_loss=use_mixed_loss,
            losses=losses,
            **params)
After Width: | Height: | Size: 48 KiB |
@ -0,0 +1,115 @@ |
||||
#!/usr/bin/env python |
||||
|
||||
import os |
||||
|
||||
import paddle |
||||
import paddlers |
||||
from paddlers import transforms as T |
||||
|
||||
import custom_model |
||||
import custom_trainer |
||||
from config_utils import parse_args, build_objects, CfgNode |
||||
|
||||
|
||||
def format_cfg(cfg, indent=0):
    """Render a (possibly nested) config as indented YAML-like text."""
    pad = ' ' * indent
    out = ''
    if isinstance(cfg, dict):
        entries = sorted(cfg.items())
        for idx, (key, val) in enumerate(entries):
            out += pad + str(key) + ': '
            if isinstance(val, (dict, list, CfgNode)):
                # Containers continue on the next line, one level deeper.
                out += '\n' + format_cfg(val, indent=indent + 1)
            else:
                out += str(val)
            if idx != len(entries) - 1:
                out += '\n'
    elif isinstance(cfg, list):
        for idx, val in enumerate(cfg):
            out += pad + '- '
            if isinstance(val, (dict, list, CfgNode)):
                out += '\n' + format_cfg(val, indent=indent + 1)
            else:
                out += str(val)
            if idx != len(cfg) - 1:
                out += '\n'
    elif isinstance(cfg, CfgNode):
        # A node prints its three fields, args rendered recursively.
        out += pad + f"type: {cfg.type}" + '\n'
        out += pad + f"module: {cfg.module}" + '\n'
        out += pad + 'args: \n' + format_cfg(cfg.args, indent + 1)
    return out
||||
|
||||
|
||||
if __name__ == '__main__':
    # Resolve CfgNode `module` paths against this script's globals, so
    # nodes can reference names imported here (paddle, paddlers, T, ...).
    CfgNode.set_context(globals())

    # Parse CLI + YAML config into one nested dict and echo it.
    cfg = parse_args()
    print(format_cfg(cfg))

    # Automatically download data
    if cfg['download_on']:
        paddlers.utils.download_and_decompress(
            cfg['download_url'], path=cfg['download_path'])

    if cfg['cmd'] == 'train':
        # The training dataset is only needed for the 'train' command.
        if not isinstance(cfg['datasets']['train'].args, dict):
            raise ValueError("args of train dataset must be a dict!")
        if cfg['datasets']['train'].args.get('transforms', None) is not None:
            raise ValueError(
                "Found key 'transforms' in args of train dataset and the value is not None."
            )
        train_transforms = T.Compose(
            build_objects(
                cfg['transforms']['train'], mod=T))
        # Inplace modification
        cfg['datasets']['train'].args['transforms'] = train_transforms
        train_dataset = build_objects(
            cfg['datasets']['train'], mod=paddlers.datasets)

    # The eval dataset is built unconditionally: both 'train' (validation)
    # and 'eval' commands use it.
    if not isinstance(cfg['datasets']['eval'].args, dict):
        raise ValueError("args of eval dataset must be a dict!")
    if cfg['datasets']['eval'].args.get('transforms', None) is not None:
        raise ValueError(
            "Found key 'transforms' in args of eval dataset and the value is not None."
        )
    eval_transforms = T.Compose(build_objects(cfg['transforms']['eval'], mod=T))
    # Inplace modification
    cfg['datasets']['eval'].args['transforms'] = eval_transforms
    eval_dataset = build_objects(cfg['datasets']['eval'], mod=paddlers.datasets)

    # Build the trainer from the task-specific module (e.g. paddlers.tasks.cd).
    model = build_objects(
        cfg['model'], mod=getattr(paddlers.tasks, cfg['task']))

    if cfg['cmd'] == 'train':
        if cfg['optimizer']:
            # Normalize empty args to a dict and inject the model parameters.
            if len(cfg['optimizer'].args) == 0:
                cfg['optimizer'].args = {}
            if not isinstance(cfg['optimizer'].args, dict):
                raise TypeError("args of optimizer must be a dict!")
            if cfg['optimizer'].args.get('parameters', None) is not None:
                raise ValueError(
                    "Found key 'parameters' in args of optimizer and the value is not None."
                )
            cfg['optimizer'].args['parameters'] = model.net.parameters()
            optimizer = build_objects(cfg['optimizer'], mod=paddle.optimizer)
        else:
            # Fall back to the trainer's default optimizer.
            optimizer = None

        model.train(
            num_epochs=cfg['num_epochs'],
            train_dataset=train_dataset,
            train_batch_size=cfg['train_batch_size'],
            eval_dataset=eval_dataset,
            optimizer=optimizer,
            save_interval_epochs=cfg['save_interval_epochs'],
            log_interval_steps=cfg['log_interval_steps'],
            save_dir=cfg['save_dir'],
            learning_rate=cfg['learning_rate'],
            early_stop=cfg['early_stop'],
            early_stop_patience=cfg['early_stop_patience'],
            use_vdl=cfg['use_vdl'],
            # Empty string means "no checkpoint"; the trainer expects None.
            resume_checkpoint=cfg['resume_checkpoint'] or None,
            **cfg['train'])
    elif cfg['cmd'] == 'eval':
        # Load weights from the checkpoint directory and run evaluation.
        state_dict = paddle.load(
            os.path.join(cfg['resume_checkpoint'], 'model.pdparams'))
        model.net.set_state_dict(state_dict)
        res = model.evaluate(eval_dataset)
        print(res)
Loading…
Reference in new issue