Skip to content

Commit 6723b91

Browse files
committed
fix: remove useless func to avoid import error, update note inside.
1 parent c494507 commit 6723b91

2 files changed

Lines changed: 1 addition & 26 deletions

File tree

scripts/network/models/basic/decoder.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,7 @@ def __init__(self, pseudoimage_channels: int = 64, num_iters: int = 4):
144144

145145
self.offset_encoder = nn.Linear(3, 64)
146146

147-
# FIXME: not sure which one should be hidden or input?
147+
# NOTE: voxel feature is the hidden state, point offset is the input; check paper's Fig. 3
148148
self.gru = ConvGRU(input_dim=64, hidden_dim=pseudoimage_channels*2)
149149

150150
self.decoder = nn.Sequential(
@@ -175,7 +175,6 @@ def forward_single(self, before_pseudoimage: torch.Tensor,
175175
# [N, 128] -> [N, 128, 1]
176176
concatenated_vectors = concatenated_vectors.unsqueeze(2)
177177

178-
# FIXME: how many iters?
179178
for itr in range(self.num_iters):
180179
concatenated_vectors = self.gru(concatenated_vectors, point_offsets_feature.unsqueeze(2))
181180

scripts/pl_model.py

Lines changed: 0 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@
2525
BASE_DIR = os.path.abspath(os.path.join( os.path.dirname( __file__ ), '..' ))
2626
sys.path.append(BASE_DIR)
2727
from scripts.utils.mics import import_func, weights_init, zip_res
28-
from scripts.utils.optim import WarmupCosLR
2928
from scripts.network.loss_func import evaluate_leaderboard
3029
from scripts.utils.av2_eval import write_output_file
3130
from scripts.network.models.basic import cal_pose0to1
@@ -69,12 +68,6 @@ def __init__(self, cfg, eval=False):
6968
self.av2_mode = None
7069
if cfg.pretrained_weights is not None:
7170
self.model.load_from_checkpoint(cfg.pretrained_weights)
72-
73-
self.turn_lr_scheduler = False
74-
if 'lr_scheduler' in cfg:
75-
self.turn_lr_scheduler = cfg.lr_scheduler
76-
self.min_lr = cfg.min_lr
77-
self.warmup_epochs = max(1, int(self.epochs / 10))
7871

7972
if 'dataset_path' in cfg:
8073
self.dataset_path = cfg.dataset_path
@@ -228,28 +221,11 @@ def validation_step(self, batch, batch_idx):
228221
self.train_validation_step_(batch, res_dict)
229222

230223
def configure_optimizers(self):
231-
# optimizer = optim.Adam(self.model.parameters(), lr=self.lr)
232-
233-
# if self.turn_lr_scheduler:
234-
# scheduler = WarmupCosLR(optimizer = optimizer,
235-
# min_lr = self.min_lr,
236-
# lr = self.lr,
237-
# warmup_epochs = self.warmup_epochs,
238-
# epochs = self.epochs)
239-
# return [optimizer], [scheduler]
240-
241-
# return optimizer
242224
optimizer = optim.Adam(self.model.parameters(), lr=self.lr)
243225
return optimizer
244226

245227
def on_train_epoch_start(self):
246228
self.time_start_train_epoch = time.time()
247-
# if self.current_epoch < self.warmup_epochs * 2:
248-
# if self.current_epoch == 0:
249-
# self.add_seloss.remove('cluster_flow_loss')
250-
# else:
251-
# if 'cluster_flow_loss' not in self.add_seloss:
252-
# self.add_seloss.append('cluster_flow_loss')
253229

254230
def on_train_epoch_end(self):
255231
self.log("pre_epoch_cost (mins)", (time.time()-self.time_start_train_epoch)/60.0, on_step=False, on_epoch=True, sync_dist=True)

0 commit comments

Comments (0)