Skip to content

Commit

Permalink
✅ Enhanced coverage
Browse files — browse the repository at this point in the history
  • Loading branch information
carefree0910 committed Oct 16, 2024
1 parent c803d6b commit 7a2f83e
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 6 deletions.
3 changes: 1 addition & 2 deletions .coveragerc
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
[report]
exclude_lines =
pragma: no cover
exclude_also =
if TYPE_CHECKING:
2 changes: 1 addition & 1 deletion core/learn/callbacks/defaults.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def before_loop_with_loaders(
return None
if valid_loader is not None:
loader = valid_loader
else:
else: # pragma: no cover
if (
console.ask(
"no validation loader found, do you want to calculate resumed-metrics from the training loader?",
Expand Down
4 changes: 2 additions & 2 deletions core/learn/pipeline/blocks/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -978,7 +978,7 @@ def save_extra(self, folder: TPath) -> None:
accelerator = self.build_trainer.trainer.accelerator
if accelerator is not None:
scaler = accelerator.scaler
if scaler is not None:
if scaler is not None: # pragma: no cover
torch.save(scaler.state_dict(), os.path.join(folder, self.scaler_file))
state = self.build_trainer.trainer.state
if state is not None:
Expand All @@ -1004,7 +1004,7 @@ def load_from(self, folder: TPath) -> None:
if k_sch is not None:
k_sch.load_state_dict(states)
accelerator = self.build_trainer.trainer.accelerator
if accelerator is not None:
if accelerator is not None: # pragma: no cover
scaler_path = folder / self.scaler_file
if scaler_path.is_file():
scaler = accelerator.scaler
Expand Down
6 changes: 6 additions & 0 deletions tests/test_learn/test_losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,11 @@ def test_corr_loss(self) -> None:
torch.testing.assert_close(corr, gt_corr)

def test_multi_loss(self) -> None:
@cflearn.register_loss("foo")
class _(cflearn.ILoss):
def forward(self, forward_results, batch, state=None) -> None:
return None

x = torch.randn(13, 1)
y = torch.randn(13, 1)
mse = get_loss("mae", x, y)
Expand All @@ -44,6 +49,7 @@ def test_multi_loss(self) -> None:
losses=[
{"name": "mse", "weight": 0.17},
{"name": "corr", "weight": 0.19},
{"name": "foo", "weight": 123.456},
],
)[cflearn.LOSS_KEY]
multi = get_loss(
Expand Down
5 changes: 4 additions & 1 deletion tests/test_toolkit/test_misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -820,9 +820,12 @@ def test_format_float(self):

def test_track(self):
for _ in track(range(2)):
time.sleep(1.01)
time.sleep(1.1)
for _ in track(range(3), leave=False):
pass
for _ in track(range(3), leave=False):
for _ in track(range(3), leave=False):
pass
nums = []
for _ in track(range(3), update_callback=lambda *_: nums.append(1)):
pass
Expand Down

0 comments on commit 7a2f83e

Please sign in to comment.