Controls which config parts are saved by Lightning loggers.
Additionally saves the number of model parameters.
Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `object_dict` | `dict[str, Any]` | A dictionary containing the following objects: `cfg`, `model`, `trainer`. | *required* |
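The parameter counts this function records are plain sums of `torch` tensor sizes via `numel()`. A self-contained illustration of that counting logic (the two-layer model below is made up for the example):

```python
import torch

# Hypothetical model: freeze the first layer so the three counts differ.
model = torch.nn.Sequential(torch.nn.Linear(4, 8), torch.nn.Linear(8, 2))
for p in model[0].parameters():
    p.requires_grad = False

total = sum(p.numel() for p in model.parameters())                          # 40 + 18 = 58
trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)   # 18
print(total, trainable, total - trainable)  # 58 18 40
```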
Source code in src/utils/logging_utils.py
```python
@rank_zero_only
def log_hyperparameters(object_dict: dict[str, Any]) -> None:
    """Controls which config parts are saved by Lightning loggers.

    Additionally saves the number of model parameters.

    Args:
        object_dict: A dictionary containing the following objects: cfg, model, trainer.
    """
    hparams = {}

    cfg = OmegaConf.to_container(object_dict["cfg"])
    model = object_dict["model"]
    trainer = object_dict["trainer"]

    if not trainer.logger:
        log.warning("Logger not found! Skipping hyperparameter logging...")
        return

    hparams["model"] = cfg["model"]

    # save number of model parameters
    hparams["model/params/total"] = sum(p.numel() for p in model.parameters())
    hparams["model/params/trainable"] = sum(p.numel() for p in model.parameters() if p.requires_grad)
    hparams["model/params/non_trainable"] = sum(p.numel() for p in model.parameters() if not p.requires_grad)

    hparams["data"] = cfg["data"]
    hparams["trainer"] = cfg["trainer"]

    hparams["callbacks"] = cfg.get("callbacks")
    hparams["extras"] = cfg.get("extras")

    hparams["task_name"] = cfg.get("task_name")
    hparams["tags"] = cfg.get("tags")
    hparams["ckpt_path"] = cfg.get("ckpt_path")
    hparams["seed"] = cfg.get("seed")
    hparams["execution_command"] = f"python {' '.join(sys.argv)}"

    # send hparams to all loggers
    for logger in trainer.loggers:
        logger.log_hyperparams(hparams)
```
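A minimal sketch of how this utility is typically wired into a training script. `TinyModule` and the config contents are hypothetical stand-ins; only `log_hyperparameters` comes from this module, and the config must provide at least the `model`, `data`, and `trainer` sections that the function reads directly.

```python
import torch
from lightning import LightningModule, Trainer
from lightning.pytorch.loggers import CSVLogger
from omegaconf import OmegaConf

from src.utils.logging_utils import log_hyperparameters


class TinyModule(LightningModule):
    """Hypothetical stand-in for a real LightningModule."""

    def __init__(self) -> None:
        super().__init__()
        self.layer = torch.nn.Linear(28 * 28, 10)


# The function indexes cfg["model"], cfg["data"], and cfg["trainer"] directly,
# so those sections must exist; the remaining keys are read with cfg.get()
# and may be omitted. All values here are illustrative.
cfg = OmegaConf.create(
    {
        "model": {"_target_": "TinyModule"},
        "data": {"batch_size": 64},
        "trainer": {"max_epochs": 1},
        "task_name": "train",
        "seed": 42,
    }
)

# At least one logger must be attached, or the function logs a warning and returns.
trainer = Trainer(max_epochs=1, logger=CSVLogger("logs/"))

log_hyperparameters({"cfg": cfg, "model": TinyModule(), "trainer": trainer})
```

Because of the `@rank_zero_only` decorator, the call is a no-op on all but the rank-zero process, so it is safe to invoke unconditionally in multi-GPU runs.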