import enum
import json

import torch


def prepare_object_to_dump(obj) -> str:
    if isinstance(obj, str):
        return f"'{obj}'"  # Wrap strings in single quotes
    elif isinstance(obj, dict):
        # Use a list comprehension (not a set) so key order is preserved and
        # the output is deterministic.
        dict_str = ", ".join(
            [f"{str(k)}: {prepare_object_to_dump(v)}" for k, v in obj.items()]
        )
        return f"{{{dict_str}}}"
    elif isinstance(obj, list):
        return f"[{', '.join([prepare_object_to_dump(v) for v in obj])}]"
    elif isinstance(obj, set):
        # Sets are rendered with list syntax for readability.
        return f"[{', '.join([prepare_object_to_dump(v) for v in obj])}]"
    elif isinstance(obj, tuple):
        # Tuples are also rendered with list syntax.
        return f"[{', '.join([prepare_object_to_dump(v) for v in obj])}]"
    elif isinstance(obj, enum.Enum):
        return repr(obj)
    elif isinstance(obj, torch.Tensor):
        # Only print a summary of the tensor (no values) so that sensitive
        # data is not exposed and some metadata is still available if the
        # CUDA runtime has crashed.
        return f"Tensor(shape={obj.shape}, device={obj.device}, dtype={obj.dtype})"
    elif hasattr(obj, "anon_repr"):
        # The object provides its own anonymized representation.
        return obj.anon_repr()
    elif hasattr(obj, "__dict__"):
        items = obj.__dict__.items()
        dict_str = ", ".join(
            [f"{str(k)}={prepare_object_to_dump(v)}" for k, v in items]
        )
        return f"{type(obj).__name__}({dict_str})"
    else:
        # Fall back to JSON to make sure the value is serializable; use repr()
        # for anything JSON cannot handle.
        try:
            return json.dumps(obj)
        except (TypeError, OverflowError):
            return repr(obj)
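

# Minimal usage sketch (illustrative; not part of the original module). It
# assumes torch is installed. The `_ExampleRequest` dataclass below is a
# hypothetical stand-in for whatever engine/request objects a caller would
# actually dump.
if __name__ == "__main__":
    from dataclasses import dataclass

    @dataclass
    class _ExampleRequest:
        request_id: str
        temperature: float
        stop: list

    req = _ExampleRequest(request_id="req-1", temperature=0.7, stop=["###"])
    # Hits the __dict__ branch and prints:
    #   _ExampleRequest(request_id='req-1', temperature=0.7, stop=['###'])
    print(prepare_object_to_dump(req))

    # Tensors are summarized by shape/device/dtype only; values are never printed.
    print(prepare_object_to_dump(torch.zeros(2, 3)))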