{
  "activation": "silu",
  "architectures": [
    "UNet1DModel"
  ],
  "autoencoder": true,
  "channels": [
    32,
    64,
    96,
    128,
    160,
    192,
    224,
    256,
    288,
    320,
    352,
    384
  ],
  "dropout": 0.1,
  "kernel_size": 3,
  "max_length": 48000,
  "model_type": "unet1d",
  "norm_type": "layer",
  "num_groups": null,
  "sample_rate": 48000,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.3"
}