Adding safetensors for HF transformers support (commit 1dd84ea)
{
  "architectures": [
    "SortformerOffline"
  ],
  "ats_weight": 0.5,
  "dtype": "float32",
  "fc_encoder_config": {
    "activation_dropout": 0.1,
    "attention_bias": true,
    "attention_dropout": 0.1,
    "conv_kernel_size": 9,
    "dropout": 0.1,
    "dropout_positions": 0.0,
    "hidden_act": "silu",
    "hidden_size": 512,
    "initializer_range": 0.02,
    "intermediate_size": 2048,
    "layerdrop": 0.1,
    "max_position_embeddings": 5000,
    "model_type": "sortformer_fc_encoder",
    "num_attention_heads": 8,
    "num_hidden_layers": 18,
    "num_key_value_heads": 8,
    "num_mel_bins": 80,
    "scale_input": true,
    "subsampling_conv_channels": 256,
    "subsampling_conv_kernel_size": 3,
    "subsampling_conv_stride": 2,
    "subsampling_factor": 8
  },
  "initializer_range": 0.02,
  "model_type": "sortformer",
  "modules_config": {
    "causal_attn_rate": 0.5,
    "causal_attn_rc": 30,
    "chunk_left_context": 1,
    "chunk_len": 188,
    "chunk_right_context": 1,
    "dropout_rate": 0.5,
    "fc_d_model": 512,
    "fifo_len": 0,
    "max_index": 10000,
    "min_pos_scores_rate": 0.5,
    "model_type": "sortformer_modules",
    "num_speakers": 4,
    "pred_score_threshold": 1e-06,
    "scores_add_rnd": 2.0,
    "scores_boost_latest": 0.5,
    "sil_threshold": 0.1,
    "spkcache_len": 188,
    "spkcache_sil_frames_per_spk": 5,
    "spkcache_update_period": 188,
    "strong_boost_rate": 0.3,
    "subsampling_factor": 8,
    "tf_d_model": 192,
    "weak_boost_rate": 0.7
  },
  "num_speakers": 4,
  "pil_weight": 0.5,
  "tf_encoder_config": {
    "activation_dropout": 0.5,
    "activation_function": "relu",
    "attention_dropout": 0.5,
    "d_model": 192,
    "dropout": 0.5,
    "encoder_attention_heads": 8,
    "encoder_ffn_dim": 768,
    "encoder_layerdrop": 0.5,
    "encoder_layers": 18,
    "init_std": 0.02,
    "initializer_range": 0.02,
    "layer_norm_eps": 1e-05,
    "max_source_positions": 1500,
    "model_type": "sortformer_tf_encoder",
    "num_mel_bins": 80,
    "scale_embedding": false
  },
  "transformers_version": "5.0.0.dev0"
}
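
For reference, a minimal sketch (not part of the repo) of how this config can be inspected with plain Python; the local file path and the commented-out repo id are placeholders, and the Auto-class loading assumes a transformers build that already registers the "sortformer" model type (the config was written with transformers 5.0.0.dev0).

# Minimal sketch: inspecting the config shown above.
# Assumes the JSON has been saved locally as "config.json".
import json

with open("config.json") as f:
    cfg = json.load(f)

print(cfg["model_type"], cfg["num_speakers"])        # sortformer 4

fc = cfg["fc_encoder_config"]    # acoustic (FC) encoder settings
tf = cfg["tf_encoder_config"]    # Transformer encoder settings
mods = cfg["modules_config"]     # chunking / speaker-cache settings

print(fc["hidden_size"], fc["num_hidden_layers"])    # 512 18
print(tf["d_model"], tf["encoder_layers"])           # 192 18
print(mods["chunk_len"], mods["spkcache_len"])       # 188 188

# If the installed transformers build includes the Sortformer classes,
# the standard Auto classes should apply; the repo id is a placeholder.
# from transformers import AutoConfig, AutoModel
# config = AutoConfig.from_pretrained("org/sortformer-checkpoint")
# model = AutoModel.from_pretrained("org/sortformer-checkpoint")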