jaketae committed on
Commit
592fed6
1 Parent(s): 68ac767

Upload config.json

Files changed (1)
  1. config.json +5 -9
config.json CHANGED
@@ -1,5 +1,4 @@
  {
- "_name_or_path": "jaketae/fastspeech2-lj-en",
  "add_postnet": false,
  "architectures": [
    "FastSpeech2Model"
@@ -8,35 +7,32 @@
  "decoder_attention_heads": 2,
  "decoder_embed_dim": 256,
  "decoder_layers": 4,
- "dropout": 0.2,
  "encoder_attention_heads": 2,
  "encoder_embed_dim": 256,
  "encoder_layers": 4,
  "energy_max": 3.2244551181793213,
  "energy_min": -4.9544901847839355,
+ "fft_dropout": 0.2,
  "fft_hidden_dim": 1024,
  "fft_kernel_size": 9,
  "initializer_range": 0.0625,
  "max_source_positions": 1024,
- "mean": true,
  "model_type": "fastspeech2",
- "n_frames_per_step": 1,
  "num_speakers": 1,
- "output_frame_dim": 80,
  "pad_token_id": 1,
  "pitch_max": 5.733940816898645,
  "pitch_min": -4.660287183665281,
  "postnet_conv_dim": 512,
  "postnet_conv_kernel_size": 5,
- "postnet_dropout": 0,
+ "postnet_dropout": 0.5,
  "postnet_layers": 5,
  "speaker_embed_dim": 64,
- "std": true,
  "torch_dtype": "float32",
- "transformers_version": "4.17.0.dev0",
+ "transformers_version": "4.19.0.dev0",
+ "use_mean": true,
+ "use_standard_deviation": true,
  "var_pred_dropout": 0.5,
  "var_pred_hidden_dim": 256,
  "var_pred_kernel_size": 3,
- "var_pred_n_bins": 256,
  "vocab_size": 75
  }
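
For reference, a minimal sketch (not part of the commit) of how the updated config.json could be sanity-checked after downloading it locally; the local path "config.json" is an assumption, and only the key names come from the diff above.

import json

# Assumes the updated file has been downloaded, e.g. with
#   huggingface_hub.hf_hub_download("jaketae/fastspeech2-lj-en", "config.json")
with open("config.json") as f:
    config = json.load(f)

# Keys added in this commit (replacing "dropout", "mean", and "std").
for key in ("fft_dropout", "use_mean", "use_standard_deviation"):
    print(key, "=", config[key])

# Keys removed in this commit should no longer be present.
for key in ("dropout", "mean", "std", "n_frames_per_step",
            "output_frame_dim", "var_pred_n_bins"):
    assert key not in config, f"stale key still present: {key}"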