lhallee committed on
Commit
5a65522
·
verified ·
1 Parent(s): 4da13f4

Upload FastEsmForMaskedLM

Browse files
Files changed (1) hide show
  1. config.json +2 -4
config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "_name_or_path": "/tmp/facebook/esm2_t30_150M_UR50D",
3
  "architectures": [
4
  "FastEsmForMaskedLM"
5
  ],
@@ -13,6 +12,7 @@
13
  "AutoModelForTokenClassification": "modeling_fastesm.FastEsmForTokenClassification"
14
  },
15
  "classifier_dropout": null,
 
16
  "emb_layer_norm_before": false,
17
  "esmfold_config": null,
18
  "hidden_act": "gelu",
@@ -31,9 +31,7 @@
31
  "position_embedding_type": "rotary",
32
  "tie_word_embeddings": false,
33
  "token_dropout": true,
34
- "torch_dtype": "float32",
35
- "transformers_version": "4.48.1",
36
- "use_cache": true,
37
  "vocab_list": null,
38
  "vocab_size": 33
39
  }
 
1
  {
 
2
  "architectures": [
3
  "FastEsmForMaskedLM"
4
  ],
 
12
  "AutoModelForTokenClassification": "modeling_fastesm.FastEsmForTokenClassification"
13
  },
14
  "classifier_dropout": null,
15
+ "dtype": "float32",
16
  "emb_layer_norm_before": false,
17
  "esmfold_config": null,
18
  "hidden_act": "gelu",
 
31
  "position_embedding_type": "rotary",
32
  "tie_word_embeddings": false,
33
  "token_dropout": true,
34
+ "transformers_version": "5.2.0",
 
 
35
  "vocab_list": null,
36
  "vocab_size": 33
37
  }