{
  "architectures": [
    "LTX2GemmaTextEncoderModel"
  ],
  "hidden_size": 3840,
  "num_hidden_layers": 48,
  "num_attention_heads": 30,
  "text_len": 1024,
  "pad_token_id": 0,
  "eos_token_id": 2,
  "gemma_model_path": "gemma",
  "gemma_dtype": "bfloat16",
  "padding_side": "left",
  "feature_extractor_in_features": 188160,
  "feature_extractor_out_features": 3840,
  "connector_num_attention_heads": 30,
  "connector_attention_head_dim": 128,
  "connector_num_layers": 2,
  "connector_positional_embedding_theta": 10000.0,
  "connector_positional_embedding_max_pos": [
    4096
  ],
  "connector_rope_type": "split",
  "connector_double_precision_rope": true,
  "connector_num_learnable_registers": 128
}