{
  "model_name": "KothaGPT/bn-en-readability-classifier",
  "architecture": "BertForSequenceClassification",
  "base_model": "ai4bharat/indic-bert",
  "num_labels": 4,
  "epochs": 4,
  "batch_size": 16,
  "learning_rate": 3e-5,
  "max_seq_length": 256,
  "optimizer": "AdamW",
  "dropout": 0.1,
  "mixed_precision": true,
  "train_dataset_size": 900000,
  "eval_dataset_size": 100000,
  "loss_fn": "cross_entropy",
  "gradient_accumulation_steps": 2,
  "scheduler": "linear",
  "seed": 42,
  "early_stopping": true,
  "save_total_limit": 2,
  "evaluation_strategy": "epoch",
  "logging_strategy": "steps",
  "logging_steps": 100
}