Text Generation · Transformers · Safetensors · llama · text-generation-inference
model: llama
repo_name: llama_block_0_tracking_shuffled_objects_Complete Random
file_name: llama_block_0_tracking_shuffled_objects_Complete Random_5000_5.pt
base_model: meta-llama/Llama-2-7b-hf
pruning_style: block
community: 0
pruning_ratio: 20
dataset_label: tracking_shuffled_objects
sparsity_ratio: 20
dataset: ['tasksource/bigbench', 'tracking_shuffled_objects']
finetune: Complete Random
modules_size: 27
modules: ['8_attn.k', '13_attn.v', '25_gate', '23_gate', '9_mlp.up', '26_mlp.down', '5_mlp.up', '6_attn.v', '27_attn.q', '11_gate', '10_attn.q', '27_attn.v', '20_gate', '10_mlp.down', '10_attn.o', '3_attn.k', '19_mlp.down', '8_attn.v', '19_mlp.up', '21_attn.o', '24_attn.k', '6_attn.q', '13_gate', '17_attn.k', '7_attn.k', '18_mlp.down', '17_attn.v']
rank: 2
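The 27 entries in modules look like layer-indexed projection names (reading '8_attn.k' as the key projection of block 8 is an interpretation, not stated by the card). Below is a minimal sketch of overlaying this checkpoint on the base model; it assumes the .pt file is a partial state dict and that the Hub repo id matches the repo_name above, neither of which the card confirms.

# A minimal sketch, not the authors' loading code: it assumes the .pt file is
# a partial state dict covering only the pruned modules, and that the Hub
# repo id follows the repo_name above (both assumptions).
import torch
from huggingface_hub import hf_hub_download
from transformers import AutoModelForCausalLM

# Base model named in the card metadata.
model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")

# Hypothetical repo id; the card gives repo_name and file_name, not the full path.
ckpt_path = hf_hub_download(
    repo_id="KBhandari11/llama_block_0_tracking_shuffled_objects_Complete Random",
    filename="llama_block_0_tracking_shuffled_objects_Complete Random_5000_5.pt",
)
state_dict = torch.load(ckpt_path, map_location="cpu")

# strict=False overwrites only the tensors present in the partial checkpoint,
# leaving every other base weight untouched.
model.load_state_dict(state_dict, strict=False)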
KBhandari11 · Upload LlamaForCausalLM · 752bf21 verified
generation_config.json · 183 Bytes
{
"bos_token_id": 1,
"do_sample": true,
"eos_token_id": 2,
"max_length": 4096,
"pad_token_id": 0,
"temperature": 0.6,
"top_p": 0.9,
"transformers_version": "4.52.4"
}
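These values map one-to-one onto keyword arguments of transformers' generate(). A minimal usage sketch, assuming the base checkpoint from the card and a placeholder prompt:

# A minimal usage sketch; the checkpoint name and prompt are assumptions,
# and the generate() kwargs mirror the JSON values above.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")

inputs = tokenizer("Alice, Bob, and Claire each start with a ball.", return_tensors="pt")
output_ids = model.generate(
    **inputs,
    do_sample=True,    # sample instead of greedy decoding
    temperature=0.6,   # softens the next-token distribution
    top_p=0.9,         # nucleus-sampling cutoff
    max_length=4096,   # hard cap on prompt + generated tokens
    pad_token_id=0,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))

When this file sits next to the weights, from_pretrained loads it automatically and generate() uses it by default, so passing the kwargs explicitly is only needed to override them.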