{
  "_name_or_path": "aimagelab/ReT2-MBEIR-CLIP-ViT-L",
  "architectures": [
    "Ret2Model"
  ],
  "attention_dropout": 0.05,
  "dropout_p": 0.05,
  "forget_gate_bias": 0.0,
  "hidden_size": 1024,
  "input_gate_bias": 0.0,
  "logit_scale_clamp": 100.0,
  "logit_scale_init_value": 4.605170249938965,
  "model_type": "ret2",
  "text_frozen": false,
  "text_global_feats_proj_size": 0,
  "text_layer_strategy": "vit_b",
  "text_name_or_path": "openai/clip-vit-large-patch14",
  "tokenizer_max_length": null,
  "torch_dtype": "float32",
  "transformers_version": "4.49.0",
  "vision_frozen": false,
  "vision_global_feats_proj_size": 0,
  "vision_layer_strategy": "vit_l",
  "vision_name_or_path": "openai/clip-vit-large-patch14",
  "w_final_size": 768
}
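Because "ret2" is a custom model_type rather than a built-in transformers architecture, this config is typically consumed through the Auto classes with remote code enabled. The following is a minimal sketch, assuming the aimagelab/ReT2-MBEIR-CLIP-ViT-L repository ships remote code that registers Ret2Model with the Auto classes; the exact loading API may differ from this illustration.

```python
# Minimal sketch: loading the config above and the associated checkpoint.
# Assumption: the repo's remote code registers the custom "ret2" model_type
# with transformers' AutoConfig / AutoModel.
from transformers import AutoConfig, AutoModel

repo_id = "aimagelab/ReT2-MBEIR-CLIP-ViT-L"

# Parse the config shown above.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.model_type)   # "ret2"
print(config.hidden_size)  # 1024

# Instantiate the model with its weights.
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)
```

Note that trust_remote_code=True is required whenever the architecture is defined by code hosted in the model repository rather than in the transformers library itself.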