{ "architectures": [ "PatchGPT" ], "dropout": 0.1, "head_type": "pretrain_mask", "model_type": "patchgpt", "n_channels": 6, "n_embd": 384, "n_head": 6, "n_labels": 18, "n_layer": 6, "n_positions": 1024, "n_static": 2, "patch_size": 7, "position_embedding": "relative_key", "random_mask_ratio": 0.5, "torch_dtype": "float32", "transformers_version": "4.39.3" }