w601sxs committed
Commit: e65a7f5
Parent: f2bd7a2

Upload model

Files changed (2):
  1. adapter_config.json (+1 -1)
  2. adapter_model.bin (+1 -1)
adapter_config.json CHANGED
@@ -3,7 +3,7 @@
   "base_model_name_or_path": "w601sxs/b1ade-1b",
  "bias": "none",
  "fan_in_fan_out": false,
- "inference_mode": false,
+ "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bac0780e8301c2aff947a494fe05edb690f1fd6398484ddcfa9458526f4612a2
+ oid sha256:5b2ca53f336cb70a54b93eb17941d5ec3db006717a7ea288220da703dc40ddc2
  size 4205905