sharpenb committed
Commit 6137635 · verified · 1 Parent(s): d1e7e32

Upload folder using huggingface_hub (#3)


- 728a60d76f66e4b1a289802b83ce6cf38bc2f279042f32303c39f25672199c65 (d5e3a341cf08e88cce84811caefd275a06c412da)

Files changed (2)
  1. config.json +1 -1
  2. smash_config.json +1 -1
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "_attn_implementation_autoset": true,
-  "_name_or_path": "/tmp/models/tmptb6pmugd/tmp6wcf0ao4",
+  "_name_or_path": "/tmp/models/tmpq12z8iqv/tmp4mlt6q9p",
   "architectures": [
     "PhiForCausalLM"
   ],
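The only change here is the transient `_name_or_path` recorded at export time; the architecture remains `PhiForCausalLM`. A minimal sketch of inspecting these fields with `transformers.AutoConfig`; the local path below is a placeholder assumption, not a path from this commit:

```python
# Sketch, assuming a local copy of this repository's files.
# "./phi-smashed" is a placeholder path, not taken from the diff.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./phi-smashed")
print(config.architectures)   # expected: ["PhiForCausalLM"]
```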
smash_config.json CHANGED
@@ -11,7 +11,7 @@
   "quant_hqq_weight_bits": 4,
   "max_batch_size": 1,
   "device": "cuda",
-  "cache_dir": "/tmp/models/tmptb6pmugd",
+  "cache_dir": "/tmp/models/tmpq12z8iqv",
   "task": "",
   "save_load_fn": "hqq",
   "save_load_fn_args": {},