Update README.md
Browse files
README.md
CHANGED
@@ -102,12 +102,27 @@ For detailed usage refer to the [colab_notebook](https://colab.research.google.c
|
|
102 |
|
103 |
|
104 |
|
105 |
-
###
|
106 |
|
107 |
```bash
|
108 |
pip install transformers
|
|
|
109 |
```
|
110 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
111 |
### Prompt
|
112 |
|
113 |
|
|
|
102 |
|
103 |
|
104 |
|
105 |
+
### Model Use
|
106 |
|
107 |
```bash
|
108 |
pip install transformers
|
109 |
+
pip install accelerate
|
110 |
```
|
111 |
|
112 |
+
```python
|
113 |
+
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
|
114 |
+
from accelerate import Accelerator
|
115 |
+
model = AutoModelForCausalLM.from_pretrained("PipableAI/pip-code-bandit", torch_dtype=torch.bfloat16, device_map="auto")
|
116 |
+
tokenizer = AutoTokenizer.from_pretrained("PipableAI/pip-code-bandit")
|
117 |
+
|
118 |
+
inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
|
119 |
+
outputs = model.generate(**inputs, max_new_tokens=new_tokens,)
|
120 |
+
out = (
|
121 |
+
tokenizer.decode(outputs[0][inputs.input_ids.shape[-1]:], skip_special_tokens=True)
|
122 |
+
)
|
123 |
+
```
|
124 |
+
|
125 |
+
|
126 |
### Prompt
|
127 |
|
128 |
|