// Author: Arsala Grey — refactored code (commit fac66ea, 2.25 kB).
// NOTE(review): the lines above were Hugging Face file-viewer chrome
// ("raw / history blame") captured by the scrape, folded into this comment
// so the module parses.
// Import first: ES-module imports are hoisted anyway, but keeping them at the
// top matches convention and avoids confusion.
import { HfInference } from "https://cdn.skypack.dev/@huggingface/[email protected]";

const { createApp, ref, onMounted } = Vue;

const app = createApp({
  setup() {
    // HF API token; persisted to localStorage on each run and restored on mount.
    const token = ref(localStorage.getItem("token") || "");
    const userPrompt = ref("Write an essay about Star Wars");
    // Streamed model output, appended token-by-token for live display.
    const generatedText = ref("");

    // AbortController for the in-flight request; replaced on every run().
    let controller;

    /**
     * Open a streaming text-generation request against the HF Inference API.
     * @param {HfInference} hfInstance - authenticated inference client
     * @param {string} prompt - raw user prompt (wrapped in [INST] tags for Mistral)
     * @param {AbortSignal} abortControllerSignal - cancels the underlying request
     * @returns {AsyncIterable} stream of generation events
     */
    const createTextGenerationStream = (hfInstance, prompt, abortControllerSignal) =>
      hfInstance.textGenerationStream(
        {
          model: "mistralai/Mistral-7B-Instruct-v0.1",
          inputs: `[INST]${prompt}[/INST]`,
          parameters: { max_new_tokens: 450 },
        },
        {
          use_cache: false,
          signal: abortControllerSignal,
        }
      );

    /**
     * Consume the generation stream, appending each token to generatedText
     * as a side effect and yielding the token text to the caller.
     * Fix: the original did a bare `yield;`, so consumers only ever saw
     * `undefined`; it also accumulated an unused `textFragments` array.
     */
    const generateTextStream = async function* (hfInstance, abortSignal, prompt) {
      for await (const output of createTextGenerationStream(hfInstance, prompt, abortSignal)) {
        generatedText.value += output.token.text;
        yield output.token.text;
      }
    };

    /** Start a generation run for the current prompt. */
    const run = async () => {
      controller = new AbortController();
      localStorage.setItem("token", token.value);
      // Fix: clear previous output so repeated runs don't concatenate essays.
      generatedText.value = "";
      const hfInstance = new HfInference(token.value);
      try {
        // Drain the stream; generatedText is updated inside the generator.
        for await (const text of generateTextStream(
          hfInstance,
          controller.signal,
          userPrompt.value
        )) {
          console.log(text);
        }
      } catch (e) {
        // A user-initiated stop() surfaces as an AbortError — expected, not a failure.
        if (e?.name === "AbortError") return;
        console.error(e);
      }
    };

    /** Cancel the in-flight generation, if any. */
    const stop = () => {
      controller?.abort();
    };

    // Restore a previously saved token when the component mounts.
    onMounted(() => {
      const saved = localStorage.getItem("token");
      if (saved) token.value = saved;
    });

    return {
      token,
      userPrompt,
      generatedText,
      run,
      stop,
    };
  },
});

app.mount("#app");