import fs from "fs";
import yaml from "js-yaml";
const file = fs.readFileSync("chart/env/prod.yaml", "utf8");
// round-trip through JSON to get a plain object out of yaml.load (works around a Node error)
const prod = JSON.parse(JSON.stringify(yaml.load(file)));
const vars = prod.envVars as Record<string, string>;
let PUBLIC_CONFIG = "";
Object.entries(vars)
	// skip keys that are only relevant behind the prod proxy
	.filter(([key]) => !["XFF_DEPTH", "ADDRESS_HEADER"].includes(key))
	.forEach(([key, value]) => {
		PUBLIC_CONFIG += `${key}=\`${value}\`\n`;
	});
const SECRET_CONFIG =
	(fs.existsSync(".env.SECRET_CONFIG")
		? fs.readFileSync(".env.SECRET_CONFIG", "utf8")
		: process.env.SECRET_CONFIG) ?? "";
// Append the secret config (from .env.SECRET_CONFIG or the SECRET_CONFIG env variable) after the public config
let full_config = `${PUBLIC_CONFIG}\n${SECRET_CONFIG}`;
// replace the internal proxy url with the public endpoint
full_config = full_config.replaceAll(
	"https://internal.api-inference.huggingface.co",
	"https://router.huggingface.co/hf-inference"
);
full_config = full_config.replaceAll("COOKIE_SECURE=`true`", "COOKIE_SECURE=`false`");
full_config = full_config.replaceAll("LOG_LEVEL=`debug`", "LOG_LEVEL=`info`");
// Write full_config to .env.local
fs.writeFileSync(".env.local", full_config);
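// For illustration only (hypothetical keys and values, not taken from the real prod chart),
// the generated .env.local consists of the KEY=`value` lines built above, e.g.:
//
//   PUBLIC_ORIGIN=`https://example.test`
//   COOKIE_SECURE=`false`
//   LOG_LEVEL=`info`
//
// followed by whatever was in SECRET_CONFIG.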