// Forked from will-lms/openai-compat-endpoint
// src/config.ts
// src/config.ts
import { createConfigSchematics } from "@lmstudio/sdk";
import { FREE_MODELS } from "./constants";
/**
 * Builds a human-readable display name from an OpenRouter model id.
 *
 * Examples:
 * "x-ai/grok-4.1-fast:free" -> "Grok 4.1 Fast (free)"
 * "meta-llama/llama-3.1-8b-instruct:free" -> "Llama 3.1 8B Instruct (free)"
 * "openrouter/bert-nebulon-alpha" -> "Bert Nebulon Alpha"
 */
function generateDisplayName(model: string): string {
  // Drop the provider prefix ("x-ai/grok-..." -> "grok-...").
  // Ids without a slash pass through unchanged.
  let name = model.replace(/^[^\/]+\/(.+)/, "$1");

  // Remember and strip the ":free" marker; re-appended as "(free)" below.
  const isFree = name.includes(":free");
  name = name.replace(":free", "");

  // Hyphens become word separators; version numbers like "3.1" stay intact.
  name = name.replace(/-/g, " ");

  // Capitalize the first letter of every word ("grok" -> "Grok").
  name = name.replace(/\b\w/g, char => char.toUpperCase());

  // Uppercase parameter-size suffixes ("8b" -> "8B", "1.5b" -> "1.5B") so the
  // output matches the documented example; digit-led words are untouched by
  // the capitalization pass above.
  name = name.replace(/\b(\d+(?:\.\d+)?)b\b/gi, "$1B");

  if (isFree) {
    name += " (free)";
  }
  return name;
}
/**
 * Global (plugin-level) settings: API credentials and the endpoint base URL.
 * Defaults target OpenRouter's OpenAI-compatible API.
 */
export const globalConfigSchematics = createConfigSchematics()
  .field(
    "apiKey",
    "string",
    { displayName: "API Key", isProtected: true, placeholder: "sk-or-v1-..." },
    "" // empty by default — the user must supply their own key
  )
  .field(
    "baseUrl",
    "string",
    {
      displayName: "Base URL",
      subtitle: "Base URL for API calls.",
      placeholder: "https://openrouter.ai/api/v1",
    },
    "https://openrouter.ai/api/v1"
  )
  .build();
// Model choices shown in the per-chat dropdown: an auto option, every entry
// from FREE_MODELS (with a generated label), and a legacy fallback value.
const modelOptions = [
  { value: "auto", displayName: "— Auto —" },
  ...FREE_MODELS.map(model => ({
    value: model,
    displayName: generateDisplayName(model),
  })),
  { value: "placeholder/free-model", displayName: "Legacy fallback" },
];

/** Per-chat settings: which model to route requests to. */
export const configSchematics = createConfigSchematics()
  .field(
    "model",
    "select",
    {
      displayName: "Model",
      subtitle: "Choose a model (fallback if empty)",
      options: modelOptions,
    },
    "auto"
  )
  .build();