mobile version styled - ready for new version 2.0.1

2025-10-22 18:37:08 +06:00
parent 344f763bb4
commit e37599d3ef
28 changed files with 4998 additions and 2224 deletions


@@ -7,23 +7,24 @@ import (
 )

 type Config struct {
-    Cwd           string
-    Host          string
-    ProxyUrl      string
-    Completions   string
-    Model         string
-    Prompt        string
-    ApiKeyFile    string
-    ResultFolder  string
-    PromptFolder  string
-    ProviderType  string
-    JwtToken      string
-    PromptID      string
-    Timeout       string
-    ResultHistory string
-    NoHistoryEnv  string
-    MainFlags     MainFlags
-    Server        ServerConfig
+    Cwd            string
+    Host           string
+    ProxyUrl       string
+    Completions    string
+    Model          string
+    Prompt         string
+    ApiKeyFile     string
+    ResultFolder   string
+    PromptFolder   string
+    ProviderType   string
+    JwtToken       string
+    PromptID       string
+    Timeout        string
+    ResultHistory  string
+    NoHistoryEnv   string
+    AllowExecution bool
+    MainFlags      MainFlags
+    Server         ServerConfig
 }

 type MainFlags struct {
@@ -61,21 +62,22 @@ func Load() Config {
     promptFolder := getEnv("LCG_PROMPT_FOLDER", path.Join(homedir, ".config", "lcg", "gpt_sys_prompts"))

     return Config{
-        Cwd:           cwd,
-        Host:          getEnv("LCG_HOST", "http://192.168.87.108:11434/"),
-        ProxyUrl:      getEnv("LCG_PROXY_URL", "/api/v1/protected/sberchat/chat"),
-        Completions:   getEnv("LCG_COMPLETIONS_PATH", "api/chat"),
-        Model:         getEnv("LCG_MODEL", "hf.co/yandex/YandexGPT-5-Lite-8B-instruct-GGUF:Q4_K_M"),
-        Prompt:        getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols."),
-        ApiKeyFile:    getEnv("LCG_API_KEY_FILE", ".openai_api_key"),
-        ResultFolder:  resultFolder,
-        PromptFolder:  promptFolder,
-        ProviderType:  getEnv("LCG_PROVIDER", "ollama"),
-        JwtToken:      getEnv("LCG_JWT_TOKEN", ""),
-        PromptID:      getEnv("LCG_PROMPT_ID", "1"),
-        Timeout:       getEnv("LCG_TIMEOUT", "300"),
-        ResultHistory: getEnv("LCG_RESULT_HISTORY", path.Join(resultFolder, "lcg_history.json")),
-        NoHistoryEnv:  getEnv("LCG_NO_HISTORY", ""),
+        Cwd:            cwd,
+        Host:           getEnv("LCG_HOST", "http://192.168.87.108:11434/"),
+        ProxyUrl:       getEnv("LCG_PROXY_URL", "/api/v1/protected/sberchat/chat"),
+        Completions:    getEnv("LCG_COMPLETIONS_PATH", "api/chat"),
+        Model:          getEnv("LCG_MODEL", "hf.co/yandex/YandexGPT-5-Lite-8B-instruct-GGUF:Q4_K_M"),
+        Prompt:         getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols."),
+        ApiKeyFile:     getEnv("LCG_API_KEY_FILE", ".openai_api_key"),
+        ResultFolder:   resultFolder,
+        PromptFolder:   promptFolder,
+        ProviderType:   getEnv("LCG_PROVIDER", "ollama"),
+        JwtToken:       getEnv("LCG_JWT_TOKEN", ""),
+        PromptID:       getEnv("LCG_PROMPT_ID", "1"),
+        Timeout:        getEnv("LCG_TIMEOUT", "300"),
+        ResultHistory:  getEnv("LCG_RESULT_HISTORY", path.Join(resultFolder, "lcg_history.json")),
+        NoHistoryEnv:   getEnv("LCG_NO_HISTORY", ""),
+        AllowExecution: isAllowExecutionEnabled(),
         Server: ServerConfig{
             Port: getEnv("LCG_SERVER_PORT", "8080"),
             Host: getEnv("LCG_SERVER_HOST", "localhost"),
@@ -92,6 +94,15 @@ func (c Config) IsNoHistoryEnabled() bool {
     return vLower == "1" || vLower == "true"
 }

+func isAllowExecutionEnabled() bool {
+    v := strings.TrimSpace(getEnv("LCG_ALLOW_EXECUTION", ""))
+    if v == "" {
+        return false
+    }
+    vLower := strings.ToLower(v)
+    return vLower == "1" || vLower == "true"
+}
+
 var AppConfig Config

 func init() {
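Taken together, the hunks in this file add one opt-in switch: LCG_ALLOW_EXECUTION, off by default and enabled only for the values 1 or true (case-insensitive, surrounding whitespace trimmed). The sketch below is an assumption about how a caller elsewhere in the application might consume the new field; the actual call sites are in other files of this commit, and the import path shown is hypothetical.

package main

import (
    "fmt"

    "lcg/config" // hypothetical import path for the package changed above
)

func main() {
    // AppConfig is populated by the config package's init(); whether the
    // tool may actually run a generated command is now a single boolean.
    if config.AppConfig.AllowExecution {
        fmt.Println("LCG_ALLOW_EXECUTION is set: the generated command would be executed")
    } else {
        fmt.Println("execution disabled: showing the command only (export LCG_ALLOW_EXECUTION=1 to opt in)")
    }
}

Keeping the flag off unless explicitly set to 1 or true is the conservative default for a tool that turns prompts into shell commands.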