Compare commits

...

8 Commits

SHA1 Message Date
e99fe76bef release v1.1.0 2025-08-11 13:06:14 +06:00
cc242e1192 improoved version 2025-08-11 13:01:27 +06:00
3e1c4594b1 added new proxy llm provider 2025-08-08 17:42:26 +06:00
ec2486ce3d release v1.0.4 2024-12-05 15:48:53 +06:00
46a0d9e45a release v1.0.3 2024-12-05 13:25:58 +06:00
12cd3fe6db moved to cli framework 2024-12-05 13:17:35 +06:00
7136fe4607 before refactor to cli framework 2024-12-05 11:15:38 +06:00
fa0a8565c3 release v1.0.2 2024-12-03 18:00:10 +06:00
18 changed files with 1459 additions and 297 deletions

4
.gitignore vendored

@@ -12,4 +12,6 @@ dist/
shell-code/build.env
bin-linux-amd64/*
bin-linux-arm64/*
binaries-for-upload/*
binaries-for-upload/*
gpt_results
shell-code/jwt.admin.token


@@ -1,8 +1,9 @@
FROM --platform=${BUILDPLATFORM} golang:1.23-alpine AS builder
FROM --platform=${BUILDPLATFORM} golang:1.24.6-alpine3.22 AS builder
ARG TARGETARCH
RUN apk add git && go install mvdan.cc/garble@latest
# RUN apk add git
#&& go install mvdan.cc/garble@latest
WORKDIR /app
@@ -11,8 +12,8 @@ COPY . .
RUN echo $BUILDPLATFORM > buildplatform
RUN echo $TARGETARCH > targetarch
# RUN GOOS=linux GOARCH=$TARGETARCH go build -o output/go-ansible-vault
RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} garble -literals -tiny build -ldflags="-w -s" -o /app/go-lcg .
RUN GOOS=linux GOARCH=$TARGETARCH go build -ldflags="-w -s" -o /app/go-lcg .
#RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} garble -literals -tiny build -ldflags="-w -s" -o /app/go-lcg .
FROM alpine:latest


@@ -1,15 +1,16 @@
FROM --platform=${BUILDPLATFORM} golang:1.23-alpine AS build
FROM --platform=${BUILDPLATFORM} golang:1.24.6-alpine3.22 AS build
ARG TARGETOS
ARG TARGETARCH
RUN apk add git && go install mvdan.cc/garble@latest
# RUN apk add git
#&& go install mvdan.cc/garble@latest
WORKDIR /src
ENV CGO_ENABLED=0
COPY go.* .
RUN go mod download
COPY . .
# RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o /out/go-ansible-vault .
RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} garble -literals -tiny build -ldflags="-w -s" -o /out/go-lcg .
RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -ldflags="-w -s -buildid=" -trimpath -o /out/go-lcg .
# RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} garble -literals -tiny build -ldflags="-w -s" -o /out/go-lcg .
FROM scratch AS bin-unix
COPY --from=build /out/go-lcg /lcg


@@ -1,8 +1,11 @@
## Linux Command GPT (lcg)
# Linux Command GPT (lcg)
Get Linux commands in natural language with the power of ChatGPT.
### Installation
## Installation
Build from source
```bash
> git clone --depth 1 https://github.com/asrul10/linux-command-gpt.git ~/.linux-command-gpt
> cd ~/.linux-command-gpt
@@ -13,7 +16,7 @@ Build from source
Or you can [download lcg executable file](https://github.com/asrul10/linux-command-gpt/releases)
### Example Usage
## Example Usage
```bash
> lcg I want to extract linux-command-gpt.tar.gz file
@@ -39,24 +42,30 @@ for host in "${hosts[@]}"; do
ssh $host "echo 'Hello, world!' > /tmp/hello.txt"
done
```
This script defines an array `hosts` that contains the names of the hosts to connect to. The loop iterates over each element in the array and uses the `ssh` command to execute a simple command on the remote host. In this case, the command is `echo 'Hello, world!' > /tmp/hello.txt`, which writes the string "Hello, world!" to a file called `/tmp/hello.txt`.
You can modify the script to run any command you like by replacing the `echo` command with your desired command. For example, if you want to run a Python script on each host, you could use the following command:
```bash
ssh $host "python /path/to/script.py"
```
This will execute the Python script located at `/path/to/script.py` on the remote host.
You can also modify the script to run multiple commands in a single SSH session by using the `&&` operator to chain the commands together. For example:
```bash
ssh $host "echo 'Hello, world!' > /tmp/hello.txt && python /path/to/script.py"
```
This will execute both the `echo` command and the Python script in a single SSH session.
I hope this helps! Let me know if you have any questions or need further assistance.
Do you want to (c)opy, (r)egenerate, or take (N)o action on the command? (c/r/N):
```
``` text
To use the "copy to clipboard" feature, you need to install either the `xclip` or `xsel` package.
@@ -69,4 +78,25 @@ To use the "copy to clipboard" feature, you need to install either the `xclip` o
--file -f read command from file
--update-key -u update the API key
--delete-key -d delete the API key
# ollama example
export LCG_PROVIDER=ollama
export LCG_HOST=http://192.168.87.108:11434/
export LCG_MODEL=codegeex4
lcg "I want to extract linux-command-gpt.tar.gz file"
export LCG_PROVIDER=proxy
export LCG_HOST=http://localhost:8080
export LCG_MODEL=GigaChat-2
export LCG_JWT_TOKEN=your_jwt_token_here
lcg "I want to extract linux-command-gpt.tar.gz file"
lcg health
lcg config
lcg update-jwt
```


@@ -1 +1 @@
v1.0.1
v1.1.0

224
_main.go Normal file

@@ -0,0 +1,224 @@
// package main
// import (
// _ "embed"
// "fmt"
// "math"
// "os"
// "os/user"
// "path"
// "strings"
// "time"
// "github.com/atotto/clipboard"
// "github.com/direct-dev-ru/linux-command-gpt/gpt"
// "github.com/direct-dev-ru/linux-command-gpt/reader"
// )
// //go:embed VERSION.txt
// var Version string
// var cwd, _ = os.Getwd()
// var (
// HOST = getEnv("LCG_HOST", "http://192.168.87.108:11434/")
// COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat") // relative part of endpoint
// MODEL = getEnv("LCG_MODEL", "codegeex4")
// PROMPT = getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.")
// API_KEY_FILE = getEnv("LCG_API_KEY_FILE", ".openai_api_key")
// RESULT_FOLDER = getEnv("LCG_RESULT_FOLDER", path.Join(cwd, "gpt_results"))
// // HOST = "https://api.openai.com/v1/"
// // COMPLETIONS = "chat/completions"
// // MODEL = "gpt-4o-mini"
// // MODEL = "codellama:13b"
// // This file is created in the user's home directory
// // Example: /home/username/.openai_api_key
// // API_KEY_FILE = ".openai_api_key"
// HELP = `
// Usage: lcg [options]
// --help -h output usage information
// --version -v output the version number
// --file -f read part of command from file or bash feature $(...)
// --update-key -u update the API key
// --delete-key -d delete the API key
// Example Usage: lcg I want to extract linux-command-gpt.tar.gz file
// Example Usage: lcg --file /path/to/file.json I want to print object questions with jq
// Env Vars:
// LCG_HOST - defaults to "http://192.168.87.108:11434/" - endpoint for Ollama or other LLM API
// LCG_COMPLETIONS_PATH -defaults to "api/chat" - relative part of endpoint
// LCG_MODEL - defaults to "codegeex4"
// LCG_PROMPT - defaults to Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks.
// LCG_API_KEY_FILE - defaults to ${HOME}/.openai_api_key - file with API key
// LCG_RESULT_FOLDER - defaults to $(pwd)/gpt_results - folder to save results
// `
// VERSION = Version
// CMD_HELP = 100
// CMD_VERSION = 101
// CMD_UPDATE = 102
// CMD_DELETE = 103
// CMD_COMPLETION = 110
// )
// // getEnv retrieves the value of the environment variable `key` or returns `defaultValue` if not set.
// func getEnv(key, defaultValue string) string {
// if value, exists := os.LookupEnv(key); exists {
// return value
// }
// return defaultValue
// }
// func handleCommand(cmd string) int {
// if cmd == "" || cmd == "--help" || cmd == "-h" {
// return CMD_HELP
// }
// if cmd == "--version" || cmd == "-v" {
// return CMD_VERSION
// }
// if cmd == "--update-key" || cmd == "-u" {
// return CMD_UPDATE
// }
// if cmd == "--delete-key" || cmd == "-d" {
// return CMD_DELETE
// }
// return CMD_COMPLETION
// }
// func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
// gpt3.InitKey()
// s := time.Now()
// done := make(chan bool)
// go func() {
// loadingChars := []rune{'-', '\\', '|', '/'}
// i := 0
// for {
// select {
// case <-done:
// fmt.Printf("\r")
// return
// default:
// fmt.Printf("\rLoading %c", loadingChars[i])
// i = (i + 1) % len(loadingChars)
// time.Sleep(30 * time.Millisecond)
// }
// }
// }()
// r := gpt3.Completions(cmd)
// done <- true
// elapsed := time.Since(s).Seconds()
// elapsed = math.Round(elapsed*100) / 100
// if r == "" {
// return "", elapsed
// }
// return r, elapsed
// }
// func main() {
// currentUser, err := user.Current()
// if err != nil {
// panic(err)
// }
// args := os.Args
// cmd := ""
// file := ""
// if len(args) > 1 {
// start := 1
// if args[1] == "--file" || args[1] == "-f" {
// file = args[2]
// start = 3
// }
// cmd = strings.Join(args[start:], " ")
// }
// if file != "" {
// err := reader.FileToPrompt(&cmd, file)
// if err != nil {
// fmt.Println(err)
// return
// }
// }
// if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
// os.MkdirAll(RESULT_FOLDER, 0755)
// }
// h := handleCommand(cmd)
// if h == CMD_HELP {
// fmt.Println(HELP)
// return
// }
// if h == CMD_VERSION {
// fmt.Println(VERSION)
// return
// }
// gpt3 := gpt.Gpt3{
// CompletionUrl: HOST + COMPLETIONS,
// Model: MODEL,
// Prompt: PROMPT,
// HomeDir: currentUser.HomeDir,
// ApiKeyFile: API_KEY_FILE,
// Temperature: 0.01,
// }
// if h == CMD_UPDATE {
// gpt3.UpdateKey()
// return
// }
// if h == CMD_DELETE {
// gpt3.DeleteKey()
// return
// }
// c := "R"
// r := ""
// elapsed := 0.0
// for c == "R" || c == "r" {
// r, elapsed = getCommand(gpt3, cmd)
// c = "N"
// fmt.Printf("Completed in %v seconds\n\n", elapsed)
// fmt.Println(r)
// fmt.Print("\nDo you want to (c)opy, (s)ave to file, (r)egenerate, or take (N)o action on the command? (c/r/N): ")
// fmt.Scanln(&c)
// // no action
// if c == "N" || c == "n" {
// return
// }
// }
// if r == "" {
// return
// }
// // Copy to clipboard
// if c == "C" || c == "c" {
// clipboard.WriteAll(r)
// fmt.Println("\033[33mCopied to clipboard")
// return
// }
// if c == "S" || c == "s" {
// timestamp := time.Now().Format("2006-01-02_15-04-05") // Format: YYYY-MM-DD_HH-MM-SS
// filename := fmt.Sprintf("gpt_request_%s(%s).md", timestamp, gpt3.Model)
// filePath := path.Join(RESULT_FOLDER, filename)
// resultString := fmt.Sprintf("## Prompt:\n\n%s\n\n------------------\n\n## Response:\n\n%s\n\n", cmd+". "+gpt3.Prompt, r)
// os.WriteFile(filePath, []byte(resultString), 0644)
// fmt.Println("\033[33mSaved to file")
// return
// }
// }

7
go.mod

@@ -3,3 +3,10 @@ module github.com/direct-dev-ru/linux-command-gpt
go 1.18
require github.com/atotto/clipboard v0.1.4
require (
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/urfave/cli/v2 v2.27.5
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
)
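
The new dependencies reflect the "moved to cli framework" commit: `urfave/cli/v2` provides the command and flag parsing used in `main.go` below, and the other entries are its transitive requirements. A minimal, self-contained sketch of the framework's shape (the names and flag here are illustrative, not taken from the repository):

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	app := &cli.App{
		Name:  "demo",
		Usage: "show the urfave/cli v2 pattern",
		Flags: []cli.Flag{
			&cli.StringFlag{Name: "file", Aliases: []string{"f"}},
		},
		Action: func(c *cli.Context) error {
			// Flags are read from the context; positional args follow them.
			fmt.Println("file flag:", c.String("file"))
			fmt.Println("args:", c.Args().Slice())
			return nil
		},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
```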

8
go.sum

@@ -1,2 +1,10 @@
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w=
github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=


@@ -1,24 +1,40 @@
package gpt
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"strings"
)
// ProxySimpleChatRequest is the structure for a simple chat request
type ProxySimpleChatRequest struct {
Message string `json:"message"`
Model string `json:"model,omitempty"`
}
// ProxySimpleChatResponse is the response structure for a simple chat request
type ProxySimpleChatResponse struct {
Response string `json:"response"`
Usage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage,omitempty"`
Model string `json:"model,omitempty"`
Timeout int `json:"timeout_seconds,omitempty"`
}
// Gpt3 is the updated structure with support for multiple providers
type Gpt3 struct {
CompletionUrl string
Prompt string
Model string
HomeDir string
ApiKeyFile string
ApiKey string
Temperature float64
Provider Provider
Prompt string
Model string
HomeDir string
ApiKeyFile string
ApiKey string
Temperature float64
ProviderType string // "ollama", "proxy"
}
type Chat struct {
@@ -135,6 +151,11 @@ func (gpt3 *Gpt3) DeleteKey() {
}
func (gpt3 *Gpt3) InitKey() {
// The ollama and proxy providers do not need an API key
if gpt3.ProviderType == "ollama" || gpt3.ProviderType == "proxy" {
return
}
load := gpt3.loadApiKey()
if load {
return
@@ -145,55 +166,51 @@ func (gpt3 *Gpt3) InitKey() {
gpt3.storeApiKey(apiKey)
}
func (gpt3 *Gpt3) Completions(ask string) string {
req, err := http.NewRequest("POST", gpt3.CompletionUrl, nil)
if err != nil {
panic(err)
}
req.Header.Set("Content-Type", "application/json")
// req.Header.Set("Authorization", "Bearer "+strings.TrimSpace(gpt3.ApiKey))
// NewGpt3 creates a new GPT instance with the selected provider
func NewGpt3(providerType, host, apiKey, model, prompt string, temperature float64, timeout int) *Gpt3 {
var provider Provider
switch providerType {
case "proxy":
provider = NewProxyAPIProvider(host, apiKey, model, timeout) // apiKey is used as the JWT token
case "ollama":
provider = NewOllamaProvider(host, model, temperature, timeout)
default:
provider = NewOllamaProvider(host, model, temperature, timeout)
}
return &Gpt3{
Provider: provider,
Prompt: prompt,
Model: model,
ApiKey: apiKey,
Temperature: temperature,
ProviderType: providerType,
}
}
// Completions is the updated method with support for multiple providers
func (gpt3 *Gpt3) Completions(ask string) string {
messages := []Chat{
{"system", gpt3.Prompt},
{"user", ask + "." + gpt3.Prompt},
}
payload := Gpt3Request{
Model: gpt3.Model,
Messages: messages,
Stream: false,
Options: Gpt3Options{gpt3.Temperature},
{"user", ask + ". " + gpt3.Prompt},
}
payloadJson, err := json.Marshal(payload)
response, err := gpt3.Provider.Chat(messages)
if err != nil {
panic(err)
}
req.Body = io.NopCloser(bytes.NewBuffer(payloadJson))
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
panic(err)
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
panic(err)
}
if resp.StatusCode != http.StatusOK {
fmt.Println(string(body))
fmt.Printf("Ошибка при выполнении запроса: %v\n", err)
return ""
}
// var res Gpt3Response
var res OllamaResponse
err = json.Unmarshal(body, &res)
if err != nil {
panic(err)
}
// return strings.TrimSpace(res.Choices[0].Message.Content)
return strings.TrimSpace(res.Message.Content)
return response
}
// Health checks the provider's status
func (gpt3 *Gpt3) Health() error {
return gpt3.Provider.Health()
}
// GetAvailableModels returns the list of available models
func (gpt3 *Gpt3) GetAvailableModels() ([]string, error) {
return gpt3.Provider.GetAvailableModels()
}
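
Taken together, the constructor and the `Provider` field turn `Gpt3` into a thin facade over the selected backend. A minimal usage sketch based on the signatures above (the host and prompt values are illustrative, not repository defaults; for the proxy provider the third argument would carry the JWT token instead of an API key, as noted in the constructor):

```go
package main

import (
	"fmt"

	"github.com/direct-dev-ru/linux-command-gpt/gpt"
)

func main() {
	// Assumes a local Ollama instance listening on the default port.
	client := gpt.NewGpt3("ollama", "http://localhost:11434/", "", "codegeex4",
		"Reply with linux command and nothing else.", 0.01, 120)

	if err := client.Health(); err != nil {
		fmt.Println("provider unavailable:", err)
		return
	}
	fmt.Println(client.Completions("extract linux-command-gpt.tar.gz"))
}
```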

203
gpt/prompts.go Normal file

@@ -0,0 +1,203 @@
package gpt
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
)
// SystemPrompt represents a system prompt
type SystemPrompt struct {
ID int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
Content string `json:"content"`
}
// PromptManager manages system prompts
type PromptManager struct {
Prompts []SystemPrompt
ConfigFile string
HomeDir string
}
// NewPromptManager creates a new prompt manager
func NewPromptManager(homeDir string) *PromptManager {
configFile := filepath.Join(homeDir, ".lcg_prompts.json")
pm := &PromptManager{
ConfigFile: configFile,
HomeDir: homeDir,
}
// Load the built-in prompts
pm.loadDefaultPrompts()
// Load user-defined prompts
pm.loadCustomPrompts()
return pm
}
// loadDefaultPrompts loads the built-in prompts
func (pm *PromptManager) loadDefaultPrompts() {
defaultPrompts := []SystemPrompt{
{
ID: 1,
Name: "linux-command",
Description: "Generate Linux commands (default)",
Content: "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.",
},
{
ID: 2,
Name: "linux-command-with-explanation",
Description: "Generate Linux commands with explanation",
Content: "Generate a Linux command and provide a brief explanation of what it does. Format: COMMAND: explanation",
},
{
ID: 3,
Name: "linux-command-safe",
Description: "Generate safe Linux commands",
Content: "Generate a safe Linux command that won't cause data loss or system damage. Reply with linux command and nothing else. Output with plain response - no need formatting.",
},
{
ID: 4,
Name: "linux-command-verbose",
Description: "Generate Linux commands with detailed explanation",
Content: "Generate a Linux command and provide detailed explanation including what each flag does and potential alternatives.",
},
{
ID: 5,
Name: "linux-command-simple",
Description: "Generate simple Linux commands",
Content: "Generate a simple, easy-to-understand Linux command. Avoid complex flags and options when possible.",
},
}
pm.Prompts = defaultPrompts
}
// loadCustomPrompts loads user-defined prompts from the file
func (pm *PromptManager) loadCustomPrompts() {
if _, err := os.Stat(pm.ConfigFile); os.IsNotExist(err) {
return
}
data, err := os.ReadFile(pm.ConfigFile)
if err != nil {
return
}
var customPrompts []SystemPrompt
if err := json.Unmarshal(data, &customPrompts); err != nil {
return
}
// Append user-defined prompts with new IDs
for i, prompt := range customPrompts {
prompt.ID = len(pm.Prompts) + i + 1
pm.Prompts = append(pm.Prompts, prompt)
}
}
// saveCustomPrompts persists the user-defined prompts
func (pm *PromptManager) saveCustomPrompts() error {
// Collect the user-defined prompts (ID > 5)
var customPrompts []SystemPrompt
for _, prompt := range pm.Prompts {
if prompt.ID > 5 {
customPrompts = append(customPrompts, prompt)
}
}
data, err := json.MarshalIndent(customPrompts, "", " ")
if err != nil {
return err
}
return os.WriteFile(pm.ConfigFile, data, 0644)
}
// GetPromptByID returns a prompt by its ID
func (pm *PromptManager) GetPromptByID(id int) (*SystemPrompt, error) {
for _, prompt := range pm.Prompts {
if prompt.ID == id {
return &prompt, nil
}
}
return nil, fmt.Errorf("промпт с ID %d не найден", id)
}
// GetPromptByName returns a prompt by its name
func (pm *PromptManager) GetPromptByName(name string) (*SystemPrompt, error) {
for _, prompt := range pm.Prompts {
if strings.EqualFold(prompt.Name, name) {
return &prompt, nil
}
}
return nil, fmt.Errorf("промпт с именем '%s' не найден", name)
}
// ListPrompts prints the list of all available prompts
func (pm *PromptManager) ListPrompts() {
fmt.Println("Available system prompts:")
fmt.Println("ID | Name | Description")
fmt.Println("---+---------------------------+--------------------------------")
for _, prompt := range pm.Prompts {
description := prompt.Description
if len(description) > 80 {
description = description[:77] + "..."
}
fmt.Printf("%-2d | %-25s | %s\n",
prompt.ID,
truncateString(prompt.Name, 25),
description)
}
}
// AddCustomPrompt adds a new user-defined prompt
func (pm *PromptManager) AddCustomPrompt(name, description, content string) error {
// Check that the name is unique
for _, prompt := range pm.Prompts {
if strings.EqualFold(prompt.Name, name) {
return fmt.Errorf("промпт с именем '%s' уже существует", name)
}
}
newPrompt := SystemPrompt{
ID: len(pm.Prompts) + 1,
Name: name,
Description: description,
Content: content,
}
pm.Prompts = append(pm.Prompts, newPrompt)
return pm.saveCustomPrompts()
}
// DeleteCustomPrompt deletes a user-defined prompt
func (pm *PromptManager) DeleteCustomPrompt(id int) error {
if id <= 5 {
return fmt.Errorf("нельзя удалить предустановленный промпт")
}
for i, prompt := range pm.Prompts {
if prompt.ID == id {
pm.Prompts = append(pm.Prompts[:i], pm.Prompts[i+1:]...)
return pm.saveCustomPrompts()
}
}
return fmt.Errorf("промпт с ID %d не найден", id)
}
// truncateString trims a string to the given length
func truncateString(s string, maxLen int) string {
if len(s) <= maxLen {
return s
}
return s[:maxLen-3] + "..."
}
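
A short sketch of how the manager might be driven programmatically; the prompt name, description, and content below are invented for illustration:

```go
package main

import (
	"fmt"
	"os/user"

	"github.com/direct-dev-ru/linux-command-gpt/gpt"
)

func main() {
	u, _ := user.Current()
	pm := gpt.NewPromptManager(u.HomeDir)

	// Built-in prompts occupy IDs 1-5; custom ones are appended after them
	// and persisted to ~/.lcg_prompts.json.
	if p, err := pm.GetPromptByID(3); err == nil {
		fmt.Println(p.Name, "->", p.Content)
	}

	if err := pm.AddCustomPrompt("docker-only", "Docker-focused commands",
		"Reply with a docker command and nothing else."); err != nil {
		fmt.Println(err)
	}
	pm.ListPrompts()
}
```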

296
gpt/providers.go Normal file

@@ -0,0 +1,296 @@
package gpt
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"time"
)
// Provider is the interface for working with different LLM providers
type Provider interface {
Chat(messages []Chat) (string, error)
Health() error
GetAvailableModels() ([]string, error)
}
// ProxyAPIProvider is the implementation for the proxy API (gin-restapi)
type ProxyAPIProvider struct {
BaseURL string
JWTToken string
Model string
HTTPClient *http.Client
}
// ProxyChatRequest is the request structure for the proxy API
type ProxyChatRequest struct {
Messages []Chat `json:"messages"`
Model string `json:"model,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
Stream bool `json:"stream,omitempty"`
SystemContent string `json:"system_content,omitempty"`
UserContent string `json:"user_content,omitempty"`
RandomWords []string `json:"random_words,omitempty"`
FallbackString string `json:"fallback_string,omitempty"`
}
// ProxyChatResponse is the response structure from the proxy API
type ProxyChatResponse struct {
Response string `json:"response"`
Usage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage,omitempty"`
Error string `json:"error,omitempty"`
Model string `json:"model,omitempty"`
Timeout int `json:"timeout_seconds,omitempty"`
}
// ProxyHealthResponse is the health check response structure
type ProxyHealthResponse struct {
Status string `json:"status"`
Message string `json:"message"`
Model string `json:"default_model,omitempty"`
Timeout int `json:"default_timeout_seconds,omitempty"`
}
// OllamaProvider is the implementation for the Ollama API
type OllamaProvider struct {
BaseURL string
Model string
Temperature float64
HTTPClient *http.Client
}
// OllamaTagsResponse is the response structure for listing models
type OllamaTagsResponse struct {
Models []struct {
Name string `json:"name"`
ModifiedAt string `json:"modified_at"`
Size int64 `json:"size"`
} `json:"models"`
}
func NewProxyAPIProvider(baseURL, jwtToken, model string, timeout int) *ProxyAPIProvider {
return &ProxyAPIProvider{
BaseURL: strings.TrimSuffix(baseURL, "/"),
JWTToken: jwtToken,
Model: model,
HTTPClient: &http.Client{Timeout: time.Duration(timeout) * time.Second},
}
}
func NewOllamaProvider(baseURL, model string, temperature float64, timeout int) *OllamaProvider {
return &OllamaProvider{
BaseURL: strings.TrimSuffix(baseURL, "/"),
Model: model,
Temperature: temperature,
HTTPClient: &http.Client{Timeout: time.Duration(timeout) * time.Second},
}
}
// Chat for ProxyAPIProvider
func (p *ProxyAPIProvider) Chat(messages []Chat) (string, error) {
// Use the main endpoint /api/v1/protected/sberchat/chat
payload := ProxyChatRequest{
Messages: messages,
Model: p.Model,
Temperature: 0.5,
TopP: 0.5,
Stream: false,
RandomWords: []string{"linux", "command", "gpt"},
FallbackString: "I'm sorry, I can't help with that. Please try again.",
}
jsonData, err := json.Marshal(payload)
if err != nil {
return "", fmt.Errorf("ошибка маршалинга запроса: %w", err)
}
req, err := http.NewRequest("POST", p.BaseURL+"/api/v1/protected/sberchat/chat", bytes.NewBuffer(jsonData))
if err != nil {
return "", fmt.Errorf("ошибка создания запроса: %w", err)
}
req.Header.Set("Content-Type", "application/json")
if p.JWTToken != "" {
req.Header.Set("Authorization", "Bearer "+p.JWTToken)
}
resp, err := p.HTTPClient.Do(req)
if err != nil {
return "", fmt.Errorf("ошибка выполнения запроса: %w", err)
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return "", fmt.Errorf("ошибка чтения ответа: %w", err)
}
if resp.StatusCode != http.StatusOK {
return "", fmt.Errorf("ошибка API: %d - %s", resp.StatusCode, string(body))
}
var response ProxyChatResponse
if err := json.Unmarshal(body, &response); err != nil {
return "", fmt.Errorf("ошибка парсинга ответа: %w", err)
}
if response.Error != "" {
return "", fmt.Errorf("ошибка прокси API: %s", response.Error)
}
if response.Response == "" {
return "", fmt.Errorf("пустой ответ от API")
}
return strings.TrimSpace(response.Response), nil
}
// Health for ProxyAPIProvider
func (p *ProxyAPIProvider) Health() error {
req, err := http.NewRequest("GET", p.BaseURL+"/api/v1/protected/sberchat/health", nil)
if err != nil {
return fmt.Errorf("ошибка создания health check запроса: %w", err)
}
if p.JWTToken != "" {
req.Header.Set("Authorization", "Bearer "+p.JWTToken)
}
resp, err := p.HTTPClient.Do(req)
if err != nil {
return fmt.Errorf("ошибка health check: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("health check failed: %d", resp.StatusCode)
}
var healthResponse ProxyHealthResponse
body, err := io.ReadAll(resp.Body)
if err != nil {
return fmt.Errorf("ошибка чтения health check ответа: %w", err)
}
if err := json.Unmarshal(body, &healthResponse); err != nil {
return fmt.Errorf("ошибка парсинга health check ответа: %w", err)
}
if healthResponse.Status != "ok" {
return fmt.Errorf("health check status: %s - %s", healthResponse.Status, healthResponse.Message)
}
return nil
}
// Chat for OllamaProvider
func (o *OllamaProvider) Chat(messages []Chat) (string, error) {
payload := Gpt3Request{
Model: o.Model,
Messages: messages,
Stream: false,
Options: Gpt3Options{o.Temperature},
}
jsonData, err := json.Marshal(payload)
if err != nil {
return "", fmt.Errorf("ошибка маршалинга запроса: %w", err)
}
req, err := http.NewRequest("POST", o.BaseURL+"/api/chat", bytes.NewBuffer(jsonData))
if err != nil {
return "", fmt.Errorf("ошибка создания запроса: %w", err)
}
req.Header.Set("Content-Type", "application/json")
resp, err := o.HTTPClient.Do(req)
if err != nil {
return "", fmt.Errorf("ошибка выполнения запроса: %w", err)
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return "", fmt.Errorf("ошибка чтения ответа: %w", err)
}
if resp.StatusCode != http.StatusOK {
return "", fmt.Errorf("ошибка API: %d - %s", resp.StatusCode, string(body))
}
var response OllamaResponse
if err := json.Unmarshal(body, &response); err != nil {
return "", fmt.Errorf("ошибка парсинга ответа: %w", err)
}
return strings.TrimSpace(response.Message.Content), nil
}
// Health for OllamaProvider
func (o *OllamaProvider) Health() error {
req, err := http.NewRequest("GET", o.BaseURL+"/api/tags", nil)
if err != nil {
return fmt.Errorf("ошибка создания health check запроса: %w", err)
}
resp, err := o.HTTPClient.Do(req)
if err != nil {
return fmt.Errorf("ошибка health check: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("health check failed: %d", resp.StatusCode)
}
return nil
}
// GetAvailableModels for ProxyAPIProvider returns a fixed list
func (p *ProxyAPIProvider) GetAvailableModels() ([]string, error) {
return []string{"GigaChat-2", "GigaChat-2-Pro", "GigaChat-2-Max"}, nil
}
// GetAvailableModels returns the list of available models for the provider
func (o *OllamaProvider) GetAvailableModels() ([]string, error) {
req, err := http.NewRequest("GET", o.BaseURL+"/api/tags", nil)
if err != nil {
return nil, fmt.Errorf("ошибка создания запроса: %w", err)
}
resp, err := o.HTTPClient.Do(req)
if err != nil {
return nil, fmt.Errorf("ошибка получения моделей: %w", err)
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("ошибка чтения ответа: %w", err)
}
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("ошибка API: %d - %s", resp.StatusCode, string(body))
}
var response OllamaTagsResponse
if err := json.Unmarshal(body, &response); err != nil {
return nil, fmt.Errorf("ошибка парсинга ответа: %w", err)
}
var models []string
for _, model := range response.Models {
models = append(models, model.Name)
}
return models, nil
}
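
Because both backends sit behind the three-method `Provider` interface, adding another LLM backend only means implementing `Chat`, `Health`, and `GetAvailableModels`. A hypothetical stub (not part of this change) showing the minimum required surface:

```go
package gpt

import "fmt"

// EchoProvider is a hypothetical stub satisfying the Provider interface,
// e.g. for tests; it does not exist in the repository.
type EchoProvider struct{}

func (e *EchoProvider) Chat(messages []Chat) (string, error) {
	if len(messages) == 0 {
		return "", fmt.Errorf("no messages")
	}
	// Echo the last message back; a real provider would call its API here.
	return fmt.Sprintf("%v", messages[len(messages)-1]), nil
}

func (e *EchoProvider) Health() error { return nil }

func (e *EchoProvider) GetAvailableModels() ([]string, error) {
	return []string{"echo"}, nil
}
```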

760
main.go

@@ -5,62 +5,566 @@ import (
"fmt"
"math"
"os"
"os/exec"
"os/user"
"path"
"strconv"
"strings"
"time"
"github.com/atotto/clipboard"
"github.com/direct-dev-ru/linux-command-gpt/gpt"
"github.com/direct-dev-ru/linux-command-gpt/reader"
"github.com/urfave/cli/v2"
)
//go:embed VERSION.txt
var Version string
var cwd, _ = os.Getwd()
var (
cwd, _ = os.Getwd()
HOST = getEnv("LCG_HOST", "http://192.168.87.108:11434/")
COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat") // relative part of endpoint
COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat")
MODEL = getEnv("LCG_MODEL", "codegeex4")
PROMPT = getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.")
API_KEY_FILE = getEnv("LCG_API_KEY_FILE", ".openai_api_key")
RESULT_FOLDER = getEnv("LCG_RESULT_FOLDER", path.Join(cwd, "gpt_results"))
// HOST = "https://api.openai.com/v1/"
// COMPLETIONS = "chat/completions"
// MODEL = "gpt-4o-mini"
// MODEL = "codellama:13b"
// This file is created in the user's home directory
// Example: /home/username/.openai_api_key
// API_KEY_FILE = ".openai_api_key"
HELP = `
Usage: lcg [options]
--help -h output usage information
--version -v output the version number
--file -f read command from file
--update-key -u update the API key
--delete-key -d delete the API key
Example Usage: lcg I want to extract linux-command-gpt.tar.gz file
Example Usage: lcg --file /path/to/file.json I want to print object questions with jq
`
VERSION = Version
CMD_HELP = 100
CMD_VERSION = 101
CMD_UPDATE = 102
CMD_DELETE = 103
CMD_COMPLETION = 110
PROVIDER_TYPE = getEnv("LCG_PROVIDER", "ollama") // "ollama", "proxy"
JWT_TOKEN = getEnv("LCG_JWT_TOKEN", "")
PROMPT_ID = getEnv("LCG_PROMPT_ID", "1") // default prompt ID
TIMEOUT = getEnv("LCG_TIMEOUT", "120") // default timeout in seconds
)
// getEnv retrieves the value of the environment variable `key` or returns `defaultValue` if not set.
const (
colorRed = "\033[31m"
colorGreen = "\033[32m"
colorYellow = "\033[33m"
colorBlue = "\033[34m"
colorPurple = "\033[35m"
colorCyan = "\033[36m"
colorReset = "\033[0m"
colorBold = "\033[1m"
)
func main() {
app := &cli.App{
Name: "lcg",
Usage: "Linux Command GPT - Генерация Linux команд из описаний",
Version: Version,
Commands: getCommands(),
UsageText: `
lcg [опции] <описание команды>
Примеры:
lcg "хочу извлечь файл linux-command-gpt.tar.gz"
lcg --file /path/to/file.txt "хочу вывести все директории с помощью ls"
`,
Description: `
Linux Command GPT - инструмент для генерации Linux команд из описаний на естественном языке.
Поддерживает чтение частей промпта из файлов и позволяет сохранять, копировать или перегенерировать результаты.
Переменные окружения:
LCG_HOST Endpoint для LLM API (по умолчанию: http://192.168.87.108:11434/)
LCG_MODEL Название модели (по умолчанию: codegeex4)
LCG_PROMPT Текст промпта по умолчанию
LCG_PROVIDER Тип провайдера: "ollama" или "proxy" (по умолчанию: ollama)
LCG_JWT_TOKEN JWT токен для proxy провайдера
`,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "file",
Aliases: []string{"f"},
Usage: "Read part of the command from a file",
},
&cli.StringFlag{
Name: "sys",
Aliases: []string{"s"},
Usage: "System prompt content or ID",
DefaultText: "Use prompt ID from LCG_PROMPT_ID or default prompt",
Value: "",
},
&cli.IntFlag{
Name: "prompt-id",
Aliases: []string{"pid"},
Usage: "System prompt ID (1-5 for default prompts)",
DefaultText: "1",
Value: 1,
},
&cli.IntFlag{
Name: "timeout",
Aliases: []string{"t"},
Usage: "Request timeout in seconds",
DefaultText: "120",
Value: 120,
},
},
Action: func(c *cli.Context) error {
file := c.String("file")
system := c.String("sys")
promptID := c.Int("prompt-id")
timeout := c.Int("timeout")
args := c.Args().Slice()
if len(args) == 0 {
cli.ShowAppHelp(c)
showTips()
return nil
}
// If a prompt-id is given, load the corresponding prompt
if system == "" && promptID > 0 {
currentUser, _ := user.Current()
pm := gpt.NewPromptManager(currentUser.HomeDir)
if prompt, err := pm.GetPromptByID(promptID); err == nil {
system = prompt.Content
} else {
fmt.Printf("Warning: Prompt ID %d not found, using default prompt\n", promptID)
}
}
executeMain(file, system, strings.Join(args, " "), timeout)
return nil
},
}
cli.VersionFlag = &cli.BoolFlag{
Name: "version",
Aliases: []string{"V", "v"},
Usage: "prints out version",
}
cli.VersionPrinter = func(cCtx *cli.Context) {
fmt.Printf("%s\n", cCtx.App.Version)
}
if err := app.Run(os.Args); err != nil {
fmt.Println("Error:", err)
os.Exit(1)
}
}
func getCommands() []*cli.Command {
return []*cli.Command{
{
Name: "update-key",
Aliases: []string{"u"},
Usage: "Update the API key",
Action: func(c *cli.Context) error {
if PROVIDER_TYPE == "ollama" || PROVIDER_TYPE == "proxy" {
fmt.Println("API key is not needed for ollama and proxy providers")
return nil
}
timeout := 120 // default timeout
if t, err := strconv.Atoi(TIMEOUT); err == nil {
timeout = t
}
gpt3 := initGPT(PROMPT, timeout)
gpt3.UpdateKey()
fmt.Println("API key updated.")
return nil
},
},
{
Name: "delete-key",
Aliases: []string{"d"},
Usage: "Delete the API key",
Action: func(c *cli.Context) error {
if PROVIDER_TYPE == "ollama" || PROVIDER_TYPE == "proxy" {
fmt.Println("API key is not needed for ollama and proxy providers")
return nil
}
timeout := 120 // default timeout
if t, err := strconv.Atoi(TIMEOUT); err == nil {
timeout = t
}
gpt3 := initGPT(PROMPT, timeout)
gpt3.DeleteKey()
fmt.Println("API key deleted.")
return nil
},
},
{
Name: "update-jwt",
Aliases: []string{"j"},
Usage: "Update the JWT token for proxy API",
Action: func(c *cli.Context) error {
if PROVIDER_TYPE != "proxy" {
fmt.Println("JWT token is only needed for proxy provider")
return nil
}
var jwtToken string
fmt.Print("JWT Token: ")
fmt.Scanln(&jwtToken)
currentUser, _ := user.Current()
jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
if err := os.WriteFile(jwtFile, []byte(strings.TrimSpace(jwtToken)), 0600); err != nil {
fmt.Printf("Ошибка сохранения JWT токена: %v\n", err)
return err
}
fmt.Println("JWT token updated.")
return nil
},
},
{
Name: "delete-jwt",
Aliases: []string{"dj"},
Usage: "Delete the JWT token for proxy API",
Action: func(c *cli.Context) error {
if PROVIDER_TYPE != "proxy" {
fmt.Println("JWT token is only needed for proxy provider")
return nil
}
currentUser, _ := user.Current()
jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
if err := os.Remove(jwtFile); err != nil && !os.IsNotExist(err) {
fmt.Printf("Ошибка удаления JWT токена: %v\n", err)
return err
}
fmt.Println("JWT token deleted.")
return nil
},
},
{
Name: "models",
Aliases: []string{"m"},
Usage: "Show available models",
Action: func(c *cli.Context) error {
timeout := 120 // default timeout
if t, err := strconv.Atoi(TIMEOUT); err == nil {
timeout = t
}
gpt3 := initGPT(PROMPT, timeout)
models, err := gpt3.GetAvailableModels()
if err != nil {
fmt.Printf("Ошибка получения моделей: %v\n", err)
return err
}
fmt.Printf("Доступные модели для провайдера %s:\n", PROVIDER_TYPE)
for i, model := range models {
fmt.Printf(" %d. %s\n", i+1, model)
}
return nil
},
},
{
Name: "health",
Aliases: []string{"he"}, // Изменено с "h" на "he"
Usage: "Check API health",
Action: func(c *cli.Context) error {
timeout := 120 // default timeout
if t, err := strconv.Atoi(TIMEOUT); err == nil {
timeout = t
}
gpt3 := initGPT(PROMPT, timeout)
if err := gpt3.Health(); err != nil {
fmt.Printf("Health check failed: %v\n", err)
return err
}
fmt.Println("API is healthy.")
return nil
},
},
{
Name: "config",
Aliases: []string{"co"}, // Изменено с "c" на "co"
Usage: "Show current configuration",
Action: func(c *cli.Context) error {
fmt.Printf("Provider: %s\n", PROVIDER_TYPE)
fmt.Printf("Host: %s\n", HOST)
fmt.Printf("Model: %s\n", MODEL)
fmt.Printf("Prompt: %s\n", PROMPT)
fmt.Printf("Timeout: %s seconds\n", TIMEOUT)
if PROVIDER_TYPE == "proxy" {
fmt.Printf("JWT Token: %s\n", func() string {
if JWT_TOKEN != "" {
return "***set***"
}
currentUser, _ := user.Current()
jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
if _, err := os.Stat(jwtFile); err == nil {
return "***from file***"
}
return "***not set***"
}())
}
return nil
},
},
{
Name: "history",
Aliases: []string{"hist"},
Usage: "Show command history",
Action: func(c *cli.Context) error {
showHistory()
return nil
},
},
{
Name: "prompts",
Aliases: []string{"p"},
Usage: "Manage system prompts",
Subcommands: []*cli.Command{
{
Name: "list",
Aliases: []string{"l"},
Usage: "List all available prompts",
Action: func(c *cli.Context) error {
currentUser, _ := user.Current()
pm := gpt.NewPromptManager(currentUser.HomeDir)
pm.ListPrompts()
return nil
},
},
{
Name: "add",
Aliases: []string{"a"},
Usage: "Add a new custom prompt",
Action: func(c *cli.Context) error {
currentUser, _ := user.Current()
pm := gpt.NewPromptManager(currentUser.HomeDir)
var name, description, content string
fmt.Print("Название промпта: ")
fmt.Scanln(&name)
fmt.Print("Описание: ")
fmt.Scanln(&description)
fmt.Print("Содержание промпта: ")
fmt.Scanln(&content)
if err := pm.AddCustomPrompt(name, description, content); err != nil {
fmt.Printf("Ошибка добавления промпта: %v\n", err)
return err
}
fmt.Println("Промпт успешно добавлен!")
return nil
},
},
{
Name: "delete",
Aliases: []string{"d"},
Usage: "Delete a custom prompt",
Action: func(c *cli.Context) error {
if c.NArg() == 0 {
fmt.Println("Укажите ID промпта для удаления")
return nil
}
var id int
if _, err := fmt.Sscanf(c.Args().First(), "%d", &id); err != nil {
fmt.Println("Неверный ID промпта")
return err
}
currentUser, _ := user.Current()
pm := gpt.NewPromptManager(currentUser.HomeDir)
if err := pm.DeleteCustomPrompt(id); err != nil {
fmt.Printf("Ошибка удаления промпта: %v\n", err)
return err
}
fmt.Println("Промпт успешно удален!")
return nil
},
},
},
},
{
Name: "test-prompt",
Aliases: []string{"tp"},
Usage: "Test a specific prompt ID",
Action: func(c *cli.Context) error {
if c.NArg() == 0 {
fmt.Println("Usage: lcg test-prompt <prompt-id> <command>")
return nil
}
var promptID int
if _, err := fmt.Sscanf(c.Args().First(), "%d", &promptID); err != nil {
fmt.Println("Invalid prompt ID")
return err
}
currentUser, _ := user.Current()
pm := gpt.NewPromptManager(currentUser.HomeDir)
prompt, err := pm.GetPromptByID(promptID)
if err != nil {
fmt.Printf("Prompt ID %d not found\n", promptID)
return err
}
fmt.Printf("Testing prompt ID %d: %s\n", promptID, prompt.Name)
fmt.Printf("Description: %s\n", prompt.Description)
fmt.Printf("Content: %s\n", prompt.Content)
if len(c.Args().Slice()) > 1 {
command := strings.Join(c.Args().Slice()[1:], " ")
fmt.Printf("\nTesting with command: %s\n", command)
timeout := 120 // default timeout
if t, err := strconv.Atoi(TIMEOUT); err == nil {
timeout = t
}
executeMain("", prompt.Content, command, timeout)
}
return nil
},
},
}
}
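
One recurring detail in the commands above: the `timeout := 120` plus `strconv.Atoi(TIMEOUT)` fallback is repeated in each Action. A small helper like the following (hypothetical, not in the diff) would capture the pattern once:

```go
// getTimeoutSeconds parses LCG_TIMEOUT, falling back to 120 seconds
// when the variable is unset or not a number.
func getTimeoutSeconds() int {
	if t, err := strconv.Atoi(TIMEOUT); err == nil {
		return t
	}
	return 120
}
```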
func executeMain(file, system, commandInput string, timeout int) {
if file != "" {
if err := reader.FileToPrompt(&commandInput, file); err != nil {
printColored(fmt.Sprintf("❌ Ошибка чтения файла: %v\n", err), colorRed)
return
}
}
// If system is empty, use the default prompt
if system == "" {
system = PROMPT
}
if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
if err := os.MkdirAll(RESULT_FOLDER, 0755); err != nil {
printColored(fmt.Sprintf("❌ Ошибка создания папки результатов: %v\n", err), colorRed)
return
}
}
gpt3 := initGPT(system, timeout)
printColored("🤖 Запрос: ", colorCyan)
fmt.Printf("%s\n", commandInput)
response, elapsed := getCommand(gpt3, commandInput)
if response == "" {
printColored("❌ Ответ не получен. Проверьте подключение к API.\n", colorRed)
return
}
printColored(fmt.Sprintf("✅ Выполнено за %.2f сек\n", elapsed), colorGreen)
printColored("\n📋 Команда:\n", colorYellow)
printColored(fmt.Sprintf(" %s\n\n", response), colorBold+colorGreen)
saveToHistory(commandInput, response)
handlePostResponse(response, gpt3, system, commandInput)
}
func initGPT(system string, timeout int) gpt.Gpt3 {
currentUser, _ := user.Current()
// Load the JWT token depending on the provider
var jwtToken string
if PROVIDER_TYPE == "proxy" {
jwtToken = JWT_TOKEN
if jwtToken == "" {
// Try to load it from the file
jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
if data, err := os.ReadFile(jwtFile); err == nil {
jwtToken = strings.TrimSpace(string(data))
}
}
}
return *gpt.NewGpt3(PROVIDER_TYPE, HOST, jwtToken, MODEL, system, 0.01, timeout)
}
func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
gpt3.InitKey()
start := time.Now()
done := make(chan bool)
go func() {
loadingChars := []string{"⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"}
i := 0
for {
select {
case <-done:
fmt.Printf("\r%s", strings.Repeat(" ", 50))
fmt.Print("\r")
return
default:
fmt.Printf("\r%s Обрабатываю запрос...", loadingChars[i])
i = (i + 1) % len(loadingChars)
time.Sleep(100 * time.Millisecond)
}
}
}()
response := gpt3.Completions(cmd)
done <- true
elapsed := math.Round(time.Since(start).Seconds()*100) / 100
return response, elapsed
}
func handlePostResponse(response string, gpt3 gpt.Gpt3, system, cmd string) {
fmt.Printf("Действия: (c)копировать, (s)сохранить, (r)перегенерировать, (e)выполнить, (n)ничего: ")
var choice string
fmt.Scanln(&choice)
switch strings.ToLower(choice) {
case "c":
clipboard.WriteAll(response)
fmt.Println("✅ Команда скопирована в буфер обмена")
case "s":
saveResponse(response, gpt3, cmd)
case "r":
fmt.Println("🔄 Перегенерирую...")
executeMain("", system, cmd, 120) // Use default timeout for regeneration
case "e":
executeCommand(response)
default:
fmt.Println(" До свидания!")
}
}
func saveResponse(response string, gpt3 gpt.Gpt3, cmd string) {
timestamp := time.Now().Format("2006-01-02_15-04-05")
filename := fmt.Sprintf("gpt_request_%s_%s.md", gpt3.Model, timestamp)
filePath := path.Join(RESULT_FOLDER, filename)
content := fmt.Sprintf("## Prompt:\n\n%s\n\n## Response:\n\n%s\n", cmd+". "+gpt3.Prompt, response)
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
fmt.Println("Failed to save response:", err)
} else {
fmt.Printf("Response saved to %s\n", filePath)
}
}
func executeCommand(command string) {
fmt.Printf("🚀 Выполняю: %s\n", command)
fmt.Print("Продолжить? (y/N): ")
var confirm string
fmt.Scanln(&confirm)
if strings.ToLower(confirm) == "y" || strings.ToLower(confirm) == "yes" {
cmd := exec.Command("bash", "-c", command)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
fmt.Printf("❌ Ошибка выполнения: %v\n", err)
} else {
fmt.Println("✅ Команда выполнена успешно")
}
} else {
fmt.Println("❌ Выполнение отменено")
}
}
func getEnv(key, defaultValue string) string {
if value, exists := os.LookupEnv(key); exists {
return value
@@ -68,149 +572,55 @@ func getEnv(key, defaultValue string) string {
return defaultValue
}
func handleCommand(cmd string) int {
if cmd == "" || cmd == "--help" || cmd == "-h" {
return CMD_HELP
}
if cmd == "--version" || cmd == "-v" {
return CMD_VERSION
}
if cmd == "--update-key" || cmd == "-u" {
return CMD_UPDATE
}
if cmd == "--delete-key" || cmd == "-d" {
return CMD_DELETE
}
return CMD_COMPLETION
type CommandHistory struct {
Command string
Response string
Timestamp time.Time
}
func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
gpt3.InitKey()
s := time.Now()
done := make(chan bool)
go func() {
loadingChars := []rune{'-', '\\', '|', '/'}
i := 0
for {
select {
case <-done:
fmt.Printf("\r")
return
default:
fmt.Printf("\rLoading %c", loadingChars[i])
i = (i + 1) % len(loadingChars)
time.Sleep(30 * time.Millisecond)
}
}
}()
var commandHistory []CommandHistory
r := gpt3.Completions(cmd)
done <- true
elapsed := time.Since(s).Seconds()
elapsed = math.Round(elapsed*100) / 100
func saveToHistory(cmd, response string) {
commandHistory = append(commandHistory, CommandHistory{
Command: cmd,
Response: response,
Timestamp: time.Now(),
})
if r == "" {
return "", elapsed
}
return r, elapsed
}
func main() {
currentUser, err := user.Current()
if err != nil {
panic(err)
}
args := os.Args
cmd := ""
file := ""
if len(args) > 1 {
start := 1
if args[1] == "--file" || args[1] == "-f" {
file = args[2]
start = 3
}
cmd = strings.Join(args[start:], " ")
}
if file != "" {
err := reader.FileToPrompt(&cmd, file)
if err != nil {
fmt.Println(err)
return
}
}
if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
os.MkdirAll(RESULT_FOLDER, 0755)
}
h := handleCommand(cmd)
if h == CMD_HELP {
fmt.Println(HELP)
return
}
if h == CMD_VERSION {
fmt.Println(VERSION)
return
}
gpt3 := gpt.Gpt3{
CompletionUrl: HOST + COMPLETIONS,
Model: MODEL,
Prompt: PROMPT,
HomeDir: currentUser.HomeDir,
ApiKeyFile: API_KEY_FILE,
Temperature: 0.01,
}
if h == CMD_UPDATE {
gpt3.UpdateKey()
return
}
if h == CMD_DELETE {
gpt3.DeleteKey()
return
}
c := "R"
r := ""
elapsed := 0.0
for c == "R" || c == "r" {
r, elapsed = getCommand(gpt3, cmd)
c = "N"
fmt.Printf("Completed in %v seconds\n\n", elapsed)
fmt.Println(r)
fmt.Print("\nDo you want to (c)opy, (s)ave to file, (r)egenerate, or take (N)o action on the command? (c/r/N): ")
fmt.Scanln(&c)
// no action
if c == "N" || c == "n" {
return
}
}
if r == "" {
return
}
// Copy to clipboard
if c == "C" || c == "c" {
clipboard.WriteAll(r)
fmt.Println("\033[33mCopied to clipboard")
return
}
if c == "S" || c == "s" {
timestamp := time.Now().Format("2006-01-02_15-04-05") // Format: YYYY-MM-DD_HH-MM-SS
filename := fmt.Sprintf("gpt_request_%s(%s).md", timestamp, gpt3.Model)
filePath := path.Join(RESULT_FOLDER, filename)
resultString := fmt.Sprintf("## Prompt:\n\n%s\n\n------------------\n\n## Response:\n\n%s\n\n", cmd+". "+gpt3.Prompt, r)
os.WriteFile(filePath, []byte(resultString), 0644)
fmt.Println("\033[33mSaved to file")
return
// Cap the history at 100 commands
if len(commandHistory) > 100 {
commandHistory = commandHistory[1:]
}
}
func showHistory() {
if len(commandHistory) == 0 {
printColored("📝 История пуста\n", colorYellow)
return
}
printColored("📝 История команд:\n", colorYellow)
for i, hist := range commandHistory {
fmt.Printf("%d. %s → %s (%s)\n",
i+1,
hist.Command,
hist.Response,
hist.Timestamp.Format("15:04:05"))
}
}
func printColored(text, color string) {
fmt.Printf("%s%s%s", color, text, colorReset)
}
func showTips() {
printColored("💡 Подсказки:\n", colorCyan)
fmt.Println(" • Используйте --file для чтения из файла")
fmt.Println(" • Используйте --sys для изменения системного промпта")
fmt.Println(" • Используйте --prompt-id для выбора предустановленного промпта")
fmt.Println(" • Используйте --timeout для установки таймаута запроса")
fmt.Println(" • Команда 'prompts list' покажет все доступные промпты")
fmt.Println(" • Команда 'history' покажет историю запросов")
fmt.Println(" • Команда 'config' покажет текущие настройки")
fmt.Println(" • Команда 'health' проверит доступность API")
}


@@ -1,33 +1 @@
package main
import (
"testing"
)
func TestHandleCommand(t *testing.T) {
tests := []struct {
command string
expected int
}{
{"", CMD_HELP},
{"--help", CMD_HELP},
{"-h", CMD_HELP},
{"--version", CMD_VERSION},
{"-v", CMD_VERSION},
{"--update-key", CMD_UPDATE},
{"-u", CMD_UPDATE},
{"--delete-key", CMD_DELETE},
{"-d", CMD_DELETE},
{"random strings", CMD_COMPLETION},
{"--test", CMD_COMPLETION},
{"-test", CMD_COMPLETION},
{"how to extract test.tar.gz", CMD_COMPLETION},
}
for _, test := range tests {
result := handleCommand(test.command)
if result != test.expected {
t.Error("Expected", test.expected, "got", result)
}
}
}


@@ -4,7 +4,7 @@
REPO=kuznetcovay/go-lcg
VERSION=$1
if [ -z "$VERSION" ]; then
VERSION=v1.0.1
VERSION=v1.1.0
fi
BRANCH=main
@@ -16,7 +16,7 @@ export GOCACHE="${HOME}/.cache/go-build"
CURRENT_BRANCH=$(git branch --show-current)
# Function to restore the original branch
function restore_branch {
restore_branch() {
echo "Restoring original branch: ${CURRENT_BRANCH}"
git checkout "${CURRENT_BRANCH}"
}


@@ -4,5 +4,5 @@ docker build -f Dockerfiles/LocalCompile/Dockerfile --target bin-linux --output
docker build -f Dockerfiles/LocalCompile/Dockerfile --target bin-linux --output bin-linux-arm64/ --platform linux/arm64 .
# in linux setuid
# sudo chown root:root bin-linux/go-ansible-vault
# sudo chmod +s bin-linux/go-ansible-vault
# sudo chown root:root bin-linux/lcg
# sudo chmod +s bin-linux/lcg


@@ -1,13 +1,13 @@
#!/bin/bash
REPO=kuznetcovay/go-ansible-vault
REPO=kuznetcovay/go-lcg
VERSION=$1
if [ -z "$VERSION" ]; then
VERSION=v1.0.8
fi
BRANCH=main
echo ${VERSION} > VERSION.txt
echo "${VERSION}" > VERSION.txt
export GOCACHE="${HOME}/.cache/go-build"
# Save the current branch
@@ -34,7 +34,7 @@ if ! go test -v -run=^Test; then
fi
# Push multi-platform images
docker buildx build --push --platform linux/amd64,linux/arm64 -t ${REPO}:${VERSION} . ||
docker buildx build --push --platform linux/amd64,linux/arm64 -t ${REPO}:"${VERSION}" . ||
{
echo "docker buildx build --push failed. Exiting with code 1."
exit 1


@@ -0,0 +1,6 @@
#!/usr/bin/bash
# shellcheck disable=SC2034
LCG_PROVIDER=proxy LCG_HOST=http://localhost:8080 LCG_MODEL=GigaChat-2-Max LCG_JWT_TOKEN=$(go-ansible-vault -a -i shell-code/jwt.admin.token get -m 'JWT_TOKEN' -q) go run . $1 $2 $3 $4 $5 $6 $7 $8 $9
LCG_PROVIDER=proxy LCG_HOST=https://direct-dev.ru LCG_MODEL=GigaChat-2-Max LCG_JWT_TOKEN=$(go-ansible-vault --key $(cat ~/.config/gak) -i ~/.config/jwt.direct-dev.ru get -m 'JWT_TOKEN' -q) go run . [your question here]


@@ -5,28 +5,18 @@ VERSION_FILE="VERSION.txt"
GITHUB_TOKEN="${GITHUB_TOKEN}" # Replace with your GitHub token
REPO="Direct-Dev-Ru/binaries.git" # Replace with your GitHub username/repo
REPO="direct-dev-ru/binaries" # Replace with your GitHub username/repo
TAG=go-ansible-vault.$(cat "$VERSION_FILE")
TAG=lcg.$(cat "$VERSION_FILE")
echo $TAG
echo TAG: $TAG
RELEASE_NAME="Binaries ${TAG}" # Replace with your release title
RELEASE_DIR="/home/su/projects/golang/ansible-vault/binaries-for-upload"
# Create a new release
# response=$(curl -s -X POST \
# -H "Authorization: Bearer ${GITHUB_TOKEN}" \
# -H "Accept: application/vnd.github+json" \
# -H "X-GitHub-Api-Version: 2022-11-28" \
# https://api.github.com/repos/$REPO/releases \
# -d "{\"tag_name\": \"$TAG\", \"name\": \"$RELEASE_NAME\"}")
RELEASE_DIR="/home/su/projects/golang/linux-command-gpt/binaries-for-upload"
body="{\"tag_name\":\"${TAG}\", \"target_commitish\":\"main\", \"name\":\"${TAG}\", \
\"body\":\"${TAG}\", \"draft\":false, \"prerelease\":false, \"generate_release_notes\":false}"
echo $body
echo BODY: $body
response=$(curl -L -X POST \
-H "Accept: application/vnd.github+json" \
@@ -35,7 +25,6 @@ response=$(curl -L -X POST \
https://api.github.com/repos/direct-dev-ru/binaries/releases \
-d $body)
echo $response
# Extract the upload URL from the response