Mirror of https://github.com/Direct-Dev-Ru/go-lcg.git (synced 2025-11-16 01:29:55 +00:00)

Commit: added new proxy llm provider
@@ -1,4 +1,4 @@
-FROM --platform=${BUILDPLATFORM} golang:1.23-alpine AS builder
+FROM --platform=${BUILDPLATFORM} golang:1.24.6-alpine3.22 AS builder
 
 ARG TARGETARCH
 
@@ -1,4 +1,4 @@
-FROM --platform=${BUILDPLATFORM} golang:1.23-alpine AS build
+FROM --platform=${BUILDPLATFORM} golang:1.24.6-alpine3.22 AS build
 ARG TARGETOS
 ARG TARGETARCH
 RUN apk add git && go install mvdan.cc/garble@latest
README.md (38 changed lines)
@@ -1,8 +1,11 @@
-## Linux Command GPT (lcg)
+# Linux Command GPT (lcg)
+
 Get Linux commands in natural language with the power of ChatGPT.
 
-### Installation
+## Installation
+
 Build from source
+
 ```bash
 > git clone --depth 1 https://github.com/asrul10/linux-command-gpt.git ~/.linux-command-gpt
 > cd ~/.linux-command-gpt
@@ -13,7 +16,7 @@ Build from source
 
 Or you can [download lcg executable file](https://github.com/asrul10/linux-command-gpt/releases)
 
-### Example Usage
+## Example Usage
 
 ```bash
 > lcg I want to extract linux-command-gpt.tar.gz file
@@ -39,24 +42,30 @@ for host in "${hosts[@]}"; do
 ssh $host "echo 'Hello, world!' > /tmp/hello.txt"
 done
 ```
 
 This script defines an array `hosts` that contains the names of the hosts to connect to. The loop iterates over each element in the array and uses the `ssh` command to execute a simple command on the remote host. In this case, the command is `echo 'Hello, world!' > /tmp/hello.txt`, which writes the string "Hello, world!" to a file called `/tmp/hello.txt`.
 
 You can modify the script to run any command you like by replacing the `echo` command with your desired command. For example, if you want to run a Python script on each host, you could use the following command:
 
 ```bash
 ssh $host "python /path/to/script.py"
 ```
 
 This will execute the Python script located at `/path/to/script.py` on the remote host.
 
 You can also modify the script to run multiple commands in a single SSH session by using the `&&` operator to chain the commands together. For example:
 
 ```bash
 ssh $host "echo 'Hello, world!' > /tmp/hello.txt && python /path/to/script.py"
 ```
 
 This will execute both the `echo` command and the Python script in a single SSH session.
 
 I hope this helps! Let me know if you have any questions or need further assistance.
 
 Do you want to (c)opy, (r)egenerate, or take (N)o action on the command? (c/r/N):
-```
 
+``` text
 
 To use the "copy to clipboard" feature, you need to install either the `xclip` or `xsel` package.
 
@@ -69,4 +78,25 @@ To use the "copy to clipboard" feature, you need to install either the `xclip` o
 --file -f read command from file
 --update-key -u update the API key
 --delete-key -d delete the API key
+
+# ollama example
+export LCG_PROVIDER=ollama
+export LCG_HOST=http://192.168.87.108:11434/
+export LCG_MODEL=codegeex4
+
+lcg "I want to extract linux-command-gpt.tar.gz file"
+
+export LCG_PROVIDER=proxy
+export LCG_HOST=http://localhost:8080
+export LCG_MODEL=GigaChat-2
+export LCG_JWT_TOKEN=your_jwt_token_here
+
+lcg "I want to extract linux-command-gpt.tar.gz file"
+
+lcg health
+
+lcg config
+
+lcg update-jwt
+
 ```
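For readers following the new proxy example above: with `LCG_PROVIDER=proxy`, the client this commit adds in `gpt/providers.go` sends the chat as a JSON POST to `{LCG_HOST}/api/v1/protected/sberchat/chat` and passes the JWT as a Bearer token. The sketch below reproduces that request shape outside of lcg; the message field names (`role`/`content`) and the literal values are assumptions for illustration, not taken from this commit.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
)

// message mirrors the chat entries sent by the proxy provider; the JSON keys
// "role" and "content" are assumed here, they are not shown in this diff.
type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// request carries only the fields of ProxyChatRequest that this sketch needs.
type request struct {
	Messages []message `json:"messages"`
	Model    string    `json:"model,omitempty"`
	Stream   bool      `json:"stream,omitempty"`
}

func main() {
	body, _ := json.Marshal(request{
		Messages: []message{{Role: "user", Content: "I want to extract linux-command-gpt.tar.gz file"}},
		Model:    "GigaChat-2",
	})

	// Same endpoint and auth header the ProxyAPIProvider uses.
	req, err := http.NewRequest("POST", os.Getenv("LCG_HOST")+"/api/v1/protected/sberchat/chat", bytes.NewBuffer(body))
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+os.Getenv("LCG_JWT_TOKEN"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```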
@@ -1 +1 @@
-v1.0.4
+v1.0.1
go.mod (2 changed lines)
@@ -7,6 +7,6 @@ require github.com/atotto/clipboard v0.1.4
 require (
     github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
     github.com/russross/blackfriday/v2 v2.1.0 // indirect
-    github.com/urfave/cli/v2 v2.27.5 // indirect
+    github.com/urfave/cli/v2 v2.27.5
     github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
 )
gpt/gpt.go (118 changed lines)
@@ -1,24 +1,40 @@
 package gpt
 
 import (
-    "bytes"
-    "encoding/json"
     "fmt"
-    "io"
-    "net/http"
     "os"
     "path/filepath"
     "strings"
 )
 
+// ProxySimpleChatRequest is the structure for a simple request
+type ProxySimpleChatRequest struct {
+    Message string `json:"message"`
+    Model   string `json:"model,omitempty"`
+}
+
+// ProxySimpleChatResponse is the response structure for a simple request
+type ProxySimpleChatResponse struct {
+    Response string `json:"response"`
+    Usage    struct {
+        PromptTokens     int `json:"prompt_tokens"`
+        CompletionTokens int `json:"completion_tokens"`
+        TotalTokens      int `json:"total_tokens"`
+    } `json:"usage,omitempty"`
+    Model   string `json:"model,omitempty"`
+    Timeout int    `json:"timeout_seconds,omitempty"`
+}
+
+// Gpt3 is the updated structure with support for different providers
 type Gpt3 struct {
-    CompletionUrl string
+    Provider      Provider
     Prompt        string
     Model         string
     HomeDir       string
     ApiKeyFile    string
     ApiKey        string
     Temperature   float64
+    ProviderType  string // "ollama", "proxy"
 }
 
 type Chat struct {
@@ -135,6 +151,11 @@ func (gpt3 *Gpt3) DeleteKey() {
 }
 
 func (gpt3 *Gpt3) InitKey() {
+    // The proxy provider does not need an API key; a JWT token is used instead
+    if gpt3.ProviderType == "proxy" {
+        return
+    }
+
     load := gpt3.loadApiKey()
     if load {
         return
@@ -145,55 +166,46 @@ func (gpt3 *Gpt3) InitKey() {
     gpt3.storeApiKey(apiKey)
 }
 
-func (gpt3 *Gpt3) Completions(ask string) string {
-    req, err := http.NewRequest("POST", gpt3.CompletionUrl, nil)
-    if err != nil {
-        panic(err)
-    }
-    req.Header.Set("Content-Type", "application/json")
-    // req.Header.Set("Authorization", "Bearer "+strings.TrimSpace(gpt3.ApiKey))
+// NewGpt3 creates a new GPT instance with the selected provider
+func NewGpt3(providerType, host, apiKey, model, prompt string, temperature float64) *Gpt3 {
+    var provider Provider
 
+    switch providerType {
+    case "proxy":
+        provider = NewProxyAPIProvider(host, apiKey, model) // apiKey is used as the JWT token
+    case "ollama":
+        provider = NewOllamaProvider(host, model, temperature)
+    default:
+        provider = NewOllamaProvider(host, model, temperature)
+    }
+
+    return &Gpt3{
+        Provider:     provider,
+        Prompt:       prompt,
+        Model:        model,
+        ApiKey:       apiKey,
+        Temperature:  temperature,
+        ProviderType: providerType,
+    }
+}
+
+// Completions is the updated method with support for different providers
+func (gpt3 *Gpt3) Completions(ask string) string {
     messages := []Chat{
         {"system", gpt3.Prompt},
-        {"user", ask + "." + gpt3.Prompt},
-    }
-    payload := Gpt3Request{
-        Model:    gpt3.Model,
-        Messages: messages,
-        Stream:   false,
-        Options:  Gpt3Options{gpt3.Temperature},
+        {"user", ask + ". " + gpt3.Prompt},
     }
 
-    payloadJson, err := json.Marshal(payload)
+    response, err := gpt3.Provider.Chat(messages)
     if err != nil {
-        panic(err)
-    }
-    req.Body = io.NopCloser(bytes.NewBuffer(payloadJson))
-
-    client := &http.Client{}
-    resp, err := client.Do(req)
-    if err != nil {
-        panic(err)
-    }
-    defer resp.Body.Close()
-
-    body, err := io.ReadAll(resp.Body)
-    if err != nil {
-        panic(err)
-    }
-
-    if resp.StatusCode != http.StatusOK {
-        fmt.Println(string(body))
+        fmt.Printf("Ошибка при выполнении запроса: %v\n", err)
         return ""
     }
 
-    // var res Gpt3Response
-    var res OllamaResponse
-    err = json.Unmarshal(body, &res)
-    if err != nil {
-        panic(err)
-    }
-
-    // return strings.TrimSpace(res.Choices[0].Message.Content)
-    return strings.TrimSpace(res.Message.Content)
+    return response
+}
+
+// Health checks the provider state
+func (gpt3 *Gpt3) Health() error {
+    return gpt3.Provider.Health()
 }
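Taken together, the refactor above replaces the hard-wired HTTP call with a provider chosen in `NewGpt3`. A minimal sketch of how calling code can drive the new API; the import path is an assumption (the module name is not shown in this diff), and the host, model and prompt are placeholder values:

```go
package main

import (
	"fmt"

	gpt "github.com/Direct-Dev-Ru/go-lcg/gpt" // assumed import path; adjust to the actual go.mod module
)

func main() {
	// Select the provider exactly as main.go does: "ollama" (default) or "proxy".
	// For the proxy provider the third argument carries the JWT token instead of an API key.
	client := gpt.NewGpt3("ollama", "http://192.168.87.108:11434/", "", "codegeex4",
		"Reply with linux command and nothing else.", 0.01)

	// Optional: verify the backend is reachable before sending a request.
	if err := client.Health(); err != nil {
		fmt.Println("backend unavailable:", err)
		return
	}

	// Completions now delegates to the configured Provider (Ollama or proxy).
	cmd := client.Completions("I want to extract linux-command-gpt.tar.gz file")
	fmt.Println(cmd)
}
```

For the proxy provider the same call takes the JWT in place of the API-key argument, mirroring what `initGPT` in main.go does further down in this commit.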
gpt/providers.go (new file, 246 lines)
@@ -0,0 +1,246 @@
+package gpt
+
+import (
+    "bytes"
+    "encoding/json"
+    "fmt"
+    "io"
+    "net/http"
+    "strings"
+    "time"
+)
+
+// Provider is the interface for working with different LLM providers
+type Provider interface {
+    Chat(messages []Chat) (string, error)
+    Health() error
+}
+
+// ProxyAPIProvider is the implementation for the proxy API (gin-restapi)
+type ProxyAPIProvider struct {
+    BaseURL    string
+    JWTToken   string
+    Model      string
+    HTTPClient *http.Client
+}
+
+// ProxyChatRequest is the request structure for the proxy API
+type ProxyChatRequest struct {
+    Messages       []Chat   `json:"messages"`
+    Model          string   `json:"model,omitempty"`
+    Temperature    float64  `json:"temperature,omitempty"`
+    TopP           float64  `json:"top_p,omitempty"`
+    Stream         bool     `json:"stream,omitempty"`
+    SystemContent  string   `json:"system_content,omitempty"`
+    UserContent    string   `json:"user_content,omitempty"`
+    RandomWords    []string `json:"random_words,omitempty"`
+    FallbackString string   `json:"fallback_string,omitempty"`
+}
+
+// ProxyChatResponse is the response structure from the proxy API
+type ProxyChatResponse struct {
+    Response string `json:"response"`
+    Usage    struct {
+        PromptTokens     int `json:"prompt_tokens"`
+        CompletionTokens int `json:"completion_tokens"`
+        TotalTokens      int `json:"total_tokens"`
+    } `json:"usage,omitempty"`
+    Error   string `json:"error,omitempty"`
+    Model   string `json:"model,omitempty"`
+    Timeout int    `json:"timeout_seconds,omitempty"`
+}
+
+// ProxyHealthResponse is the health check response structure
+type ProxyHealthResponse struct {
+    Status  string `json:"status"`
+    Message string `json:"message"`
+    Model   string `json:"default_model,omitempty"`
+    Timeout int    `json:"default_timeout_seconds,omitempty"`
+}
+
+// OllamaProvider is the implementation for the Ollama API
+type OllamaProvider struct {
+    BaseURL     string
+    Model       string
+    Temperature float64
+    HTTPClient  *http.Client
+}
+
+func NewProxyAPIProvider(baseURL, jwtToken, model string) *ProxyAPIProvider {
+    return &ProxyAPIProvider{
+        BaseURL:    strings.TrimSuffix(baseURL, "/"),
+        JWTToken:   jwtToken,
+        Model:      model,
+        HTTPClient: &http.Client{Timeout: 120 * time.Second},
+    }
+}
+
+func NewOllamaProvider(baseURL, model string, temperature float64) *OllamaProvider {
+    return &OllamaProvider{
+        BaseURL:     strings.TrimSuffix(baseURL, "/"),
+        Model:       model,
+        Temperature: temperature,
+        HTTPClient:  &http.Client{Timeout: 120 * time.Second},
+    }
+}
+
+// Chat for ProxyAPIProvider
+func (p *ProxyAPIProvider) Chat(messages []Chat) (string, error) {
+    // Use the main endpoint /api/v1/protected/sberchat/chat
+    payload := ProxyChatRequest{
+        Messages:       messages,
+        Model:          p.Model,
+        Temperature:    0.5,
+        TopP:           0.5,
+        Stream:         false,
+        RandomWords:    []string{"linux", "command", "gpt"},
+        FallbackString: "I'm sorry, I can't help with that. Please try again.",
+    }
+
+    jsonData, err := json.Marshal(payload)
+    if err != nil {
+        return "", fmt.Errorf("ошибка маршалинга запроса: %w", err)
+    }
+
+    req, err := http.NewRequest("POST", p.BaseURL+"/api/v1/protected/sberchat/chat", bytes.NewBuffer(jsonData))
+    if err != nil {
+        return "", fmt.Errorf("ошибка создания запроса: %w", err)
+    }
+
+    req.Header.Set("Content-Type", "application/json")
+    if p.JWTToken != "" {
+        req.Header.Set("Authorization", "Bearer "+p.JWTToken)
+    }
+
+    resp, err := p.HTTPClient.Do(req)
+    if err != nil {
+        return "", fmt.Errorf("ошибка выполнения запроса: %w", err)
+    }
+    defer resp.Body.Close()
+
+    body, err := io.ReadAll(resp.Body)
+    if err != nil {
+        return "", fmt.Errorf("ошибка чтения ответа: %w", err)
+    }
+
+    if resp.StatusCode != http.StatusOK {
+        return "", fmt.Errorf("ошибка API: %d - %s", resp.StatusCode, string(body))
+    }
+
+    var response ProxyChatResponse
+    if err := json.Unmarshal(body, &response); err != nil {
+        return "", fmt.Errorf("ошибка парсинга ответа: %w", err)
+    }
+
+    if response.Error != "" {
+        return "", fmt.Errorf("ошибка прокси API: %s", response.Error)
+    }
+
+    if response.Response == "" {
+        return "", fmt.Errorf("пустой ответ от API")
+    }
+
+    return strings.TrimSpace(response.Response), nil
+}
+
+// Health for ProxyAPIProvider
+func (p *ProxyAPIProvider) Health() error {
+    req, err := http.NewRequest("GET", p.BaseURL+"/api/v1/protected/sberchat/health", nil)
+    if err != nil {
+        return fmt.Errorf("ошибка создания health check запроса: %w", err)
+    }
+
+    if p.JWTToken != "" {
+        req.Header.Set("Authorization", "Bearer "+p.JWTToken)
+    }
+
+    resp, err := p.HTTPClient.Do(req)
+    if err != nil {
+        return fmt.Errorf("ошибка health check: %w", err)
+    }
+    defer resp.Body.Close()
+
+    if resp.StatusCode != http.StatusOK {
+        return fmt.Errorf("health check failed: %d", resp.StatusCode)
+    }
+
+    var healthResponse ProxyHealthResponse
+    body, err := io.ReadAll(resp.Body)
+    if err != nil {
+        return fmt.Errorf("ошибка чтения health check ответа: %w", err)
+    }
+
+    if err := json.Unmarshal(body, &healthResponse); err != nil {
+        return fmt.Errorf("ошибка парсинга health check ответа: %w", err)
+    }
+
+    if healthResponse.Status != "ok" {
+        return fmt.Errorf("health check status: %s - %s", healthResponse.Status, healthResponse.Message)
+    }
+
+    return nil
+}
+
+// Chat for OllamaProvider
+func (o *OllamaProvider) Chat(messages []Chat) (string, error) {
+    payload := Gpt3Request{
+        Model:    o.Model,
+        Messages: messages,
+        Stream:   false,
+        Options:  Gpt3Options{o.Temperature},
+    }
+
+    jsonData, err := json.Marshal(payload)
+    if err != nil {
+        return "", fmt.Errorf("ошибка маршалинга запроса: %w", err)
+    }
+
+    req, err := http.NewRequest("POST", o.BaseURL+"/api/chat", bytes.NewBuffer(jsonData))
+    if err != nil {
+        return "", fmt.Errorf("ошибка создания запроса: %w", err)
+    }
+
+    req.Header.Set("Content-Type", "application/json")
+
+    resp, err := o.HTTPClient.Do(req)
+    if err != nil {
+        return "", fmt.Errorf("ошибка выполнения запроса: %w", err)
+    }
+    defer resp.Body.Close()
+
+    body, err := io.ReadAll(resp.Body)
+    if err != nil {
+        return "", fmt.Errorf("ошибка чтения ответа: %w", err)
+    }
+
+    if resp.StatusCode != http.StatusOK {
+        return "", fmt.Errorf("ошибка API: %d - %s", resp.StatusCode, string(body))
+    }
+
+    var response OllamaResponse
+    if err := json.Unmarshal(body, &response); err != nil {
+        return "", fmt.Errorf("ошибка парсинга ответа: %w", err)
+    }
+
+    return strings.TrimSpace(response.Message.Content), nil
+}
+
+// Health for OllamaProvider
+func (o *OllamaProvider) Health() error {
+    req, err := http.NewRequest("GET", o.BaseURL+"/api/tags", nil)
+    if err != nil {
+        return fmt.Errorf("ошибка создания health check запроса: %w", err)
+    }
+
+    resp, err := o.HTTPClient.Do(req)
+    if err != nil {
+        return fmt.Errorf("ошибка health check: %w", err)
+    }
+    defer resp.Body.Close()
+
+    if resp.StatusCode != http.StatusOK {
+        return fmt.Errorf("health check failed: %d", resp.StatusCode)
+    }
+
+    return nil
+}
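The `Provider` interface above is the extension point this commit introduces: a backend only has to implement `Chat` and `Health` and be wired into the `switch` in `NewGpt3`. As a sketch only (not part of the commit), a hypothetical static provider shows how small that contract is:

```go
package gpt

import "fmt"

// StaticProvider is a hypothetical, illustration-only Provider that returns a
// canned reply; it is not part of this commit.
type StaticProvider struct {
	Reply string
}

// Chat satisfies Provider.Chat by ignoring the conversation and returning a fixed string.
func (s *StaticProvider) Chat(messages []Chat) (string, error) {
	if len(messages) == 0 {
		return "", fmt.Errorf("no messages supplied")
	}
	return s.Reply, nil
}

// Health satisfies Provider.Health; a static provider is always "healthy".
func (s *StaticProvider) Health() error {
	return nil
}
```

Registering such a backend would mean adding another `case` to `NewGpt3` in `gpt/gpt.go` above.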
main.go (272 changed lines)
@@ -5,6 +5,7 @@ import (
     "fmt"
     "math"
     "os"
+    "os/exec"
     "os/user"
     "path"
     "strings"
@@ -27,33 +28,44 @@ var (
     PROMPT        = getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.")
     API_KEY_FILE  = getEnv("LCG_API_KEY_FILE", ".openai_api_key")
     RESULT_FOLDER = getEnv("LCG_RESULT_FOLDER", path.Join(cwd, "gpt_results"))
+    PROVIDER_TYPE = getEnv("LCG_PROVIDER", "ollama") // "ollama", "proxy"
+    JWT_TOKEN     = getEnv("LCG_JWT_TOKEN", "")
+)
+
+const (
+    colorRed    = "\033[31m"
+    colorGreen  = "\033[32m"
+    colorYellow = "\033[33m"
+    colorBlue   = "\033[34m"
+    colorPurple = "\033[35m"
+    colorCyan   = "\033[36m"
+    colorReset  = "\033[0m"
+    colorBold   = "\033[1m"
 )
 
 func main() {
     app := &cli.App{
         Name:     "lcg",
-        Usage:    "Linux Command GPT - Generate Linux commands from descriptions",
+        Usage:    "Linux Command GPT - Генерация Linux команд из описаний",
         Version:  Version,
         Commands: getCommands(),
         UsageText: `
-lcg [global options] <command description>
+lcg [опции] <описание команды>
 
-Examples:
-  lcg "I want to extract linux-command-gpt.tar.gz file"
-  lcg --file /path/to/file.txt "I want to list all directories with ls"
+Примеры:
+  lcg "хочу извлечь файл linux-command-gpt.tar.gz"
+  lcg --file /path/to/file.txt "хочу вывести все директории с помощью ls"
 `,
         Description: `
-Linux Command GPT is a tool for generating Linux commands from natural language descriptions.
-It supports reading parts of the prompt from files and allows saving, copying, or regenerating results.
-Additional commands are available for managing API keys.
+Linux Command GPT - инструмент для генерации Linux команд из описаний на естественном языке.
+Поддерживает чтение частей промпта из файлов и позволяет сохранять, копировать или перегенерировать результаты.
 
-Environment Variables:
-  LCG_HOST              Endpoint for LLM API (default: http://192.168.87.108:11434/)
-  LCG_COMPLETIONS_PATH  Relative API path (default: api/chat)
-  LCG_MODEL             Model name (default: codegeex4)
-  LCG_PROMPT            Default prompt text
-  LCG_API_KEY_FILE      API key storage file (default: ~/.openai_api_key)
-  LCG_RESULT_FOLDER     Results folder (default: ./gpt_results)
+Переменные окружения:
+  LCG_HOST       Endpoint для LLM API (по умолчанию: http://192.168.87.108:11434/)
+  LCG_MODEL      Название модели (по умолчанию: codegeex4)
+  LCG_PROMPT     Текст промпта по умолчанию
+  LCG_PROVIDER   Тип провайдера: "ollama" или "proxy" (по умолчанию: ollama)
+  LCG_JWT_TOKEN  JWT токен для proxy провайдера
 `,
         Flags: []cli.Flag{
             &cli.StringFlag{
@@ -75,6 +87,7 @@ Environment Variables:
             args := c.Args().Slice()
             if len(args) == 0 {
                 cli.ShowAppHelp(c)
+                showTips()
                 return nil
             }
             executeMain(file, system, strings.Join(args, " "))
@@ -121,67 +134,174 @@ func getCommands() []*cli.Command {
                 return nil
             },
         },
+        {
+            Name:    "update-jwt",
+            Aliases: []string{"j"},
+            Usage:   "Update the JWT token for proxy API",
+            Action: func(c *cli.Context) error {
+                if PROVIDER_TYPE != "proxy" {
+                    fmt.Println("JWT token is only needed for proxy provider")
+                    return nil
+                }
+
+                var jwtToken string
+                fmt.Print("JWT Token: ")
+                fmt.Scanln(&jwtToken)
+
+                currentUser, _ := user.Current()
+                jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
+                if err := os.WriteFile(jwtFile, []byte(strings.TrimSpace(jwtToken)), 0600); err != nil {
+                    fmt.Printf("Ошибка сохранения JWT токена: %v\n", err)
+                    return err
+                }
+
+                fmt.Println("JWT token updated.")
+                return nil
+            },
+        },
+        {
+            Name:    "delete-jwt",
+            Aliases: []string{"dj"},
+            Usage:   "Delete the JWT token for proxy API",
+            Action: func(c *cli.Context) error {
+                if PROVIDER_TYPE != "proxy" {
+                    fmt.Println("JWT token is only needed for proxy provider")
+                    return nil
+                }
+
+                currentUser, _ := user.Current()
+                jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
+                if err := os.Remove(jwtFile); err != nil && !os.IsNotExist(err) {
+                    fmt.Printf("Ошибка удаления JWT токена: %v\n", err)
+                    return err
+                }
+
+                fmt.Println("JWT token deleted.")
+                return nil
+            },
+        },
+        {
+            Name:    "health",
+            Aliases: []string{"he"}, // changed from "h" to "he"
+            Usage:   "Check API health",
+            Action: func(c *cli.Context) error {
+                gpt3 := initGPT(PROMPT)
+                if err := gpt3.Health(); err != nil {
+                    fmt.Printf("Health check failed: %v\n", err)
+                    return err
+                }
+                fmt.Println("API is healthy.")
+                return nil
+            },
+        },
+        {
+            Name:    "config",
+            Aliases: []string{"co"}, // changed from "c" to "co"
+            Usage:   "Show current configuration",
+            Action: func(c *cli.Context) error {
+                fmt.Printf("Provider: %s\n", PROVIDER_TYPE)
+                fmt.Printf("Host: %s\n", HOST)
+                fmt.Printf("Model: %s\n", MODEL)
+                fmt.Printf("Prompt: %s\n", PROMPT)
+                if PROVIDER_TYPE == "proxy" {
+                    fmt.Printf("JWT Token: %s\n", func() string {
+                        if JWT_TOKEN != "" {
+                            return "***set***"
+                        }
+                        currentUser, _ := user.Current()
+                        jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
+                        if _, err := os.Stat(jwtFile); err == nil {
+                            return "***from file***"
+                        }
+                        return "***not set***"
+                    }())
+                }
+                return nil
+            },
+        },
+        {
+            Name:    "history",
+            Aliases: []string{"hist"},
+            Usage:   "Show command history",
+            Action: func(c *cli.Context) error {
+                showHistory()
+                return nil
+            },
+        },
     }
 }
 
 func executeMain(file, system, commandInput string) {
-    // fmt.Println(system, commandInput)
-    // os.Exit(0)
     if file != "" {
         if err := reader.FileToPrompt(&commandInput, file); err != nil {
-            fmt.Println("Error reading file:", err)
+            printColored(fmt.Sprintf("❌ Ошибка чтения файла: %v\n", err), colorRed)
             return
         }
     }
 
     if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
-        os.MkdirAll(RESULT_FOLDER, 0755)
+        if err := os.MkdirAll(RESULT_FOLDER, 0755); err != nil {
+            printColored(fmt.Sprintf("❌ Ошибка создания папки результатов: %v\n", err), colorRed)
+            return
+        }
     }
 
     gpt3 := initGPT(system)
 
-    // if system != PROMPT {
-    //     commandInput += ". " + system
-    // }
-    fmt.Println(commandInput)
+    printColored("🤖 Запрос: ", colorCyan)
+    fmt.Printf("%s\n", commandInput)
     response, elapsed := getCommand(gpt3, commandInput)
     if response == "" {
-        fmt.Println("No response received.")
+        printColored("❌ Ответ не получен. Проверьте подключение к API.\n", colorRed)
         return
     }
 
-    fmt.Printf("Completed in %v seconds\n\n%s\n", elapsed, response)
+    printColored(fmt.Sprintf("✅ Выполнено за %.2f сек\n", elapsed), colorGreen)
+    printColored("\n📋 Команда:\n", colorYellow)
+    printColored(fmt.Sprintf(" %s\n\n", response), colorBold+colorGreen)
+
+    saveToHistory(commandInput, response)
     handlePostResponse(response, gpt3, system, commandInput)
 }
 
 func initGPT(system string) gpt.Gpt3 {
     currentUser, _ := user.Current()
-    return gpt.Gpt3{
-        CompletionUrl: HOST + COMPLETIONS,
-        Model:         MODEL,
-        Prompt:        system,
-        HomeDir:       currentUser.HomeDir,
-        ApiKeyFile:    API_KEY_FILE,
-        Temperature:   0.01,
+    // Load the JWT token depending on the provider
+    var jwtToken string
+    if PROVIDER_TYPE == "proxy" {
+        jwtToken = JWT_TOKEN
+        if jwtToken == "" {
+            // Try to load it from a file
+            jwtFile := currentUser.HomeDir + "/.proxy_jwt_token"
+            if data, err := os.ReadFile(jwtFile); err == nil {
+                jwtToken = strings.TrimSpace(string(data))
+            }
+        }
     }
+
+    return *gpt.NewGpt3(PROVIDER_TYPE, HOST, jwtToken, MODEL, system, 0.01)
 }
 
 func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
     gpt3.InitKey()
     start := time.Now()
     done := make(chan bool)
 
     go func() {
-        loadingChars := []rune{'-', '\\', '|', '/'}
+        loadingChars := []string{"⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"}
         i := 0
         for {
             select {
             case <-done:
-                fmt.Printf("\r")
+                fmt.Printf("\r%s", strings.Repeat(" ", 50))
+                fmt.Print("\r")
                 return
             default:
-                fmt.Printf("\rLoading %c", loadingChars[i])
+                fmt.Printf("\r%s Обрабатываю запрос...", loadingChars[i])
                 i = (i + 1) % len(loadingChars)
-                time.Sleep(30 * time.Millisecond)
+                time.Sleep(100 * time.Millisecond)
             }
         }
     }()
@@ -194,20 +314,23 @@ func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
 }
 
 func handlePostResponse(response string, gpt3 gpt.Gpt3, system, cmd string) {
-    fmt.Print("\nOptions: (c)opy, (s)ave, (r)egenerate, (n)one: ")
+    fmt.Printf("Действия: (c)копировать, (s)сохранить, (r)перегенерировать, (e)выполнить, (n)ничего: ")
     var choice string
     fmt.Scanln(&choice)
 
     switch strings.ToLower(choice) {
     case "c":
         clipboard.WriteAll(response)
-        fmt.Println("Response copied to clipboard.")
+        fmt.Println("✅ Команда скопирована в буфер обмена")
     case "s":
         saveResponse(response, gpt3, cmd)
     case "r":
+        fmt.Println("🔄 Перегенерирую...")
         executeMain("", system, cmd)
+    case "e":
+        executeCommand(response)
     default:
-        fmt.Println("No action taken.")
+        fmt.Println(" До свидания!")
     }
 }
 
@@ -224,9 +347,80 @@ func saveResponse(response string, gpt3 gpt.Gpt3, cmd string) {
     }
 }
 
+func executeCommand(command string) {
+    fmt.Printf("🚀 Выполняю: %s\n", command)
+    fmt.Print("Продолжить? (y/N): ")
+    var confirm string
+    fmt.Scanln(&confirm)
+
+    if strings.ToLower(confirm) == "y" || strings.ToLower(confirm) == "yes" {
+        cmd := exec.Command("bash", "-c", command)
+        cmd.Stdout = os.Stdout
+        cmd.Stderr = os.Stderr
+
+        if err := cmd.Run(); err != nil {
+            fmt.Printf("❌ Ошибка выполнения: %v\n", err)
+        } else {
+            fmt.Println("✅ Команда выполнена успешно")
+        }
+    } else {
+        fmt.Println("❌ Выполнение отменено")
+    }
+}
+
 func getEnv(key, defaultValue string) string {
     if value, exists := os.LookupEnv(key); exists {
         return value
     }
     return defaultValue
 }
+
+type CommandHistory struct {
+    Command   string
+    Response  string
+    Timestamp time.Time
+}
+
+var commandHistory []CommandHistory
+
+func saveToHistory(cmd, response string) {
+    commandHistory = append(commandHistory, CommandHistory{
+        Command:   cmd,
+        Response:  response,
+        Timestamp: time.Now(),
+    })
+
+    // Limit the history to 100 commands
+    if len(commandHistory) > 100 {
+        commandHistory = commandHistory[1:]
+    }
+}
+
+func showHistory() {
+    if len(commandHistory) == 0 {
+        printColored("📝 История пуста\n", colorYellow)
+        return
+    }
+
+    printColored("📝 История команд:\n", colorYellow)
+    for i, hist := range commandHistory {
+        fmt.Printf("%d. %s → %s (%s)\n",
+            i+1,
+            hist.Command,
+            hist.Response,
+            hist.Timestamp.Format("15:04:05"))
+    }
+}
+
+func printColored(text, color string) {
+    fmt.Printf("%s%s%s", color, text, colorReset)
+}
+
+func showTips() {
+    printColored("💡 Подсказки:\n", colorCyan)
+    fmt.Println(" • Используйте --file для чтения из файла")
+    fmt.Println(" • Используйте --sys для изменения системного промпта")
+    fmt.Println(" • Команда 'history' покажет историю запросов")
+    fmt.Println(" • Команда 'config' покажет текущие настройки")
+    fmt.Println(" • Команда 'health' проверит доступность API")
+}
@@ -4,7 +4,7 @@
 REPO=kuznetcovay/go-lcg
 VERSION=$1
 if [ -z "$VERSION" ]; then
-  VERSION=v1.0.1
+  VERSION=v1.1.0
 fi
 BRANCH=main
 