Compare commits


26 Commits

Author SHA1 Message Date
46a0d9e45a release v1.0.3 2024-12-05 13:25:58 +06:00
12cd3fe6db moved to cli framework 2024-12-05 13:17:35 +06:00
7136fe4607 before refactor to cli framework 2024-12-05 11:15:38 +06:00
fa0a8565c3 release v1.0.2 2024-12-03 18:00:10 +06:00
8758ab19ef release v1.0.1 2024-12-03 17:17:04 +06:00
asrul10 7a40d8d51e feat: update prompt and model 2024-09-19 00:53:48 -04:00
asrul10 d11017d792 Merge pull request #12 from asrul10/feat/read-file (feat: read file and add to prompt) 2024-02-08 12:00:21 +07:00
asrul10 1c4113d0c2 feat: read file and add to prompt 2024-02-08 11:57:25 +07:00
asrul10 00b2ea6614 Merge pull request #10 from asrul10/feat/copy-clipboard ((feat) Copy to clipboard) 2023-12-19 10:12:35 +07:00
asrul10 9538b0fed5 feat: copy to clipboard 2023-12-19 10:09:26 +07:00
asrul10 5141cb69a3 update 2023-12-19 09:30:22 +07:00
asrul10 ae90ef6cfb Merge pull request #8 from asrul10/feature/regenerate-gpt (Add a feature to regenerate the result and some minor improvements) 2023-04-09 10:56:43 +07:00
asrul10 7f81b1942b style: update version 2023-04-09 10:54:35 +07:00
asrul10 dafcaaff0f docs: update example result 2023-04-09 10:53:11 +07:00
asrul10 fbb68d2a28 feat: add regenerate options 2023-04-09 10:51:26 +07:00
asrul10 2d6fef23aa chore: set default options to (N)o execute 2023-04-09 10:33:53 +07:00
asrul10 432bfc61db feat: add example usage if there is no option 2023-04-09 10:30:13 +07:00
asrul10 0e50c8ec04 feat: remove borders to make it easy to copy 2023-04-09 10:25:50 +07:00
asrul10 148e1d9420 fix: box result 2023-03-25 22:26:38 +07:00
asrul10 952eee1a29 docs: fix link 2023-03-12 16:20:23 +07:00
asrul10 c2619a2864 docs: add executable link 2023-03-12 16:13:01 +07:00
asrul10 b1166a724d Merge pull request #1 from asrul10/workflow (Add github actions for release) 2023-03-12 16:04:27 +07:00
asrul10 c6b1474117 chore: add github actions 2023-03-12 16:01:18 +07:00
asrul10 b04f7016b8 fix: remove precompiled file 2023-03-12 14:36:12 +07:00
asrul10 4f52b5bbad docs: add LICENSE 2023-03-12 06:48:21 +07:00
asrul10 dce4360043 fix: blocking loading when input api key 2023-03-12 06:27:03 +07:00
19 changed files with 833 additions and 152 deletions

.gitignore (vendored): 7 changes

@@ -8,4 +8,9 @@
go.work
*.log
lcg
dist/
shell-code/build.env
bin-linux-amd64/*
bin-linux-arm64/*
binaries-for-upload/*
gpt_results

.goreleaser.yaml (new file): 33 changes

@@ -0,0 +1,33 @@
archives:
- format: tar.gz
builds:
- binary: lcg
env:
- CGO_ENABLED=0
goarch:
- amd64
- arm64
- arm
goos:
- linux
- darwin
changelog:
filters:
exclude:
- '^docs:'
- '^test:'
sort: asc
checksum:
name_template: 'checksums.txt'
release:
draft: true
snapshot:
name_template: "{{ incpatch .Version }}-next"
# yaml-language-server: $schema=https://goreleaser.com/static/schema.json
# vim: set ts=2 sw=2 tw=0 fo=cnqoj
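This GoReleaser config archives the lcg binary as tar.gz for linux and darwin on amd64, arm64, and arm, writes a checksums.txt, and creates draft releases. A local dry run might look like the sketch below (assuming a recent GoReleaser; older versions use --rm-dist instead of --clean):

```bash
# Validate .goreleaser.yaml
goreleaser check

# Build a snapshot release locally without publishing;
# artifacts land in dist/, which the updated .gitignore excludes
goreleaser release --snapshot --clean
```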

Dockerfile (new file, 25 lines; path not shown in this view): builds the lcg binary with garble and packages it into an alpine runtime image

@@ -0,0 +1,25 @@
FROM --platform=${BUILDPLATFORM} golang:1.23-alpine AS builder
ARG TARGETARCH
RUN apk add git && go install mvdan.cc/garble@latest
WORKDIR /app
COPY . .
RUN echo $BUILDPLATFORM > buildplatform
RUN echo $TARGETARCH > targetarch
# RUN GOOS=linux GOARCH=$TARGETARCH go build -o /app/go-lcg .
RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} garble -literals -tiny build -ldflags="-w -s" -o /app/go-lcg .
FROM alpine:latest
WORKDIR /root
# COPY --from=builder /app/buildplatform .
# COPY --from=builder /app/targetarch .
COPY --from=builder /app/go-lcg /root/lcg
ENTRYPOINT ["/root/lcg"]

Dockerfile (new file, 23 lines; path not shown in this view): cross-compiles the lcg binary with garble and exports it from scratch stages (bin-linux, bin-darwin, bin-windows targets)

@@ -0,0 +1,23 @@
FROM --platform=${BUILDPLATFORM} golang:1.23-alpine AS build
ARG TARGETOS
ARG TARGETARCH
RUN apk add git && go install mvdan.cc/garble@latest
WORKDIR /src
ENV CGO_ENABLED=0
COPY go.* .
RUN go mod download
COPY . .
# RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o /out/go-lcg .
RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} garble -literals -tiny build -ldflags="-w -s" -o /out/go-lcg .
FROM scratch AS bin-unix
COPY --from=build /out/go-lcg /lcg
FROM bin-unix AS bin-linux
FROM bin-unix AS bin-darwin
FROM scratch AS bin-windows
COPY --from=build /out/go-lcg /lcg.exe
FROM bin-${TARGETOS} AS bin

LICENSE (new file): 21 changes

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2023 asrul10
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md

@@ -11,23 +11,62 @@ Build from source
> ln -s ~/.linux-command-gpt/lcg ~/.local/bin
```
Or you can [download lcg executable file](https://github.com/asrul10/linux-command-gpt/releases)
### Example Usage
```bash
> lcg I want to extract file linux-command-gpt.tar.gz
> lcg I want to extract linux-command-gpt.tar.gz file
Completed in 0.92 seconds
┌────────────────────────────────────┐
tar -xvzf linux-command-gpt.tar.gz
└────────────────────────────────────┘
Are you sure you want to execute the command? (Y/n):
tar -xvzf linux-command-gpt.tar.gz
Do you want to (c)opy, (r)egenerate, or take (N)o action on the command? (c/r/N):
```
```bash
> LCG_PROMPT='Provide full response' LCG_MODEL=codellama:13b lcg 'i need bash script
to execute some command by ssh on some array of hosts'
Completed in 181.16 seconds
Here is a sample Bash script that demonstrates how to execute commands over SSH on an array of hosts:
```bash
#!/bin/bash
hosts=(host1 host2 host3)
for host in "${hosts[@]}"; do
ssh $host "echo 'Hello, world!' > /tmp/hello.txt"
done
```
This script defines an array `hosts` that contains the names of the hosts to connect to. The loop iterates over each element in the array and uses the `ssh` command to execute a simple command on the remote host. In this case, the command is `echo 'Hello, world!' > /tmp/hello.txt`, which writes the string "Hello, world!" to a file called `/tmp/hello.txt`.
You can modify the script to run any command you like by replacing the `echo` command with your desired command. For example, if you want to run a Python script on each host, you could use the following command:
```bash
ssh $host "python /path/to/script.py"
```
This will execute the Python script located at `/path/to/script.py` on the remote host.
You can also modify the script to run multiple commands in a single SSH session by using the `&&` operator to chain the commands together. For example:
```bash
ssh $host "echo 'Hello, world!' > /tmp/hello.txt && python /path/to/script.py"
```
This will execute both the `echo` command and the Python script in a single SSH session.
I hope this helps! Let me know if you have any questions or need further assistance.
Do you want to (c)opy, (r)egenerate, or take (N)o action on the command? (c/r/N):
```
To use the "copy to clipboard" feature, you need to install either the `xclip` or `xsel` package.
### Options
```bash
> lcg [options]
--help output usage information
--version output the version number
--update-key update the API key
--delete-key delete the API key
--help -h output usage information
--version -v output the version number
--file -f read command from file
--update-key -u update the API key
--delete-key -d delete the API key
```
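Regarding the xclip/xsel note above, a typical install looks like this (package names assumed for Debian/Ubuntu and Arch; adjust for your distribution):

```bash
# Debian/Ubuntu
sudo apt-get install xclip   # or: sudo apt-get install xsel

# Arch Linux
sudo pacman -S xclip         # or: sudo pacman -S xsel
```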

VERSION.txt (new file): 1 change

@@ -0,0 +1 @@
v1.0.3

_main.go (new file): 224 changes

@@ -0,0 +1,224 @@
// package main
// import (
// _ "embed"
// "fmt"
// "math"
// "os"
// "os/user"
// "path"
// "strings"
// "time"
// "github.com/atotto/clipboard"
// "github.com/direct-dev-ru/linux-command-gpt/gpt"
// "github.com/direct-dev-ru/linux-command-gpt/reader"
// )
// //go:embed VERSION.txt
// var Version string
// var cwd, _ = os.Getwd()
// var (
// HOST = getEnv("LCG_HOST", "http://192.168.87.108:11434/")
// COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat") // relative part of endpoint
// MODEL = getEnv("LCG_MODEL", "codegeex4")
// PROMPT = getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.")
// API_KEY_FILE = getEnv("LCG_API_KEY_FILE", ".openai_api_key")
// RESULT_FOLDER = getEnv("LCG_RESULT_FOLDER", path.Join(cwd, "gpt_results"))
// // HOST = "https://api.openai.com/v1/"
// // COMPLETIONS = "chat/completions"
// // MODEL = "gpt-4o-mini"
// // MODEL = "codellama:13b"
// // This file is created in the user's home directory
// // Example: /home/username/.openai_api_key
// // API_KEY_FILE = ".openai_api_key"
// HELP = `
// Usage: lcg [options]
// --help -h output usage information
// --version -v output the version number
// --file -f read part of command from file or bash feature $(...)
// --update-key -u update the API key
// --delete-key -d delete the API key
// Example Usage: lcg I want to extract linux-command-gpt.tar.gz file
// Example Usage: lcg --file /path/to/file.json I want to print object questions with jq
// Env Vars:
// LCG_HOST - defaults to "http://192.168.87.108:11434/" - endpoint for Ollama or other LLM API
// LCG_COMPLETIONS_PATH -defaults to "api/chat" - relative part of endpoint
// LCG_MODEL - defaults to "codegeex4"
// LCG_PROMPT - defaults to Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks.
// LCG_API_KEY_FILE - defaults to ${HOME}/.openai_api_key - file with API key
// LCG_RESULT_FOLDER - defaults to $(pwd)/gpt_results - folder to save results
// `
// VERSION = Version
// CMD_HELP = 100
// CMD_VERSION = 101
// CMD_UPDATE = 102
// CMD_DELETE = 103
// CMD_COMPLETION = 110
// )
// // getEnv retrieves the value of the environment variable `key` or returns `defaultValue` if not set.
// func getEnv(key, defaultValue string) string {
// if value, exists := os.LookupEnv(key); exists {
// return value
// }
// return defaultValue
// }
// func handleCommand(cmd string) int {
// if cmd == "" || cmd == "--help" || cmd == "-h" {
// return CMD_HELP
// }
// if cmd == "--version" || cmd == "-v" {
// return CMD_VERSION
// }
// if cmd == "--update-key" || cmd == "-u" {
// return CMD_UPDATE
// }
// if cmd == "--delete-key" || cmd == "-d" {
// return CMD_DELETE
// }
// return CMD_COMPLETION
// }
// func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
// gpt3.InitKey()
// s := time.Now()
// done := make(chan bool)
// go func() {
// loadingChars := []rune{'-', '\\', '|', '/'}
// i := 0
// for {
// select {
// case <-done:
// fmt.Printf("\r")
// return
// default:
// fmt.Printf("\rLoading %c", loadingChars[i])
// i = (i + 1) % len(loadingChars)
// time.Sleep(30 * time.Millisecond)
// }
// }
// }()
// r := gpt3.Completions(cmd)
// done <- true
// elapsed := time.Since(s).Seconds()
// elapsed = math.Round(elapsed*100) / 100
// if r == "" {
// return "", elapsed
// }
// return r, elapsed
// }
// func main() {
// currentUser, err := user.Current()
// if err != nil {
// panic(err)
// }
// args := os.Args
// cmd := ""
// file := ""
// if len(args) > 1 {
// start := 1
// if args[1] == "--file" || args[1] == "-f" {
// file = args[2]
// start = 3
// }
// cmd = strings.Join(args[start:], " ")
// }
// if file != "" {
// err := reader.FileToPrompt(&cmd, file)
// if err != nil {
// fmt.Println(err)
// return
// }
// }
// if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
// os.MkdirAll(RESULT_FOLDER, 0755)
// }
// h := handleCommand(cmd)
// if h == CMD_HELP {
// fmt.Println(HELP)
// return
// }
// if h == CMD_VERSION {
// fmt.Println(VERSION)
// return
// }
// gpt3 := gpt.Gpt3{
// CompletionUrl: HOST + COMPLETIONS,
// Model: MODEL,
// Prompt: PROMPT,
// HomeDir: currentUser.HomeDir,
// ApiKeyFile: API_KEY_FILE,
// Temperature: 0.01,
// }
// if h == CMD_UPDATE {
// gpt3.UpdateKey()
// return
// }
// if h == CMD_DELETE {
// gpt3.DeleteKey()
// return
// }
// c := "R"
// r := ""
// elapsed := 0.0
// for c == "R" || c == "r" {
// r, elapsed = getCommand(gpt3, cmd)
// c = "N"
// fmt.Printf("Completed in %v seconds\n\n", elapsed)
// fmt.Println(r)
// fmt.Print("\nDo you want to (c)opy, (s)ave to file, (r)egenerate, or take (N)o action on the command? (c/r/N): ")
// fmt.Scanln(&c)
// // no action
// if c == "N" || c == "n" {
// return
// }
// }
// if r == "" {
// return
// }
// // Copy to clipboard
// if c == "C" || c == "c" {
// clipboard.WriteAll(r)
// fmt.Println("\033[33mCopied to clipboard")
// return
// }
// if c == "S" || c == "s" {
// timestamp := time.Now().Format("2006-01-02_15-04-05") // Format: YYYY-MM-DD_HH-MM-SS
// filename := fmt.Sprintf("gpt_request_%s(%s).md", timestamp, gpt3.Model)
// filePath := path.Join(RESULT_FOLDER, filename)
// resultString := fmt.Sprintf("## Prompt:\n\n%s\n\n------------------\n\n## Response:\n\n%s\n\n", cmd+". "+gpt3.Prompt, r)
// os.WriteFile(filePath, []byte(resultString), 0644)
// fmt.Println("\033[33mSaved to file")
// return
// }
// }

go.mod: 11 changes

@@ -1,3 +1,12 @@
module github.com/asrul/linux-command-gpt
module github.com/direct-dev-ru/linux-command-gpt
go 1.18
require github.com/atotto/clipboard v0.1.4
require (
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/urfave/cli/v2 v2.27.5 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
)

go.sum (new file): 10 changes

@@ -0,0 +1,10 @@
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w=
github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=

gpt package source (file name not shown): Gpt3 client updated for the Ollama chat API

@@ -4,7 +4,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"os"
"path/filepath"
@@ -18,6 +18,7 @@ type Gpt3 struct {
HomeDir string
ApiKeyFile string
ApiKey string
Temperature float64
}
type Chat struct {
@@ -26,8 +27,14 @@ type Chat struct {
}
type Gpt3Request struct {
Model string `json:"model"`
Messages []Chat `json:"messages"`
Model string `json:"model"`
Stream bool `json:"stream"`
Messages []Chat `json:"messages"`
Options Gpt3Options `json:"options"`
}
type Gpt3Options struct {
Temperature float64 `json:"temperature"`
}
type Gpt3Response struct {
@@ -36,6 +43,20 @@ type Gpt3Response struct {
} `json:"choices"`
}
// LlamaResponse represents the response structure.
type OllamaResponse struct {
Model string `json:"model"`
CreatedAt string `json:"created_at"`
Message Chat `json:"message"`
Done bool `json:"done"`
TotalDuration int64 `json:"total_duration"`
LoadDuration int64 `json:"load_duration"`
PromptEvalCount int64 `json:"prompt_eval_count"`
PromptEvalDuration int64 `json:"prompt_eval_duration"`
EvalCount int64 `json:"eval_count"`
EvalDuration int64 `json:"eval_duration"`
}
func (gpt3 *Gpt3) deleteApiKey() {
filePath := gpt3.HomeDir + string(filepath.Separator) + gpt3.ApiKeyFile
if _, err := os.Stat(filePath); os.IsNotExist(err) {
@@ -88,7 +109,7 @@ func (gpt3 *Gpt3) loadApiKey() bool {
if _, err := os.Stat(apiKeyFile); os.IsNotExist(err) {
return false
}
apiKey, err := ioutil.ReadFile(apiKeyFile)
apiKey, err := os.ReadFile(apiKeyFile)
if err != nil {
return false
}
@@ -130,21 +151,24 @@ func (gpt3 *Gpt3) Completions(ask string) string {
panic(err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+strings.TrimSpace(gpt3.ApiKey))
// req.Header.Set("Authorization", "Bearer "+strings.TrimSpace(gpt3.ApiKey))
messages := []Chat{
{"system", gpt3.Prompt},
{"user", ask},
{"user", ask + "." + gpt3.Prompt},
}
payload := Gpt3Request{
gpt3.Model,
messages,
Model: gpt3.Model,
Messages: messages,
Stream: false,
Options: Gpt3Options{gpt3.Temperature},
}
payloadJson, err := json.Marshal(payload)
if err != nil {
panic(err)
}
req.Body = ioutil.NopCloser(bytes.NewBuffer(payloadJson))
req.Body = io.NopCloser(bytes.NewBuffer(payloadJson))
client := &http.Client{}
resp, err := client.Do(req)
@@ -153,7 +177,7 @@ func (gpt3 *Gpt3) Completions(ask string) string {
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
body, err := io.ReadAll(resp.Body)
if err != nil {
panic(err)
}
@@ -163,11 +187,13 @@ func (gpt3 *Gpt3) Completions(ask string) string {
return ""
}
var res Gpt3Response
// var res Gpt3Response
var res OllamaResponse
err = json.Unmarshal(body, &res)
if err != nil {
panic(err)
}
return strings.TrimSpace(res.Choices[0].Message.Content)
// return strings.TrimSpace(res.Choices[0].Message.Content)
return strings.TrimSpace(res.Message.Content)
}
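For reference, the payload built by the updated Completions (model, messages, stream, options.temperature) matches Ollama's /api/chat endpoint. A hand-written sketch of the equivalent request, with host, model, and prompt text as placeholders, would be:

```bash
curl -s http://localhost:11434/api/chat -d '{
  "model": "codegeex4",
  "stream": false,
  "messages": [
    {"role": "system", "content": "Reply with linux command and nothing else."},
    {"role": "user", "content": "extract linux-command-gpt.tar.gz. Reply with linux command and nothing else."}
  ],
  "options": {"temperature": 0.01}
}'
# The code reads .message.content from the JSON response and trims it.
```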

Binary file not shown.

main.go: 274 changes

@@ -1,104 +1,170 @@
package main
import (
_ "embed"
"fmt"
"math"
"os"
"os/exec"
"os/user"
"path"
"strings"
"time"
"github.com/asrul/linux-command-gpt/gpt"
"github.com/atotto/clipboard"
"github.com/direct-dev-ru/linux-command-gpt/gpt"
"github.com/direct-dev-ru/linux-command-gpt/reader"
"github.com/urfave/cli/v2"
)
const (
HOST = "https://api.openai.com/v1/"
COMPLETIONS = "chat/completions"
MODEL = "gpt-3.5-turbo"
PROMPT = "I want you to reply with linux command and nothing else. Do not write explanations."
//go:embed VERSION.txt
var Version string
// This file is created in the user's home directory
// Example: /home/username/.openai_api_key
API_KEY_FILE = ".openai_api_key"
HELP = `
Usage: lcg [options]
--help output usage information
--version output the version number
--update-key update the API key
--delete-key delete the API key
`
VERSION = "0.1.0"
CMD_HELP = 100
CMD_VERSION = 101
CMD_UPDATE = 102
CMD_DELETE = 103
CMD_COMPLETION = 110
var (
cwd, _ = os.Getwd()
HOST = getEnv("LCG_HOST", "http://192.168.87.108:11434/")
COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat")
MODEL = getEnv("LCG_MODEL", "codegeex4")
PROMPT = getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.")
API_KEY_FILE = getEnv("LCG_API_KEY_FILE", ".openai_api_key")
RESULT_FOLDER = getEnv("LCG_RESULT_FOLDER", path.Join(cwd, "gpt_results"))
)
func handleCommand(cmd string) int {
if cmd == "" || cmd == "--help" || cmd == "-h" {
return CMD_HELP
}
if cmd == "--version" || cmd == "-v" {
return CMD_VERSION
}
if cmd == "--update-key" || cmd == "-u" {
return CMD_UPDATE
}
if cmd == "--delete-key" || cmd == "-d" {
return CMD_DELETE
}
return CMD_COMPLETION
}
func main() {
currentUser, err := user.Current()
if err != nil {
panic(err)
app := &cli.App{
Name: "lcg",
Usage: "Linux Command GPT - Generate Linux commands from descriptions",
Version: Version,
Commands: getCommands(),
UsageText: `
lcg [global options] <command description>
Examples:
lcg "I want to extract linux-command-gpt.tar.gz file"
lcg --file /path/to/file.txt "I want to list all directories with ls"
`,
Description: `
Linux Command GPT is a tool for generating Linux commands from natural language descriptions.
It supports reading parts of the prompt from files and allows saving, copying, or regenerating results.
Additional commands are available for managing API keys.
Environment Variables:
LCG_HOST Endpoint for LLM API (default: http://192.168.87.108:11434/)
LCG_COMPLETIONS_PATH Relative API path (default: api/chat)
LCG_MODEL Model name (default: codegeex4)
LCG_PROMPT Default prompt text
LCG_API_KEY_FILE API key storage file (default: ~/.openai_api_key)
LCG_RESULT_FOLDER Results folder (default: ./gpt_results)
`,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "file",
Aliases: []string{"f"},
Usage: "Read part of the command from a file",
},
&cli.StringFlag{
Name: "sys",
Aliases: []string{"s"},
Usage: "System prompt",
DefaultText: getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks"),
Value: getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks"),
},
},
Action: func(c *cli.Context) error {
file := c.String("file")
system := c.String("sys")
args := c.Args().Slice()
if len(args) == 0 {
cli.ShowAppHelp(c)
return nil
}
executeMain(file, system, strings.Join(args, " "))
return nil
},
}
args := os.Args
cmd := ""
if len(args) > 1 {
cmd = strings.Join(args[1:], " ")
cli.VersionFlag = &cli.BoolFlag{
Name: "version",
Aliases: []string{"V", "v"},
Usage: "prints out version",
}
cli.VersionPrinter = func(cCtx *cli.Context) {
fmt.Printf("%s\n", cCtx.App.Version)
}
h := handleCommand(cmd)
if h == CMD_HELP {
fmt.Println(HELP)
if err := app.Run(os.Args); err != nil {
fmt.Println("Error:", err)
os.Exit(1)
}
}
func getCommands() []*cli.Command {
return []*cli.Command{
{
Name: "update-key",
Aliases: []string{"u"},
Usage: "Update the API key",
Action: func(c *cli.Context) error {
gpt3 := initGPT()
gpt3.UpdateKey()
fmt.Println("API key updated.")
return nil
},
},
{
Name: "delete-key",
Aliases: []string{"d"},
Usage: "Delete the API key",
Action: func(c *cli.Context) error {
gpt3 := initGPT()
gpt3.DeleteKey()
fmt.Println("API key deleted.")
return nil
},
},
}
}
func executeMain(file, system, commandInput string) {
// fmt.Println(system, commandInput)
// os.Exit(0)
if file != "" {
if err := reader.FileToPrompt(&commandInput, file); err != nil {
fmt.Println("Error reading file:", err)
return
}
}
if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
os.MkdirAll(RESULT_FOLDER, 0755)
}
gpt3 := initGPT()
response, elapsed := getCommand(gpt3, commandInput)
if response == "" {
fmt.Println("No response received.")
return
}
if h == CMD_VERSION {
fmt.Println(VERSION)
return
}
fmt.Printf("Completed in %v seconds\n\n%s\n", elapsed, response)
handlePostResponse(response, gpt3, system, commandInput)
}
gpt3 := gpt.Gpt3{
func initGPT() gpt.Gpt3 {
currentUser, _ := user.Current()
return gpt.Gpt3{
CompletionUrl: HOST + COMPLETIONS,
Model: MODEL,
Prompt: PROMPT,
HomeDir: currentUser.HomeDir,
ApiKeyFile: API_KEY_FILE,
Temperature: 0.01,
}
}
if h == CMD_UPDATE {
gpt3.UpdateKey()
return
}
if h == CMD_DELETE {
gpt3.DeleteKey()
return
}
s := time.Now()
func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
gpt3.InitKey()
start := time.Now()
done := make(chan bool)
go func() {
loadingChars := []rune{'-', '\\', '|', '/'}
@@ -116,33 +182,47 @@ func main() {
}
}()
gpt3.InitKey()
r := gpt3.Completions(cmd)
response := gpt3.Completions(cmd)
done <- true
if r == "" {
return
}
elapsed := math.Round(time.Since(start).Seconds()*100) / 100
c := "Y"
elapsed := time.Since(s).Seconds()
elapsed = math.Round(elapsed*100) / 100
fmt.Printf("Completed in %v seconds\n", elapsed)
fmt.Printf("┌%s┐\n", strings.Repeat("─", len(r)+2))
fmt.Printf("│ %s │\n", r)
fmt.Printf("└%s┘\n", strings.Repeat("─", len(r)+2))
fmt.Print("Are you sure you want to execute the command? (Y/n): ")
fmt.Scanln(&c)
if c != "Y" && c != "y" {
return
}
cmsplit := strings.Split(r, " ")
cm := exec.Command(cmsplit[0], cmsplit[1:]...)
out, err := cm.Output()
if err != nil {
fmt.Println(err.Error())
return
}
fmt.Println(string(out))
return response, elapsed
}
func handlePostResponse(response string, gpt3 gpt.Gpt3, system, cmd string) {
fmt.Print("\nOptions: (c)opy, (s)ave, (r)egenerate, (n)one: ")
var choice string
fmt.Scanln(&choice)
switch strings.ToLower(choice) {
case "c":
clipboard.WriteAll(response)
fmt.Println("Response copied to clipboard.")
case "s":
saveResponse(response, gpt3, cmd)
case "r":
executeMain("", system, cmd)
default:
fmt.Println("No action taken.")
}
}
func saveResponse(response string, gpt3 gpt.Gpt3, cmd string) {
timestamp := time.Now().Format("2006-01-02_15-04-05")
filename := fmt.Sprintf("gpt_request_%s_%s.md", gpt3.Model, timestamp)
filePath := path.Join(RESULT_FOLDER, filename)
content := fmt.Sprintf("## Prompt:\n\n%s\n\n## Response:\n\n%s\n", cmd+". "+gpt3.Prompt, response)
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
fmt.Println("Failed to save response:", err)
} else {
fmt.Printf("Response saved to %s\n", filePath)
}
}
func getEnv(key, defaultValue string) string {
if value, exists := os.LookupEnv(key); exists {
return value
}
return defaultValue
}
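The environment variables documented in the app description above can be combined per invocation; an illustrative call (host, model, and request text are placeholders):

```bash
# Point lcg at a local Ollama instance and use a different model for this run
LCG_HOST=http://localhost:11434/ \
LCG_MODEL=codellama:13b \
LCG_RESULT_FOLDER=/tmp/gpt_results \
lcg "find files larger than 100MB in the current directory"
```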

Test file for handleCommand (likely main_test.go): tests removed along with the old flag parser

@@ -1,33 +1 @@
package main
import (
"testing"
)
func TestHandleCommand(t *testing.T) {
tests := []struct {
command string
expected int
}{
{"", CMD_HELP},
{"--help", CMD_HELP},
{"-h", CMD_HELP},
{"--version", CMD_VERSION},
{"-v", CMD_VERSION},
{"--update-key", CMD_UPDATE},
{"-u", CMD_UPDATE},
{"--delete-key", CMD_DELETE},
{"-d", CMD_DELETE},
{"random strings", CMD_COMPLETION},
{"--test", CMD_COMPLETION},
{"-test", CMD_COMPLETION},
{"how to extract test.tar.gz", CMD_COMPLETION},
}
for _, test := range tests {
result := handleCommand(test.command)
if result != test.expected {
t.Error("Expected", test.expected, "got", result)
}
}
}

reader/file.go (new file): 24 changes

@@ -0,0 +1,24 @@
package reader
import (
"bufio"
"os"
)
func FileToPrompt(cmd *string, filePath string) error {
f, err := os.Open(filePath)
if err != nil {
return err
}
defer f.Close()
reader := bufio.NewReader(f)
*cmd = *cmd + "\nFile path: " + filePath + "\n"
for {
line, err := reader.ReadString('\n')
if err != nil {
break
}
*cmd = *cmd + "\n" + line
}
return nil
}
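FileToPrompt appends the file path and its contents to the command description, which is what backs the --file/-f flag; an illustrative invocation (the file name is a placeholder):

```bash
# The contents of docker-compose.yml are appended to the prompt before it is sent
lcg --file ./docker-compose.yml "write a command to validate this compose file"
```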

shell-code/build-full.sh (new file): 92 changes

@@ -0,0 +1,92 @@
#!/bin/bash
# REPO=registry.direct-dev.ru/go-lcg
REPO=kuznetcovay/go-lcg
VERSION=$1
if [ -z "$VERSION" ]; then
VERSION=v1.0.1
fi
BRANCH=main
echo ${VERSION} > VERSION.txt
export GOCACHE="${HOME}/.cache/go-build"
# Save the current branch
CURRENT_BRANCH=$(git branch --show-current)
# Function to restore the original branch
function restore_branch {
echo "Restoring original branch: ${CURRENT_BRANCH}"
git checkout "${CURRENT_BRANCH}"
}
# Check if the current branch is different from the target branch
if [ "$CURRENT_BRANCH" != "$BRANCH" ]; then
# Set a trap to restore the branch on exit
trap restore_branch EXIT
echo "Switching to branch: ${BRANCH}"
git checkout ${BRANCH}
fi
# Fetch all tags from the remote repository
git fetch --tags
# Check if the specified version tag exists
if git rev-parse "refs/tags/${VERSION}" >/dev/null 2>&1; then
echo "Tag ${VERSION} already exists. Halting script."
exit 1
fi
# Run go tests
# if ! go test -v -run=^Test; then
# echo "Tests failed. Exiting..."
# exit 1
# fi
mkdir binaries-for-upload
# Build for linux/amd64
docker build -f Dockerfiles/LocalCompile/Dockerfile --target bin-linux --output bin-linux-amd64/ --platform linux/amd64 . ||
{
echo "docker build for amd64 failed. Exiting with code 1."
exit 1
}
cp bin-linux-amd64/lcg "binaries-for-upload/lcg.amd64.${VERSION}"
# Build for linux/arm64
docker build -f Dockerfiles/LocalCompile/Dockerfile --target bin-linux --output bin-linux-arm64/ --platform linux/arm64 . ||
{
echo "docker build for arm64 failed. Exiting with code 1."
exit 1
}
cp bin-linux-arm64/lcg "binaries-for-upload/lcg.arm64.${VERSION}"
# Push multi-platform images
docker buildx build -f Dockerfiles/ImageBuild/Dockerfile --push --platform linux/amd64,linux/arm64 -t ${REPO}:"${VERSION}" . ||
{
echo "docker buildx build --push failed. Exiting with code 1."
exit 1
}
git add -A . ||
{
echo "git add failed. Exiting with code 1."
exit 1
}
git commit -m "release $VERSION" ||
{
echo "git commit failed. Exiting with code 1."
exit 1
}
git tag -a "$VERSION" -m "release $VERSION" ||
{
echo "git tag failed. Exiting with code 1."
exit 1
}
git push -u origin main --tags ||
{
echo "git push failed. Exiting with code 1."
exit 1
}
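The script takes the target version as its only argument (defaulting to v1.0.1); a typical run, with the version string as an example, would be:

```bash
# Builds linux/amd64 and linux/arm64 binaries, pushes the multi-arch image,
# then commits, tags, and pushes "release v1.0.4"
./shell-code/build-full.sh v1.0.4
```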

shell-code script (new file, 8 lines; file name not shown): local Docker cross-compile helper

@@ -0,0 +1,8 @@
#!/bin/bash
docker build -f Dockerfiles/LocalCompile/Dockerfile --target bin-linux --output bin-linux-amd64/ --platform linux/amd64 .
docker build -f Dockerfiles/LocalCompile/Dockerfile --target bin-linux --output bin-linux-arm64/ --platform linux/arm64 .
# in linux setuid
# sudo chown root:root bin-linux/lcg
# sudo chmod +s bin-linux/lcg

shell-code script (new file, 41 lines; file name not shown): runs tests and pushes a multi-arch image

@@ -0,0 +1,41 @@
#!/bin/bash
REPO=kuznetcovay/go-lcg
VERSION=$1
if [ -z "$VERSION" ]; then
VERSION=v1.0.8
fi
BRANCH=main
echo "${VERSION}" > VERSION.txt
export GOCACHE="${HOME}/.cache/go-build"
# Save the current branch
CURRENT_BRANCH=$(git branch --show-current)
# Function to restore the original branch
function restore_branch {
echo "Restoring original branch: ${CURRENT_BRANCH}"
git checkout "${CURRENT_BRANCH}"
}
# Check if the current branch is different from the target branch
if [ "$CURRENT_BRANCH" != "$BRANCH" ]; then
# Set a trap to restore the branch on exit
trap restore_branch EXIT
echo "Switching to branch: ${BRANCH}"
git checkout ${BRANCH}
fi
# Run go tests
if ! go test -v -run=^Test; then
echo "Tests failed. Exiting..."
exit 1
fi
# Push multi-platform images
docker buildx build --push --platform linux/amd64,linux/arm64 -t ${REPO}:"${VERSION}" . ||
{
echo "docker buildx build --push failed. Exiting with code 1."
exit 1
}

shell-code script (new file, 52 lines; file name not shown): creates a GitHub release and uploads the binaries

@@ -0,0 +1,52 @@
#!/bin/bash
# Variables
VERSION_FILE="VERSION.txt"
GITHUB_TOKEN="${GITHUB_TOKEN}" # Replace with your GitHub token
REPO="direct-dev-ru/binaries" # Replace with your GitHub username/repo
TAG=lcg.$(cat "$VERSION_FILE")
echo TAG: $TAG
RELEASE_DIR="/home/su/projects/golang/linux-command-gpt/binaries-for-upload"
body="{\"tag_name\":\"${TAG}\", \"target_commitish\":\"main\", \"name\":\"${TAG}\", \
\"body\":\"${TAG}\", \"draft\":false, \"prerelease\":false, \"generate_release_notes\":false}"
echo BODY: $body
response=$(curl -L -X POST \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GITHUB_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/direct-dev-ru/binaries/releases \
-d $body)
echo $response
# Extract the upload URL from the response
upload_url=$(echo "$response" | jq -r '.upload_url' | sed "s/{?name,label}//")
# Check if the release was created successfully
if [[ "$response" == *"Not Found"* ]]; then
echo "Error: Repository not found or invalid token."
exit 1
fi
# Upload each binary file
for file in "$RELEASE_DIR"/*; do
if [[ -f "$file" ]]; then
filename=$(basename "$file")
echo "Uploading $filename..."
response=$(curl -s -X POST -H "Authorization: token $GITHUB_TOKEN" \
-H "Content-Type: application/octet-stream" \
"$upload_url?name=$filename" \
--data-binary @"$file")
echo $response
fi
done
echo "All binaries uploaded successfully."