Compare commits


3 Commits

SHA1        Message                            Date
46a0d9e45a  release v1.0.3                     2024-12-05 13:25:58 +06:00
12cd3fe6db  moved to cli framework             2024-12-05 13:17:35 +06:00
7136fe4607  before refactor to cli framework   2024-12-05 11:15:38 +06:00
7 changed files with 407 additions and 195 deletions

.gitignore (vendored): 1 line changed

@@ -13,3 +13,4 @@ shell-code/build.env
bin-linux-amd64/*
bin-linux-arm64/*
binaries-for-upload/*
gpt_results

VERSION.txt

@@ -1 +1 @@
v1.0.2
v1.0.3
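
Both the commented-out _main.go backup and the refactored main.go below pull this version string into the binary with a //go:embed directive. A minimal standalone sketch of that mechanism, assuming a VERSION.txt file sits next to the source:

    package main

    import (
        _ "embed" // required for the //go:embed directive
        "fmt"
        "strings"
    )

    // Version is filled at compile time with the contents of VERSION.txt,
    // so bumping that file (v1.0.2 -> v1.0.3) is all a release needs.
    //
    //go:embed VERSION.txt
    var Version string

    func main() {
        fmt.Println("lcg version:", strings.TrimSpace(Version))
    }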

_main.go (new file, 224 lines)

@@ -0,0 +1,224 @@
// package main
// import (
// _ "embed"
// "fmt"
// "math"
// "os"
// "os/user"
// "path"
// "strings"
// "time"
// "github.com/atotto/clipboard"
// "github.com/direct-dev-ru/linux-command-gpt/gpt"
// "github.com/direct-dev-ru/linux-command-gpt/reader"
// )
// //go:embed VERSION.txt
// var Version string
// var cwd, _ = os.Getwd()
// var (
// HOST = getEnv("LCG_HOST", "http://192.168.87.108:11434/")
// COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat") // relative part of endpoint
// MODEL = getEnv("LCG_MODEL", "codegeex4")
// PROMPT = getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.")
// API_KEY_FILE = getEnv("LCG_API_KEY_FILE", ".openai_api_key")
// RESULT_FOLDER = getEnv("LCG_RESULT_FOLDER", path.Join(cwd, "gpt_results"))
// // HOST = "https://api.openai.com/v1/"
// // COMPLETIONS = "chat/completions"
// // MODEL = "gpt-4o-mini"
// // MODEL = "codellama:13b"
// // This file is created in the user's home directory
// // Example: /home/username/.openai_api_key
// // API_KEY_FILE = ".openai_api_key"
// HELP = `
// Usage: lcg [options]
// --help -h output usage information
// --version -v output the version number
// --file -f read part of command from file or bash feature $(...)
// --update-key -u update the API key
// --delete-key -d delete the API key
// Example Usage: lcg I want to extract linux-command-gpt.tar.gz file
// Example Usage: lcg --file /path/to/file.json I want to print object questions with jq
// Env Vars:
// LCG_HOST - defaults to "http://192.168.87.108:11434/" - endpoint for Ollama or other LLM API
// LCG_COMPLETIONS_PATH - defaults to "api/chat" - relative part of endpoint
// LCG_MODEL - defaults to "codegeex4"
// LCG_PROMPT - defaults to Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks.
// LCG_API_KEY_FILE - defaults to ${HOME}/.openai_api_key - file with API key
// LCG_RESULT_FOLDER - defaults to $(pwd)/gpt_results - folder to save results
// `
// VERSION = Version
// CMD_HELP = 100
// CMD_VERSION = 101
// CMD_UPDATE = 102
// CMD_DELETE = 103
// CMD_COMPLETION = 110
// )
// // getEnv retrieves the value of the environment variable `key` or returns `defaultValue` if not set.
// func getEnv(key, defaultValue string) string {
// if value, exists := os.LookupEnv(key); exists {
// return value
// }
// return defaultValue
// }
// func handleCommand(cmd string) int {
// if cmd == "" || cmd == "--help" || cmd == "-h" {
// return CMD_HELP
// }
// if cmd == "--version" || cmd == "-v" {
// return CMD_VERSION
// }
// if cmd == "--update-key" || cmd == "-u" {
// return CMD_UPDATE
// }
// if cmd == "--delete-key" || cmd == "-d" {
// return CMD_DELETE
// }
// return CMD_COMPLETION
// }
// func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
// gpt3.InitKey()
// s := time.Now()
// done := make(chan bool)
// go func() {
// loadingChars := []rune{'-', '\\', '|', '/'}
// i := 0
// for {
// select {
// case <-done:
// fmt.Printf("\r")
// return
// default:
// fmt.Printf("\rLoading %c", loadingChars[i])
// i = (i + 1) % len(loadingChars)
// time.Sleep(30 * time.Millisecond)
// }
// }
// }()
// r := gpt3.Completions(cmd)
// done <- true
// elapsed := time.Since(s).Seconds()
// elapsed = math.Round(elapsed*100) / 100
// if r == "" {
// return "", elapsed
// }
// return r, elapsed
// }
// func main() {
// currentUser, err := user.Current()
// if err != nil {
// panic(err)
// }
// args := os.Args
// cmd := ""
// file := ""
// if len(args) > 1 {
// start := 1
// if args[1] == "--file" || args[1] == "-f" {
// file = args[2]
// start = 3
// }
// cmd = strings.Join(args[start:], " ")
// }
// if file != "" {
// err := reader.FileToPrompt(&cmd, file)
// if err != nil {
// fmt.Println(err)
// return
// }
// }
// if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
// os.MkdirAll(RESULT_FOLDER, 0755)
// }
// h := handleCommand(cmd)
// if h == CMD_HELP {
// fmt.Println(HELP)
// return
// }
// if h == CMD_VERSION {
// fmt.Println(VERSION)
// return
// }
// gpt3 := gpt.Gpt3{
// CompletionUrl: HOST + COMPLETIONS,
// Model: MODEL,
// Prompt: PROMPT,
// HomeDir: currentUser.HomeDir,
// ApiKeyFile: API_KEY_FILE,
// Temperature: 0.01,
// }
// if h == CMD_UPDATE {
// gpt3.UpdateKey()
// return
// }
// if h == CMD_DELETE {
// gpt3.DeleteKey()
// return
// }
// c := "R"
// r := ""
// elapsed := 0.0
// for c == "R" || c == "r" {
// r, elapsed = getCommand(gpt3, cmd)
// c = "N"
// fmt.Printf("Completed in %v seconds\n\n", elapsed)
// fmt.Println(r)
// fmt.Print("\nDo you want to (c)opy, (s)ave to file, (r)egenerate, or take (N)o action on the command? (c/r/N): ")
// fmt.Scanln(&c)
// // no action
// if c == "N" || c == "n" {
// return
// }
// }
// if r == "" {
// return
// }
// // Copy to clipboard
// if c == "C" || c == "c" {
// clipboard.WriteAll(r)
// fmt.Println("\033[33mCopied to clipboard")
// return
// }
// if c == "S" || c == "s" {
// timestamp := time.Now().Format("2006-01-02_15-04-05") // Format: YYYY-MM-DD_HH-MM-SS
// filename := fmt.Sprintf("gpt_request_%s(%s).md", timestamp, gpt3.Model)
// filePath := path.Join(RESULT_FOLDER, filename)
// resultString := fmt.Sprintf("## Prompt:\n\n%s\n\n------------------\n\n## Response:\n\n%s\n\n", cmd+". "+gpt3.Prompt, r)
// os.WriteFile(filePath, []byte(resultString), 0644)
// fmt.Println("\033[33mSaved to file")
// return
// }
// }
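
The commented-out getCommand above keeps a console spinner alive in a goroutine and shuts it down through a done channel once the completion request returns. A self-contained sketch of that pattern (the simulated two-second "request" stands in for gpt3.Completions and is not part of the diff):

    package main

    import (
        "fmt"
        "time"
    )

    // spin redraws a rotating cursor until it receives on done,
    // mirroring the loop inside getCommand.
    func spin(done <-chan bool) {
        frames := []rune{'-', '\\', '|', '/'}
        for i := 0; ; i = (i + 1) % len(frames) {
            select {
            case <-done:
                fmt.Print("\r") // return the cursor to the line start, as the original does
                return
            default:
                fmt.Printf("\rLoading %c", frames[i])
                time.Sleep(30 * time.Millisecond)
            }
        }
    }

    func main() {
        done := make(chan bool)
        go spin(done)
        time.Sleep(2 * time.Second) // stand-in for the LLM call
        done <- true                // unbuffered send: waits until spin has stopped
        fmt.Println("done")
    }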

go.mod: 7 lines changed

@@ -3,3 +3,10 @@ module github.com/direct-dev-ru/linux-command-gpt
go 1.18
require github.com/atotto/clipboard v0.1.4
require (
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/urfave/cli/v2 v2.27.5 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
)
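
The only direct addition here is github.com/urfave/cli/v2, which the refactored main.go below imports; go-md2man, blackfriday, and smetrics arrive as its transitive dependencies (a later go mod tidy would normally drop the // indirect marker from cli/v2 itself once the import exists). For orientation, a minimal urfave/cli/v2 program looks roughly like this generic sketch, which is not code from this repository:

    package main

    import (
        "fmt"
        "log"
        "os"

        "github.com/urfave/cli/v2"
    )

    func main() {
        app := &cli.App{
            Name:  "demo",
            Usage: "echo the arguments back",
            Flags: []cli.Flag{
                &cli.StringFlag{Name: "prefix", Aliases: []string{"p"}, Value: ">"},
            },
            Action: func(c *cli.Context) error {
                fmt.Println(c.String("prefix"), c.Args().Slice())
                return nil
            },
        }
        if err := app.Run(os.Args); err != nil {
            log.Fatal(err)
        }
    }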

go.sum: 8 lines changed

@@ -1,2 +1,10 @@
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w=
github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=

main.go: 320 lines changed

@@ -13,88 +13,158 @@ import (
"github.com/atotto/clipboard"
"github.com/direct-dev-ru/linux-command-gpt/gpt"
"github.com/direct-dev-ru/linux-command-gpt/reader"
"github.com/urfave/cli/v2"
)
//go:embed VERSION.txt
var Version string
var cwd, _ = os.Getwd()
var (
cwd, _ = os.Getwd()
HOST = getEnv("LCG_HOST", "http://192.168.87.108:11434/")
COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat") // relative part of endpoint
COMPLETIONS = getEnv("LCG_COMPLETIONS_PATH", "api/chat")
MODEL = getEnv("LCG_MODEL", "codegeex4")
PROMPT = getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks. No need ` symbols.")
API_KEY_FILE = getEnv("LCG_API_KEY_FILE", ".openai_api_key")
RESULT_FOLDER = getEnv("LCG_RESULT_FOLDER", path.Join(cwd, "gpt_results"))
// HOST = "https://api.openai.com/v1/"
// COMPLETIONS = "chat/completions"
// MODEL = "gpt-4o-mini"
// MODEL = "codellama:13b"
// This file is created in the user's home directory
// Example: /home/username/.openai_api_key
// API_KEY_FILE = ".openai_api_key"
HELP = `
Usage: lcg [options]
--help -h output usage information
--version -v output the version number
--file -f read part of command from file or bash feature $(...)
--update-key -u update the API key
--delete-key -d delete the API key
Example Usage: lcg I want to extract linux-command-gpt.tar.gz file
Example Usage: lcg --file /path/to/file.json I want to print object questions with jq
Env Vars:
LCG_HOST - defaults to "http://192.168.87.108:11434/" - endpoint for Ollama or other LLM API
LCG_COMPLETIONS_PATH - defaults to "api/chat" - relative part of endpoint
LCG_MODEL - defaults to "codegeex4"
LCG_PROMPT - defaults to Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks.
LCG_API_KEY_FILE - defaults to ${HOME}/.openai_api_key - file with API key
LCG_RESULT_FOLDER - defaults to $(pwd)/gpt_results - folder to save results
`
VERSION = Version
CMD_HELP = 100
CMD_VERSION = 101
CMD_UPDATE = 102
CMD_DELETE = 103
CMD_COMPLETION = 110
)
// getEnv retrieves the value of the environment variable `key` or returns `defaultValue` if not set.
func getEnv(key, defaultValue string) string {
if value, exists := os.LookupEnv(key); exists {
return value
func main() {
app := &cli.App{
Name: "lcg",
Usage: "Linux Command GPT - Generate Linux commands from descriptions",
Version: Version,
Commands: getCommands(),
UsageText: `
lcg [global options] <command description>
Examples:
lcg "I want to extract linux-command-gpt.tar.gz file"
lcg --file /path/to/file.txt "I want to list all directories with ls"
`,
Description: `
Linux Command GPT is a tool for generating Linux commands from natural language descriptions.
It supports reading parts of the prompt from files and allows saving, copying, or regenerating results.
Additional commands are available for managing API keys.
Environment Variables:
LCG_HOST Endpoint for LLM API (default: http://192.168.87.108:11434/)
LCG_COMPLETIONS_PATH Relative API path (default: api/chat)
LCG_MODEL Model name (default: codegeex4)
LCG_PROMPT Default prompt text
LCG_API_KEY_FILE API key storage file (default: ~/.openai_api_key)
LCG_RESULT_FOLDER Results folder (default: ./gpt_results)
`,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "file",
Aliases: []string{"f"},
Usage: "Read part of the command from a file",
},
&cli.StringFlag{
Name: "sys",
Aliases: []string{"s"},
Usage: "System prompt",
DefaultText: getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks"),
Value: getEnv("LCG_PROMPT", "Reply with linux command and nothing else. Output with plain response - no need formatting. No need explanation. No need code blocks"),
},
},
Action: func(c *cli.Context) error {
file := c.String("file")
system := c.String("sys")
args := c.Args().Slice()
if len(args) == 0 {
cli.ShowAppHelp(c)
return nil
}
executeMain(file, system, strings.Join(args, " "))
return nil
},
}
cli.VersionFlag = &cli.BoolFlag{
Name: "version",
Aliases: []string{"V", "v"},
Usage: "prints out version",
}
cli.VersionPrinter = func(cCtx *cli.Context) {
fmt.Printf("%s\n", cCtx.App.Version)
}
if err := app.Run(os.Args); err != nil {
fmt.Println("Error:", err)
os.Exit(1)
}
return defaultValue
}
func handleCommand(cmd string) int {
if cmd == "" || cmd == "--help" || cmd == "-h" {
return CMD_HELP
func getCommands() []*cli.Command {
return []*cli.Command{
{
Name: "update-key",
Aliases: []string{"u"},
Usage: "Update the API key",
Action: func(c *cli.Context) error {
gpt3 := initGPT()
gpt3.UpdateKey()
fmt.Println("API key updated.")
return nil
},
},
{
Name: "delete-key",
Aliases: []string{"d"},
Usage: "Delete the API key",
Action: func(c *cli.Context) error {
gpt3 := initGPT()
gpt3.DeleteKey()
fmt.Println("API key deleted.")
return nil
},
},
}
if cmd == "--version" || cmd == "-v" {
return CMD_VERSION
}
func executeMain(file, system, commandInput string) {
// fmt.Println(system, commandInput)
// os.Exit(0)
if file != "" {
if err := reader.FileToPrompt(&commandInput, file); err != nil {
fmt.Println("Error reading file:", err)
return
}
if cmd == "--update-key" || cmd == "-u" {
return CMD_UPDATE
}
if cmd == "--delete-key" || cmd == "-d" {
return CMD_DELETE
if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
os.MkdirAll(RESULT_FOLDER, 0755)
}
gpt3 := initGPT()
response, elapsed := getCommand(gpt3, commandInput)
if response == "" {
fmt.Println("No response received.")
return
}
fmt.Printf("Completed in %v seconds\n\n%s\n", elapsed, response)
handlePostResponse(response, gpt3, system, commandInput)
}
func initGPT() gpt.Gpt3 {
currentUser, _ := user.Current()
return gpt.Gpt3{
CompletionUrl: HOST + COMPLETIONS,
Model: MODEL,
Prompt: PROMPT,
HomeDir: currentUser.HomeDir,
ApiKeyFile: API_KEY_FILE,
Temperature: 0.01,
}
return CMD_COMPLETION
}
func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
gpt3.InitKey()
s := time.Now()
start := time.Now()
done := make(chan bool)
go func() {
loadingChars := []rune{'-', '\\', '|', '/'}
@@ -112,113 +182,47 @@ func getCommand(gpt3 gpt.Gpt3, cmd string) (string, float64) {
}
}()
r := gpt3.Completions(cmd)
response := gpt3.Completions(cmd)
done <- true
elapsed := time.Since(s).Seconds()
elapsed = math.Round(elapsed*100) / 100
elapsed := math.Round(time.Since(start).Seconds()*100) / 100
if r == "" {
return "", elapsed
}
return r, elapsed
return response, elapsed
}
func main() {
currentUser, err := user.Current()
if err != nil {
panic(err)
}
func handlePostResponse(response string, gpt3 gpt.Gpt3, system, cmd string) {
fmt.Print("\nOptions: (c)opy, (s)ave, (r)egenerate, (n)one: ")
var choice string
fmt.Scanln(&choice)
args := os.Args
cmd := ""
file := ""
if len(args) > 1 {
start := 1
if args[1] == "--file" || args[1] == "-f" {
file = args[2]
start = 3
}
cmd = strings.Join(args[start:], " ")
switch strings.ToLower(choice) {
case "c":
clipboard.WriteAll(response)
fmt.Println("Response copied to clipboard.")
case "s":
saveResponse(response, gpt3, cmd)
case "r":
executeMain("", system, cmd)
default:
fmt.Println("No action taken.")
}
}
if file != "" {
err := reader.FileToPrompt(&cmd, file)
if err != nil {
fmt.Println(err)
return
}
}
if _, err := os.Stat(RESULT_FOLDER); os.IsNotExist(err) {
os.MkdirAll(RESULT_FOLDER, 0755)
}
h := handleCommand(cmd)
if h == CMD_HELP {
fmt.Println(HELP)
return
}
if h == CMD_VERSION {
fmt.Println(VERSION)
return
}
gpt3 := gpt.Gpt3{
CompletionUrl: HOST + COMPLETIONS,
Model: MODEL,
Prompt: PROMPT,
HomeDir: currentUser.HomeDir,
ApiKeyFile: API_KEY_FILE,
Temperature: 0.01,
}
if h == CMD_UPDATE {
gpt3.UpdateKey()
return
}
if h == CMD_DELETE {
gpt3.DeleteKey()
return
}
c := "R"
r := ""
elapsed := 0.0
for c == "R" || c == "r" {
r, elapsed = getCommand(gpt3, cmd)
c = "N"
fmt.Printf("Completed in %v seconds\n\n", elapsed)
fmt.Println(r)
fmt.Print("\nDo you want to (c)opy, (s)ave to file, (r)egenerate, or take (N)o action on the command? (c/r/N): ")
fmt.Scanln(&c)
// no action
if c == "N" || c == "n" {
return
}
}
if r == "" {
return
}
// Copy to clipboard
if c == "C" || c == "c" {
clipboard.WriteAll(r)
fmt.Println("\033[33mCopied to clipboard")
return
}
if c == "S" || c == "s" {
timestamp := time.Now().Format("2006-01-02_15-04-05") // Format: YYYY-MM-DD_HH-MM-SS
filename := fmt.Sprintf("gpt_request_%s(%s).md", timestamp, gpt3.Model)
func saveResponse(response string, gpt3 gpt.Gpt3, cmd string) {
timestamp := time.Now().Format("2006-01-02_15-04-05")
filename := fmt.Sprintf("gpt_request_%s_%s.md", gpt3.Model, timestamp)
filePath := path.Join(RESULT_FOLDER, filename)
resultString := fmt.Sprintf("## Prompt:\n\n%s\n\n------------------\n\n## Response:\n\n%s\n\n", cmd+". "+gpt3.Prompt, r)
os.WriteFile(filePath, []byte(resultString), 0644)
fmt.Println("\033[33mSaved to file")
return
content := fmt.Sprintf("## Prompt:\n\n%s\n\n## Response:\n\n%s\n", cmd+". "+gpt3.Prompt, response)
if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
fmt.Println("Failed to save response:", err)
} else {
fmt.Printf("Response saved to %s\n", filePath)
}
}
func getEnv(key, defaultValue string) string {
if value, exists := os.LookupEnv(key); exists {
return value
}
return defaultValue
}
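
Two small details in the refactor are easy to miss: saveResponse now puts the model name before the timestamp in the filename, and the timestamp uses Go's reference-time layout, where "2006-01-02_15-04-05" is the example date that defines the format rather than a literal value. A quick standalone illustration, with the default model name and results folder hard-coded for brevity:

    package main

    import (
        "fmt"
        "path"
        "time"
    )

    func main() {
        // Go formats times by example: 2006-01-02_15-04-05 is the reference
        // instant rendered in the desired layout.
        timestamp := time.Now().Format("2006-01-02_15-04-05")
        filename := fmt.Sprintf("gpt_request_%s_%s.md", "codegeex4", timestamp)
        fmt.Println(path.Join("gpt_results", filename))
        // e.g. gpt_results/gpt_request_codegeex4_2024-12-05_13-25-58.md
    }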


@@ -1,33 +1 @@
package main
import (
"testing"
)
func TestHandleCommand(t *testing.T) {
tests := []struct {
command string
expected int
}{
{"", CMD_HELP},
{"--help", CMD_HELP},
{"-h", CMD_HELP},
{"--version", CMD_VERSION},
{"-v", CMD_VERSION},
{"--update-key", CMD_UPDATE},
{"-u", CMD_UPDATE},
{"--delete-key", CMD_DELETE},
{"-d", CMD_DELETE},
{"random strings", CMD_COMPLETION},
{"--test", CMD_COMPLETION},
{"-test", CMD_COMPLETION},
{"how to extract test.tar.gz", CMD_COMPLETION},
}
for _, test := range tests {
result := handleCommand(test.command)
if result != test.expected {
t.Error("Expected", test.expected, "got", result)
}
}
}
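
The table-driven TestHandleCommand shown above is removed along with handleCommand itself, and no replacement appears in this comparison. Under urfave/cli an equivalent smoke test could build a small cli.App and assert that a named command's Action fires, roughly like the sketch below (the test name and the minimal app wiring are illustrative, not part of the diff):

    package main

    import (
        "testing"

        "github.com/urfave/cli/v2"
    )

    // TestCommandDispatch checks that app.Run routes "update-key" to its
    // Action, which is the decision handleCommand used to make by hand.
    func TestCommandDispatch(t *testing.T) {
        called := false
        app := &cli.App{
            Name: "lcg",
            Commands: []*cli.Command{
                {
                    Name:    "update-key",
                    Aliases: []string{"u"},
                    Action: func(c *cli.Context) error {
                        called = true
                        return nil
                    },
                },
            },
        }
        if err := app.Run([]string{"lcg", "update-key"}); err != nil {
            t.Fatalf("app.Run returned an error: %v", err)
        }
        if !called {
            t.Error("expected the update-key action to run")
        }
    }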