diff --git a/build.bat b/build.bat
new file mode 100644
index 0000000..138783b
--- /dev/null
+++ b/build.bat
@@ -0,0 +1,3 @@
+go build -o gskaro-v1.exe ./cmd/gskaro
+
+pause
\ No newline at end of file
diff --git a/cmd/gskaro/main.go b/cmd/gskaro/main.go
new file mode 100644
index 0000000..9599fba
--- /dev/null
+++ b/cmd/gskaro/main.go
@@ -0,0 +1,7 @@
+package main
+
+import "gskaro-v1/internal/server"
+
+func main() {
+    server.Start()
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..dc3d25a
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,3 @@
+module gskaro-v1
+
+go 1.25.6
diff --git a/gskaro-v1.exe b/gskaro-v1.exe
new file mode 100644
index 0000000..0e0f1ad
Binary files /dev/null and b/gskaro-v1.exe differ
diff --git a/init-gskaro.bat b/init-gskaro.bat
new file mode 100644
index 0000000..9a1bc18
--- /dev/null
+++ b/init-gskaro.bat
@@ -0,0 +1,6 @@
+gskaro-v1.exe init
+
+pause
+
+
+init
\ No newline at end of file
diff --git a/internal/llm/models.go b/internal/llm/models.go
new file mode 100644
index 0000000..2cfc3a4
--- /dev/null
+++ b/internal/llm/models.go
@@ -0,0 +1,38 @@
+package llm
+
+import (
+    "encoding/json"
+    "fmt"
+    "net/http"
+)
+
+type OllamaTagsResponse struct {
+    Models []struct {
+        Name string `json:"name"`
+    } `json:"models"`
+}
+
+// GetModelsFromHost returns the names of the models available on the given host.
+func GetModelsFromHost(host string) ([]string, error) {
+    resp, err := http.Get(host + "/api/tags")
+    if err != nil {
+        return nil, fmt.Errorf("ошибка запроса к %s/api/tags: %w", host, err)
+    }
+    defer resp.Body.Close()
+
+    if resp.StatusCode != http.StatusOK {
+        return nil, fmt.Errorf("Ollama вернула статус %s", resp.Status)
+    }
+
+    var data OllamaTagsResponse
+    if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
+        return nil, fmt.Errorf("ошибка разбора JSON: %w", err)
+    }
+
+    out := make([]string, 0, len(data.Models))
+    for _, m := range data.Models {
+        out = append(out, m.Name)
+    }
+
+    return out, nil
+}
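As a quick sanity check, GetModelsFromHost can be driven from a throwaway main inside this module (internal packages are not importable from outside it). A minimal sketch, assuming an Ollama instance on the default host used elsewhere in this change; the standalone command itself is hypothetical and not part of the patch:

package main

import (
    "fmt"
    "log"

    "gskaro-v1/internal/llm"
)

func main() {
    // Ask the local Ollama daemon for its installed models.
    models, err := llm.GetModelsFromHost("http://localhost:11434")
    if err != nil {
        log.Fatal(err)
    }
    for _, name := range models {
        fmt.Println(name)
    }
}
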
diff --git a/internal/llm/state.go b/internal/llm/state.go
new file mode 100644
index 0000000..6f0323c
--- /dev/null
+++ b/internal/llm/state.go
@@ -0,0 +1,6 @@
+package llm
+
+// The active host and model the server is currently working with.
+// They are changed from the handlers in the server package.
+var ActiveHost = "http://localhost:11434"
+var ActiveModel = "mistral:7b"
diff --git a/internal/llm/stream.go b/internal/llm/stream.go
new file mode 100644
index 0000000..27f9c0b
--- /dev/null
+++ b/internal/llm/stream.go
@@ -0,0 +1,75 @@
+package llm
+
+import (
+    "bytes"
+    "encoding/json"
+    "fmt"
+    "net/http"
+)
+
+type ollamaGenerateRequest struct {
+    Model  string `json:"model"`
+    Prompt string `json:"prompt"`
+    Stream bool   `json:"stream"`
+}
+
+type OllamaGenerateResponse struct {
+    Model           string `json:"model"`
+    CreatedAt       string `json:"created_at"`
+    Response        string `json:"response"`
+    Done            bool   `json:"done"`
+    PromptEvalCount int    `json:"prompt_eval_count"`
+    EvalCount       int    `json:"eval_count"`
+}
+
+type OllamaClient struct {
+    Host  string
+    Model string
+}
+
+// Stream streams the model's reply to the callback chunk by chunk.
+func (c *OllamaClient) Stream(prompt string, callback func(chunk string, meta *OllamaGenerateResponse)) error {
+    if c.Host == "" {
+        c.Host = ActiveHost
+    }
+    if c.Model == "" {
+        c.Model = ActiveModel
+    }
+
+    reqBody := ollamaGenerateRequest{
+        Model:  c.Model,
+        Prompt: prompt,
+        Stream: true,
+    }
+
+    var buf bytes.Buffer
+    if err := json.NewEncoder(&buf).Encode(&reqBody); err != nil {
+        return fmt.Errorf("ошибка кодирования запроса: %w", err)
+    }
+
+    resp, err := http.Post(c.Host+"/api/generate", "application/json", &buf)
+    if err != nil {
+        return fmt.Errorf("ошибка запроса к %s: %w", c.Host, err)
+    }
+    defer resp.Body.Close()
+
+    dec := json.NewDecoder(resp.Body)
+
+    for {
+        var msg OllamaGenerateResponse
+        if err := dec.Decode(&msg); err != nil {
+            break // end of the stream
+        }
+
+        if msg.Response != "" {
+            callback(msg.Response, nil)
+        }
+
+        if msg.Done {
+            callback("", &msg)
+            break
+        }
+    }
+
+    return nil
+}
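For reference, a sketch of how OllamaClient.Stream is intended to be driven (this mirrors what llmQueryHandler below does): content chunks arrive with a nil meta, and the final callback carries Done plus the token counters. The prompt text is only an example, and a reachable Ollama host is assumed; empty Host/Model fields fall back to ActiveHost/ActiveModel as shown in stream.go:

package main

import (
    "fmt"
    "log"
    "strings"

    "gskaro-v1/internal/llm"
)

func main() {
    // Empty Host/Model fall back to llm.ActiveHost and llm.ActiveModel.
    client := &llm.OllamaClient{}

    var answer strings.Builder
    err := client.Stream("Say hello in one short sentence.",
        func(chunk string, meta *llm.OllamaGenerateResponse) {
            if meta == nil {
                answer.WriteString(chunk) // intermediate fragment of the reply
                return
            }
            // Final message: Done is true and the token counters are filled in.
            fmt.Printf("\n[prompt tokens: %d, answer tokens: %d]\n",
                meta.PromptEvalCount, meta.EvalCount)
        })
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(answer.String())
}
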
diff --git a/internal/server/handlers_llm.go b/internal/server/handlers_llm.go
new file mode 100644
index 0000000..6396dfa
--- /dev/null
+++ b/internal/server/handlers_llm.go
@@ -0,0 +1,267 @@
+package server
+
+import (
+    "bytes"
+    "fmt"
+    "gskaro-v1/internal/llm"
+    "net/http"
+    "time"
+)
+
+type llmLogEntry struct {
+    Model        string
+    Prompt       string
+    Answer       string
+    Timestamp    time.Time
+    PromptTokens int
+    AnswerTokens int
+}
+
+var llmHistory []llmLogEntry
+
+// -----------------------------
+// LLM Console
+// -----------------------------
+
+func llmConsoleHandler(w http.ResponseWriter, r *http.Request) {
+    if r.Header.Get("HX-Request") != "true" {
+        fmt.Fprintf(w, `
+            LLM Console
+            Текущая модель:
+        `)
+        renderConsole(w)
+        return
+    }
+
+    renderConsole(w)
+}
+
+func renderConsole(w http.ResponseWriter) {
+    fmt.Fprintf(w, `
+        LLM Console
+        Текущая модель: %s
+        Ожидание ответа...
+    `, llm.ActiveModel)
+}
+
+// -----------------------------
+// Model list
+// -----------------------------
+
+func llmModelsHandler(w http.ResponseWriter, r *http.Request) {
+    host := r.URL.Query().Get("host")
+
+    if host == "" {
+        fmt.Fprintf(w, "Подключение к Ollama")
+        return
+    }
+
+    models, err := llm.GetModelsFromHost(host)
+    if err != nil {
+        fmt.Fprintf(w, "Ошибка подключения: %s", err)
+        return
+    }
+
+    fmt.Fprintf(w, "Модели на %s\n", host)
+    for _, m := range models {
+        fmt.Fprintf(w, "%s\n", m)
+    }
+}
+
+// -----------------------------
+// Model selection
+// -----------------------------
+
+func llmSelectHandler(w http.ResponseWriter, r *http.Request) {
+    host := r.FormValue("host")
+    model := r.FormValue("model")
+
+    if host != "" {
+        llm.ActiveHost = host
+    }
+
+    if model != "" {
+        llm.ActiveModel = model
+    }
+
+    if r.Header.Get("HX-Request") == "true" {
+        w.Header().Set("HX-Redirect", "/api/llm/console")
+        return
+    }
+
+    http.Redirect(w, r, "/api/llm/console", http.StatusSeeOther)
+}
+
+// -----------------------------
+// Query the model
+// -----------------------------
+
+func llmQueryHandler(w http.ResponseWriter, r *http.Request) {
+    prompt := r.FormValue("prompt")
+    if prompt == "" {
+        fmt.Fprintf(w, "Пустой запрос.")
+        return
+    }
+
+    client := &llm.OllamaClient{
+        Host:  llm.ActiveHost,
+        Model: llm.ActiveModel,
+    }
+
+    var answerBuf bytes.Buffer
+    var promptTokens, answerTokens int
+
+    err := client.Stream(prompt, func(chunk string, meta *llm.OllamaGenerateResponse) {
+        answerBuf.WriteString(chunk)
+        if meta != nil {
+            promptTokens = meta.PromptEvalCount
+            answerTokens = meta.EvalCount
+        }
+    })
+    if err != nil {
+        fmt.Fprintf(w, "Ошибка: %s", err)
+        return
+    }
+
+    resp := answerBuf.String()
+
+    llmHistory = append(llmHistory, llmLogEntry{
+        Model:        llm.ActiveModel,
+        Prompt:       prompt,
+        Answer:       resp,
+        Timestamp:    time.Now(),
+        PromptTokens: promptTokens,
+        AnswerTokens: answerTokens,
+    })
+
+    fmt.Fprintf(w, "%s", resp)
+}
+
+// -----------------------------
+// Status
+// -----------------------------
+
+func llmStatusHandler(w http.ResponseWriter, r *http.Request) {
+    resp, err := http.Get(llm.ActiveHost + "/api/tags")
+    if err != nil {
+        fmt.Fprintf(w, `
+            Ollama: offline
+            Модель: %s
+        `, llm.ActiveModel)
+        return
+    }
+    resp.Body.Close()
+
+    fmt.Fprintf(w, `
+        Ollama: online
+        Модель: %s
+    `, llm.ActiveModel)
+}
+
+// -----------------------------
+// History
+// -----------------------------
+
+func llmHistoryHandler(w http.ResponseWriter, r *http.Request) {
+    fmt.Fprintf(w, "LLM History\n")
+
+    if len(llmHistory) == 0 {
+        fmt.Fprintf(w, "Пока пусто.")
+        return
+    }
+
+    for _, e := range llmHistory {
+        fmt.Fprintf(w, "[%s] %s\n%s\n%s\n\n",
+            e.Timestamp.Format("15:04:05"), e.Model, e.Prompt, e.Answer)
+    }
+}
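Because the handlers are ordinary http.HandlerFunc values operating on package state, the non-streaming ones can be unit-tested without a running Ollama. A sketch with net/http/httptest against llmSelectHandler; the test file itself is hypothetical (not part of this change), and the host value is a placeholder:

package server

import (
    "net/http/httptest"
    "net/url"
    "strings"
    "testing"

    "gskaro-v1/internal/llm"
)

func TestLLMSelectHandlerUpdatesActiveModel(t *testing.T) {
    form := url.Values{"host": {"http://127.0.0.1:11434"}, "model": {"mistral:7b"}}
    req := httptest.NewRequest("POST", "/api/llm/select", strings.NewReader(form.Encode()))
    req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
    req.Header.Set("HX-Request", "true")
    rec := httptest.NewRecorder()

    llmSelectHandler(rec, req)

    if llm.ActiveHost != "http://127.0.0.1:11434" || llm.ActiveModel != "mistral:7b" {
        t.Fatalf("active host/model not updated: %s / %s", llm.ActiveHost, llm.ActiveModel)
    }
    if got := rec.Header().Get("HX-Redirect"); got != "/api/llm/console" {
        t.Fatalf("expected HX-Redirect to /api/llm/console, got %q", got)
    }
}
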
diff --git a/internal/server/server.go b/internal/server/server.go
new file mode 100644
index 0000000..63d8ea7
--- /dev/null
+++ b/internal/server/server.go
@@ -0,0 +1,43 @@
+// internal/server/server.go
+package server
+
+import (
+    "fmt"
+    "io/fs"
+    "net/http"
+    "os/exec"
+)
+
+func Start() {
+    mux := http.NewServeMux()
+
+    // LLM API
+    mux.HandleFunc("/api/llm/console", llmConsoleHandler)
+    mux.HandleFunc("/api/llm/models", llmModelsHandler)
+    mux.HandleFunc("/api/llm/select", llmSelectHandler)
+    mux.HandleFunc("/api/llm/query", llmQueryHandler)
+    mux.HandleFunc("/api/llm/status", llmStatusHandler)
+    mux.HandleFunc("/api/llm/history", llmHistoryHandler)
+
+    // Stubs
+    mux.HandleFunc("/api/stats", func(w http.ResponseWriter, r *http.Request) {
+        fmt.Fprintf(w, "Статистика\nПока пусто.")
+    })
+    mux.HandleFunc("/api/tasks", func(w http.ResponseWriter, r *http.Request) {
+        fmt.Fprintf(w, "Задачи\nПока пусто.")
+    })
+    mux.HandleFunc("/api/notes", func(w http.ResponseWriter, r *http.Request) {
+        fmt.Fprintf(w, "AI_NOTES.md\nПока пусто.")
+    })
+
+    // Static files — must be registered last
+    sub, err := fs.Sub(WebFS, "web")
+    if err != nil {
+        panic(err)
+    }
+    mux.Handle("/", http.FileServer(http.FS(sub)))
+
+    fmt.Println("Listening on :4568")
+    exec.Command("rundll32", "url.dll,FileProtocolHandler", "http://localhost:4568").Start()
+    http.ListenAndServe(":4568", mux)
+}
diff --git a/internal/server/web/index.html b/internal/server/web/index.html
new file mode 100644
index 0000000..e339cbe
--- /dev/null
+++ b/internal/server/web/index.html
@@ -0,0 +1,30 @@
+gskaro — Developer Assistant
+Текущая модель:
+Выбери раздел вверху.
+
+ + \ No newline at end of file diff --git a/internal/server/web_static.go b/internal/server/web_static.go new file mode 100644 index 0000000..8e09f7b --- /dev/null +++ b/internal/server/web_static.go @@ -0,0 +1,6 @@ +package server + +import "embed" + +//go:embed web/* +var WebFS embed.FS diff --git a/start-gskaro.bat b/start-gskaro.bat new file mode 100644 index 0000000..082f676 --- /dev/null +++ b/start-gskaro.bat @@ -0,0 +1,6 @@ +gskaro-v1.exe ui + +pause + + +init \ No newline at end of file