Автокоммит: 2026-03-29 16:21:29

This commit is contained in:
Evgenii 2026-03-29 16:21:29 +05:00
parent 6b336f0ce6
commit 70e900853c
13 changed files with 490 additions and 0 deletions

3
build.bat Normal file
View File

@ -0,0 +1,3 @@
REM Build the gskaro web UI binary from cmd/gskaro into gskaro-v1.exe.
go build -o gskaro-v1.exe ./cmd/gskaro
REM Keep the console window open so build errors remain visible.
pause

7
cmd/gskaro/main.go Normal file
View File

@ -0,0 +1,7 @@
package main
import "gskaro-v1/internal/server"
// main boots the application by starting the embedded web UI server
// (internal/server). Start blocks for the lifetime of the process.
func main() {
	server.Start()
}

3
go.mod Normal file
View File

@ -0,0 +1,3 @@
module gskaro-v1
go 1.25.6

BIN
gskaro-v1.exe Normal file

Binary file not shown.

6
init-gskaro.bat Normal file
View File

@ -0,0 +1,6 @@
REM Run the one-time project initialization, then keep the window open.
gskaro-v1.exe init
pause
REM NOTE(review): removed a stray trailing "init" line; cmd.exe would have
REM tried to execute it as a command after the pause and failed.

38
internal/llm/models.go Normal file
View File

@ -0,0 +1,38 @@
package llm
import (
"encoding/json"
"fmt"
"net/http"
)
// OllamaTagsResponse mirrors the relevant part of Ollama's /api/tags reply.
type OllamaTagsResponse struct {
	Models []struct {
		Name string `json:"name"`
	} `json:"models"`
}

// GetModelsFromHost asks the Ollama instance at host for its installed
// models and returns their names in the order the server reported them.
func GetModelsFromHost(host string) ([]string, error) {
	resp, err := http.Get(host + "/api/tags")
	if err != nil {
		return nil, fmt.Errorf("ошибка запроса к %s/api/tags: %w", host, err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("Ollama вернула статус %s", resp.Status)
	}

	var tags OllamaTagsResponse
	if err := json.NewDecoder(resp.Body).Decode(&tags); err != nil {
		return nil, fmt.Errorf("ошибка разбора JSON: %w", err)
	}

	names := make([]string, 0, len(tags.Models))
	for _, model := range tags.Models {
		names = append(names, model.Name)
	}
	return names, nil
}

6
internal/llm/state.go Normal file
View File

@ -0,0 +1,6 @@
package llm
// Active LLM connection settings. These package-level defaults are
// overwritten at runtime by the handlers in the server package.
var (
	// ActiveHost is the base URL of the Ollama instance currently in use.
	ActiveHost = "http://localhost:11434"
	// ActiveModel is the model name sent with generate requests.
	ActiveModel = "mistral:7b"
)

75
internal/llm/stream.go Normal file
View File

@ -0,0 +1,75 @@
package llm
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
)
// ollamaGenerateRequest is the payload for Ollama's /api/generate endpoint.
type ollamaGenerateRequest struct {
	Model  string `json:"model"`
	Prompt string `json:"prompt"`
	Stream bool   `json:"stream"`
}

// OllamaGenerateResponse is a single streamed message from /api/generate.
// The token counters are only populated on the final (Done) message.
type OllamaGenerateResponse struct {
	Model           string `json:"model"`
	CreatedAt       string `json:"created_at"`
	Response        string `json:"response"`
	Done            bool   `json:"done"`
	PromptEvalCount int    `json:"prompt_eval_count"`
	EvalCount       int    `json:"eval_count"`
}

// OllamaClient issues generate requests against one Ollama host/model pair.
// Empty fields fall back to the package-level ActiveHost/ActiveModel.
type OllamaClient struct {
	Host  string
	Model string
}

// Stream sends prompt to the model and invokes callback once per streamed
// chunk: text chunks arrive with meta == nil; the final message arrives as an
// empty chunk with non-nil meta carrying the token counts.
func (c *OllamaClient) Stream(prompt string, callback func(chunk string, meta *OllamaGenerateResponse)) error {
	if c.Host == "" {
		c.Host = ActiveHost
	}
	if c.Model == "" {
		c.Model = ActiveModel
	}
	reqBody := ollamaGenerateRequest{
		Model:  c.Model,
		Prompt: prompt,
		Stream: true,
	}
	var buf bytes.Buffer
	if err := json.NewEncoder(&buf).Encode(&reqBody); err != nil {
		return fmt.Errorf("ошибка кодирования запроса: %w", err)
	}
	resp, err := http.Post(c.Host+"/api/generate", "application/json", &buf)
	if err != nil {
		return fmt.Errorf("ошибка запроса к %s: %w", c.Host, err)
	}
	defer resp.Body.Close()
	// Fix: a non-200 reply used to be fed to the JSON decoder and silently
	// dropped, so callers saw a successful empty stream. Report it instead.
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("Ollama вернула статус %s", resp.Status)
	}
	dec := json.NewDecoder(resp.Body)
	for {
		var msg OllamaGenerateResponse
		if err := dec.Decode(&msg); err != nil {
			// End of stream. NOTE(review): a mid-stream decode error is also
			// swallowed here; distinguishing io.EOF would need an "io" import.
			break
		}
		if msg.Response != "" {
			callback(msg.Response, nil)
		}
		if msg.Done {
			callback("", &msg)
			break
		}
	}
	return nil
}

View File

@ -0,0 +1,267 @@
package server
import (
	"bytes"
	"fmt"
	"html"
	"net/http"
	"time"

	"gskaro-v1/internal/llm"
)
// llmLogEntry is one completed LLM exchange kept for the history view.
type llmLogEntry struct {
	Model        string    // model that produced the answer
	Prompt       string    // user prompt as submitted
	Answer       string    // full concatenated model response
	Timestamp    time.Time // when the exchange finished
	PromptTokens int       // prompt_eval_count from the final stream message
	AnswerTokens int       // eval_count from the final stream message
}

// llmHistory accumulates exchanges in memory, newest appended last.
// NOTE(review): read and appended from HTTP handlers without a mutex —
// concurrent requests would race; confirm and add locking if needed.
var llmHistory []llmLogEntry
// -----------------------------
// LLM Console
// -----------------------------
// llmConsoleHandler serves the LLM console: htmx requests get only the
// console fragment; direct navigation gets the full page shell around it.
func llmConsoleHandler(w http.ResponseWriter, r *http.Request) {
	if r.Header.Get("HX-Request") == "true" {
		// htmx swap — just the fragment.
		renderConsole(w)
		return
	}
	// Full page: header/nav shell, console fragment, closing tags.
	fmt.Fprintf(w, `
<!doctype html>
<html lang="ru">
<head>
<meta charset="utf-8">
<title>LLM Console</title>
<script src="https://unpkg.com/htmx.org@1.9.10"></script>
</head>
<body hx-boost="true">
<div id="top-bar" style="display:flex; gap:16px; align-items:center; margin-bottom:16px;">
<div>
<button hx-get="/api/stats" hx-target="#content">Статистика</button>
<button hx-get="/api/tasks" hx-target="#content">Задачи</button>
<button hx-get="/api/notes" hx-target="#content">AI_NOTES.md</button>
<button hx-get="/api/llm/console" hx-target="#content">LLM Console</button>
<button hx-get="/api/llm/models" hx-target="#content">LLM Models</button>
<button hx-get="/api/llm/history" hx-target="#content">LLM History</button>
</div>
<div id="model-badge"
hx-get="/api/llm/status"
hx-trigger="load, every 5s">
Текущая модель: <strong></strong>
</div>
</div>
<div id="content">
`)
	renderConsole(w)
	fmt.Fprintf(w, `
</div>
</body>
</html>
`)
}
// renderConsole writes the LLM console fragment: the active model name,
// a prompt form posting to /api/llm/query, and the output container.
func renderConsole(w http.ResponseWriter) {
	const consoleTpl = `
<h2>LLM Console</h2>
<p>Текущая модель: <strong>%s</strong></p>
<form hx-post="/api/llm/query" hx-target="#llm-output" hx-swap="innerHTML">
<textarea name="prompt" rows="4" style="width:100%%"></textarea>
<br>
<button type="submit">Отправить</button>
</form>
<div id="llm-output" style="margin-top:20px; border:1px solid #ddd; padding:10px;">
Ожидание ответа...
</div>
`
	fmt.Fprintf(w, consoleTpl, llm.ActiveModel)
}
// -----------------------------
// Список моделей
// -----------------------------
// llmModelsHandler shows the Ollama connection form when no host is given,
// otherwise lists the models available on that host in a selection form.
func llmModelsHandler(w http.ResponseWriter, r *http.Request) {
	host := r.URL.Query().Get("host")
	if host == "" {
		fmt.Fprintf(w, `
<h2>Подключение к Ollama</h2>
<form hx-get="/api/llm/models" hx-target="#content">
<input type="text" name="host" placeholder="http://localhost:11434" style="width:300px;">
<button type="submit">Получить модели</button>
</form>
`)
		return
	}
	models, err := llm.GetModelsFromHost(host)
	if err != nil {
		// Fix: the error text echoes the user-supplied host — escape it
		// before embedding into HTML to prevent markup injection.
		fmt.Fprintf(w, `<p style="color:red;">Ошибка подключения: %s</p>`, html.EscapeString(err.Error()))
		return
	}
	// Fix: host comes from the query string and model names from a remote
	// server; both are untrusted and must be HTML-escaped (XSS).
	safeHost := html.EscapeString(host)
	fmt.Fprintf(w, `<h2>Модели на %s</h2>`, safeHost)
	fmt.Fprintf(w, `<form hx-post="/api/llm/select" hx-target="#content">`)
	fmt.Fprintf(w, `<input type="hidden" name="host" value="%s">`, safeHost)
	fmt.Fprintf(w, `<select name="model">`)
	for _, m := range models {
		safe := html.EscapeString(m)
		fmt.Fprintf(w, `<option value="%s">%s</option>`, safe, safe)
	}
	fmt.Fprintf(w, `</select>`)
	fmt.Fprintf(w, `<button type="submit">Выбрать</button>`)
	fmt.Fprintf(w, `</form>`)
}
// -----------------------------
// Выбор модели
// -----------------------------
// llmSelectHandler stores the submitted host/model as the active LLM
// settings and redirects back to the console.
func llmSelectHandler(w http.ResponseWriter, r *http.Request) {
	// Only overwrite a setting when the form actually supplied a value.
	if h := r.FormValue("host"); h != "" {
		llm.ActiveHost = h
	}
	if m := r.FormValue("model"); m != "" {
		llm.ActiveModel = m
	}
	// htmx clients redirect via response header; plain browsers get a 303.
	if r.Header.Get("HX-Request") != "true" {
		http.Redirect(w, r, "/api/llm/console", http.StatusSeeOther)
		return
	}
	w.Header().Set("HX-Redirect", "/api/llm/console")
}
// -----------------------------
// Запрос к модели
// -----------------------------
// llmQueryHandler runs the submitted prompt against the active model,
// records the exchange in llmHistory, and returns the answer as HTML.
func llmQueryHandler(w http.ResponseWriter, r *http.Request) {
	prompt := r.FormValue("prompt")
	if prompt == "" {
		fmt.Fprintf(w, "<p style='color:red;'>Пустой запрос.</p>")
		return
	}
	client := &llm.OllamaClient{
		Host:  llm.ActiveHost,
		Model: llm.ActiveModel,
	}
	var answerBuf bytes.Buffer
	var promptTokens, answerTokens int
	err := client.Stream(prompt, func(chunk string, meta *llm.OllamaGenerateResponse) {
		answerBuf.WriteString(chunk)
		// Token counts arrive only on the final (meta != nil) message.
		if meta != nil {
			promptTokens = meta.PromptEvalCount
			answerTokens = meta.EvalCount
		}
	})
	if err != nil {
		// Fix: error text may contain untrusted content — escape it.
		fmt.Fprintf(w, "<p style='color:red;'>Ошибка: %s</p>", html.EscapeString(err.Error()))
		return
	}
	resp := answerBuf.String()
	llmHistory = append(llmHistory, llmLogEntry{
		Model:        llm.ActiveModel,
		Prompt:       prompt,
		Answer:       resp,
		Timestamp:    time.Now(),
		PromptTokens: promptTokens,
		AnswerTokens: answerTokens,
	})
	// Fix: model output is untrusted text — escape before embedding into
	// HTML so it cannot inject markup or scripts into the UI (XSS).
	fmt.Fprintf(w, "<pre>%s</pre>", html.EscapeString(resp))
}
// -----------------------------
// Статус
// -----------------------------
// llmStatusHandler renders the online/offline badge for the active Ollama
// host plus the active model name. The UI polls it every few seconds.
func llmStatusHandler(w http.ResponseWriter, r *http.Request) {
	resp, err := http.Get(llm.ActiveHost + "/api/tags")
	if err == nil {
		// Reachable: report online.
		resp.Body.Close()
		fmt.Fprintf(w, `
<span style="padding:4px 8px; border-radius:4px; background:#e6ffed; color:#137333;">
Ollama: online
</span>
&nbsp;
<span>Модель: <strong>%s</strong></span>
`, llm.ActiveModel)
		return
	}
	fmt.Fprintf(w, `
<span style="padding:4px 8px; border-radius:4px; background:#fee; color:#900;">
Ollama: offline
</span>
&nbsp;
<span>Модель: <strong>%s</strong></span>
`, llm.ActiveModel)
}
// -----------------------------
// История
// -----------------------------
// llmHistoryHandler renders all recorded LLM exchanges, newest first.
func llmHistoryHandler(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "<h2>LLM History</h2>")
	if len(llmHistory) == 0 {
		fmt.Fprintf(w, "<p>Пока пусто.</p>")
		return
	}
	fmt.Fprintf(w, `<ul style="list-style:none; padding:0;">`)
	for i := len(llmHistory) - 1; i >= 0; i-- {
		e := llmHistory[i]
		fmt.Fprintf(w, `
<li style="margin-bottom:16px; padding:12px; border:1px solid #ddd; border-radius:6px;">
<div style="color:#666; font-size:13px;">
🕒 %s
</div>
<div><strong>Модель:</strong> %s</div>
<div style="margin-top:8px;"><strong>👤 Запрос:</strong></div>
<pre style="white-space:pre-wrap; background:#fafafa; padding:8px; border-radius:4px;">%s</pre>
<div style="margin-top:8px;"><strong>🤖 Ответ модели:</strong></div>
<pre style="white-space:pre-wrap; background:#f0f7ff; padding:8px; border-radius:4px;">%s</pre>
<div style="margin-top:8px; color:#555;">
🔢 Токены: prompt=%d, answer=%d, total=%d
</div>
</li>
`,
			e.Timestamp.Format("2006-01-02 15:04:05"),
			// Fix: model name, prompt and answer are untrusted text —
			// escape before embedding into HTML (XSS).
			html.EscapeString(e.Model),
			html.EscapeString(e.Prompt),
			html.EscapeString(e.Answer),
			e.PromptTokens,
			e.AnswerTokens,
			e.PromptTokens+e.AnswerTokens,
		)
	}
	fmt.Fprintf(w, `</ul>`)
}

43
internal/server/server.go Normal file
View File

@ -0,0 +1,43 @@
// internal/server/server.go
package server
import (
"fmt"
"io/fs"
"net/http"
"os/exec"
)
// Start wires up all HTTP routes and runs the web UI server on :4568.
// It blocks for the lifetime of the process; a fatal listen error is
// surfaced via panic instead of being silently discarded.
func Start() {
	mux := http.NewServeMux()

	// LLM API
	mux.HandleFunc("/api/llm/console", llmConsoleHandler)
	mux.HandleFunc("/api/llm/models", llmModelsHandler)
	mux.HandleFunc("/api/llm/select", llmSelectHandler)
	mux.HandleFunc("/api/llm/query", llmQueryHandler)
	mux.HandleFunc("/api/llm/status", llmStatusHandler)
	mux.HandleFunc("/api/llm/history", llmHistoryHandler)

	// Placeholder sections, filled in later.
	mux.HandleFunc("/api/stats", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "<h2>Статистика</h2><p>Пока пусто.</p>")
	})
	mux.HandleFunc("/api/tasks", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "<h2>Задачи</h2><p>Пока пусто.</p>")
	})
	mux.HandleFunc("/api/notes", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "<h2>AI_NOTES.md</h2><pre>Пока пусто.</pre>")
	})

	// Static files — must be registered last so "/" catches everything else.
	sub, err := fs.Sub(WebFS, "web")
	if err != nil {
		panic(err)
	}
	mux.Handle("/", http.FileServer(http.FS(sub)))

	fmt.Println("Listening on :4568")
	// Best-effort: open the default browser (Windows-specific); a failure
	// here must not stop the server, but it should not be invisible either.
	if err := exec.Command("rundll32", "url.dll,FileProtocolHandler", "http://localhost:4568").Start(); err != nil {
		fmt.Println("could not open browser:", err)
	}
	// Fix: the listen error was previously discarded, so a busy port made
	// the process exit silently with status 0.
	if err := http.ListenAndServe(":4568", mux); err != nil {
		panic(fmt.Errorf("server error: %w", err))
	}
}

View File

@ -0,0 +1,30 @@
<!doctype html>
<html lang="ru">
<head>
<meta charset="utf-8">
<title>gskaro — Developer Assistant</title>
<!-- htmx drives all navigation: the buttons below swap server-rendered
     fragments into #content without full page reloads. -->
<script src="https://unpkg.com/htmx.org@1.9.10"></script>
</head>
<body hx-boost="true">
<div id="top-bar" style="display:flex; gap:16px; align-items:center; margin-bottom:16px;">
<div>
<button hx-get="/api/stats" hx-target="#content">Статистика</button>
<button hx-get="/api/tasks" hx-target="#content">Задачи</button>
<button hx-get="/api/notes" hx-target="#content">AI_NOTES.md</button>
<button hx-get="/api/llm/console" hx-target="#content">LLM Console</button>
<button hx-get="/api/llm/models" hx-target="#content">LLM Models</button>
<button hx-get="/api/llm/history" hx-target="#content">LLM History</button>
</div>
<!-- Online/offline badge: polls /api/llm/status on load and every 5s. -->
<div id="model-badge"
hx-get="/api/llm/status"
hx-trigger="load, every 5s">
Текущая модель: <strong></strong>
</div>
</div>
<!-- All section fragments are swapped into this container. -->
<div id="content">
<p>Выбери раздел вверху.</p>
</div>
</body>
</html>

View File

@ -0,0 +1,6 @@
package server

import "embed"

// WebFS holds the embedded static web UI assets (everything under web/),
// served at "/" by the file server registered in Start.
//
//go:embed web/*
var WebFS embed.FS

6
start-gskaro.bat Normal file
View File

@ -0,0 +1,6 @@
REM Launch the gskaro web UI, then keep the window open.
gskaro-v1.exe ui
pause
REM NOTE(review): removed a stray trailing "init" line; cmd.exe would have
REM tried to execute it as a command after the pause and failed.