🔊 More logging after generation

This commit is contained in:
LittleSheep 2025-01-30 13:02:21 +08:00
parent 53e9d4de03
commit 0f8dd2a709
2 changed files with 9 additions and 6 deletions

View File

@ -2,7 +2,7 @@ package grpc
import (
"context"
"fmt"
"git.solsynth.dev/hypernet/insight/pkg/internal/services"
"git.solsynth.dev/hypernet/insight/pkg/proto"
"google.golang.org/grpc/codes"
@ -12,13 +12,13 @@ import (
func (v *Server) GenerateInsight(ctx context.Context, request *proto.InsightRequest) (*proto.InsightResponse, error) {
input := request.GetSource()
if err := services.PlaceOrder(uint(request.GetUserId()), len(input)); err != nil {
return nil, status.Errorf(codes.ResourceExhausted, fmt.Sprintf("failed to place order: %v", err))
return nil, status.Errorf(codes.ResourceExhausted, "failed to place order: %v", err)
}
out, err := services.GenerateInsights(input)
if err != nil {
_ = services.MakeRefund(uint(request.GetUserId()), len(input))
return nil, status.Errorf(codes.Internal, fmt.Sprintf("failed to generate insight: %v", err))
return nil, status.Errorf(codes.Internal, "failed to generate insight: %v", err)
}
return &proto.InsightResponse{Response: out}, nil

View File

@ -6,6 +6,7 @@ import (
"net/http"
"time"
"github.com/rs/zerolog/log"
"github.com/spf13/viper"
"github.com/tmc/langchaingo/llms"
"github.com/tmc/langchaingo/llms/ollama"
@ -49,11 +50,13 @@ func GenerateInsights(source string) (string, error) {
return "", fmt.Errorf("failed to format prompt: %v", err)
}
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
defer cancel()
completion, err := LargeModel.Call(ctx, inPrompt,
start := time.Now()
completion, err := LargeModel.Call(context.Background(), inPrompt,
llms.WithTemperature(0.8),
)
took := time.Since(start)
log.Info().Dur("took", took).Msg("Insight generated successfully...")
return completion, err
}