enhance: add file source and location metadata to knowledge results #428

Merged
10 changes: 9 additions & 1 deletion knowledge/pkg/datastore/retrieve.go
@@ -100,5 +100,13 @@ func (s *Datastore) SimilaritySearch(ctx context.Context, query string, numDocum
}
}
}
return s.Vectorstore.SimilaritySearch(ctx, query, numDocuments, datasetID, where, whereDocument, ef)
docs, err := s.Vectorstore.SimilaritySearch(ctx, query, numDocuments, datasetID, where, whereDocument, ef)
if err != nil {
return nil, err
}
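// Stamp each result with the dataset it was retrieved from, so downstream
// consumers (such as the result formatter) can point back to the source knowledge set.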
for i, doc := range docs {
doc.Metadata["datasetID"] = datasetID
docs[i] = doc
}
return docs, nil
}
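
For reviewers, a minimal, self-contained sketch of the tagging pattern introduced above. The doc type, the tagDataset helper, and the dataset ID "default/ks1" are stand-ins for illustration; the real document type lives in the vectorstore package and the real logic runs inside SimilaritySearch.

package main

import "fmt"

// doc is a stand-in for the vectorstore document type, reduced to the fields
// that matter for the tagging pattern.
type doc struct {
	Content  string
	Metadata map[string]any
}

// tagDataset mirrors what SimilaritySearch now does after retrieval: it stamps
// every document with the dataset it came from. The nil-map guard is an extra
// safety net in this sketch; the change above assumes Metadata is already set.
func tagDataset(docs []doc, datasetID string) []doc {
	for i, d := range docs {
		if d.Metadata == nil {
			d.Metadata = map[string]any{}
		}
		d.Metadata["datasetID"] = datasetID
		docs[i] = d
	}
	return docs
}

func main() {
	results := tagDataset([]doc{{Content: "chunk A"}, {Content: "chunk B"}}, "default/ks1")
	for _, d := range results {
		fmt.Println(d.Metadata["datasetID"]) // prints "default/ks1" for each result
	}
}
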
89 changes: 64 additions & 25 deletions result-formatter/main.go
@@ -4,8 +4,9 @@ import (
"context"
"encoding/json"
"fmt"
"log/slog"
neturl "net/url"
"os"
"strconv"
"strings"
"sync"

@@ -22,14 +23,27 @@ type subqueryResults struct {
}

type document struct {
ID string `json:"id"`
Content string `json:"content,omitempty"`
Metadata map[string]any `json:"metadata,omitempty"`
ID string `json:"id"`
Content string `json:"content,omitempty"`
Metadata metadata `json:"metadata,omitempty"`
}

type metadata struct {
Source string `json:"source,omitempty"`
WorkspaceID string `json:"workspaceID,omitempty"`
URL string `json:"url,omitempty"`
Pages string `json:"pages,omitempty"`
Page int `json:"page,omitempty"`
TotalPages int `json:"totalPages,omitempty"`
FileSize int `json:"fileSize,omitempty"`
WorkspaceFileName string `json:"workspaceFileName,omitempty"` // workspaceFileName is the location of the converted file, not the original file - e.g. <path>/foo.pdf.json
DatasetID string `json:"datasetID,omitempty"`
}

type hit struct {
URL string `json:"url,omitempty"`
Content string `json:"content,omitempty"`
URL string `json:"url,omitempty"` // URL should be the original source of the document (Web URL, OneDrive Link, etc.)
Location string `json:"location,omitempty"` // Location should be the location of the result in the original source (page numbers, etc.)
Content string `json:"content,omitempty"` // Content should be the text content of the document
}
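
// For illustration, a hit with direct source metadata might serialize roughly
// like this (all values are made up):
//
//	{
//	  "url": "https://example.com/docs/report.pdf",
//	  "location": "Pages 3-5 of 12",
//	  "content": "...chunk text..."
//	}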

type inputContent struct {
@@ -44,22 +58,22 @@ func main() {
ctx = context.Background()
)

// This is ugly code, I know. Beauty comes later.
// This is ugly code, I know. Beauty comes later. Cleaned up a little. Still room for improvement.

if clientErr != nil {
_, _ = fmt.Fprintf(os.Stderr, "failed to create gptscript client: %v\n", clientErr)
slog.Error("failed to create gptscript client", "error", clientErr)
}

if err := json.Unmarshal([]byte(out), &output); err != nil {
_, _ = fmt.Fprintf(os.Stderr, "failed to unmarshal output: %v\n", err)
slog.Debug("failed to unmarshal output", "err", err)
fmt.Print(out)
return
}

var (
outDocs []hit
wg sync.WaitGroup
fullyFetched = map[string]struct{}{}
fullyFetched = map[string]int{} // fullyFetched is a map of files that have been fully fetched from the workspace - the value is the index in outDocs
budget = 120_000 // rough byte budget for full files inlined from the workspace
)

@@ -68,27 +82,51 @@ func main() {
break
}
for _, doc := range result.ResultDocuments {
filename, _ := doc.Metadata["workspaceFileName"].(string)
if _, ok := fullyFetched[filename]; ok {
filename := doc.Metadata.WorkspaceFileName

// We parse the location regardless of the file potentially being fully fetched already to preserve the
// source reference metadata (i.e. where in the document the information was found).
// This is a UX thing to help users with manual proofreading of answers.
var location string
if doc.Metadata.Pages != "" {
location = "Pages " + doc.Metadata.Pages
} else if doc.Metadata.Page > 0 {
location = fmt.Sprintf("Page %d", doc.Metadata.Page)
}
if location != "" && doc.Metadata.TotalPages > 0 {
location = fmt.Sprintf("%s of %d", location, doc.Metadata.TotalPages)
slog.Debug("result doc in file", "filename", filename, "location", location)
}
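// The resulting strings look like "Pages 3-5 of 12" or "Page 7 of 12", or just
// "Pages 3-5" when the total page count is unknown (example values).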

if ffi, ok := fullyFetched[filename]; ok {
if location != "" {
outDocs[ffi].Location += " and " + location
}
continue
}

url, _ := doc.Metadata["url"].(string)
var url string
if doc.Metadata.URL != "" {
url = doc.Metadata.URL
} else if doc.Metadata.Source != "" {
url = "knowledge://" + neturl.PathEscape(doc.Metadata.DatasetID+"::"+strings.TrimPrefix(doc.Metadata.Source, "ws://")) // <datasetID>::<workspaceFileName>, where datasetID is <namespace>/<knowledgeset>
}
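// For example (illustrative values): Source "ws://report.pdf" in dataset
// "default/ks1" becomes "knowledge://default%2Fks1::report.pdf".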

outDocs = append(outDocs, hit{
URL: url,
Content: doc.Content,
URL: url,
Content: doc.Content,
Location: location,
})

index := len(outDocs) - 1

if index < 3 && clientErr == nil {
fileSize, _ := doc.Metadata["fileSize"].(string)
size, _ := strconv.Atoi(fileSize)
workspaceID, _ := doc.Metadata["workspaceID"].(string)
if size > 5_000 && size < budget && workspaceID != "" {
_, _ = fmt.Fprintf(os.Stderr, "reading file in workspace: %s\n", filename)
fullyFetched[filename] = struct{}{}
budget -= size
fileSize := doc.Metadata.FileSize
workspaceID := doc.Metadata.WorkspaceID
if fileSize > 5_000 && fileSize < budget && workspaceID != "" {
slog.Debug("fetching full file from workspace", "file", filename, "sizeInBytes", fileSize)
fullyFetched[filename] = index
budget -= fileSize
wg.Add(1)

go func() {
@@ -98,13 +136,13 @@ func main() {
WorkspaceID: workspaceID,
})
if err != nil {
_, _ = fmt.Fprintf(os.Stderr, "failed to read file in workspace: %v\n", err)
slog.Error("failed to read file in workspace", "error", err)
return
}

var sourceContent inputContent
if err := json.Unmarshal(content, &sourceContent); err != nil {
_, _ = fmt.Fprintf(os.Stderr, "failed to unmarshal content: %v\n", err)
slog.Error("failed to unmarshal content", "error", err)
return
}

@@ -115,10 +153,11 @@ func main() {

if buffer.Len() > 0 {
outDocs[index].Content = buffer.String()
outDocs[index].Location = "Full Document. Specifically " + outDocs[index].Location
}
}()
} else {
_, _ = fmt.Fprintf(os.Stderr, "file size is not within the range: %s %s %d %d\n", workspaceID, filename, size, budget)
slog.Debug("filesize is not within range", "filename", fmt.Sprintf("%s/%s", workspaceID, filename), "filesize", fileSize, "budget", budget)
}
}
}