diff --git a/SEARCH_INTEGRATION_GUIDE.md b/SEARCH_INTEGRATION_GUIDE.md
new file mode 100644
index 0000000000..91ba509775
--- /dev/null
+++ b/SEARCH_INTEGRATION_GUIDE.md
@@ -0,0 +1,469 @@
+# Search Integration Guide for Cosmo Router
+
+This guide explains how to integrate the search datasource from `graphql-go-tools` into the Cosmo router. It covers every integration point, the public APIs, and the complete data flow from configuration to query execution.
+
+## Architecture Overview
+
+The search datasource is a **virtual subgraph** — it has no running HTTP server. Instead, the router:
+
+1. Parses a **config schema** (GraphQL SDL with custom directives) that declares which entities are searchable and how
+2. **Generates** a federation-compliant subgraph SDL from those directives
+3. **Composes** that generated SDL with other subgraphs using `composition-go`
+4. At runtime, resolves search queries by calling a local search index rather than making HTTP fetches
+
+The search subgraph is identified in composition output by the sentinel fetch URL `"http://search.local"`.
+
+## Packages
+
+| Package | Import Path | Purpose |
+|---------|------------|---------|
+| `searchindex` | `v2/pkg/searchindex` | Core interfaces: `Index`, `IndexFactory`, `Embedder`, `EmbedderRegistry`, `IndexFactoryRegistry` |
+| `search_datasource` | `v2/pkg/engine/datasource/search_datasource` | GraphQL integration: `Factory`, `Planner`, `Source`, `Manager`, directive parsing, SDL generation |
+| Backend implementations | `v2/pkg/searchindex/{pgvector,elasticsearch,weaviate,qdrant,bleve,algolia,typesense,meilisearch}` | Each exports a `NewFactory() IndexFactory` |
+| Embedding providers | `v2/pkg/searchindex/embedder/{openai,ollama}` | Each exports a constructor returning `searchindex.Embedder` |
+
+## Step-by-Step Integration
+
+### Step 1: Parse the Config Schema
+
+The config schema is a GraphQL SDL file written by the user. It uses custom directives to declare indices, searchable entities, indexed fields, and embeddings.
+
+```go
+import (
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/astparser"
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource"
+)
+
+doc, report := astparser.ParseGraphqlDocumentString(configSDL)
+if report.HasErrors() {
+    return fmt.Errorf("parse config schema: %s", report.Error())
+}
+
+parsedConfig, err := search_datasource.ParseConfigSchema(&doc)
+// parsedConfig.Indices -- []IndexDirective (one per @index)
+// parsedConfig.Entities -- []SearchableEntity (one per @searchable type)
+// parsedConfig.Populations -- []PopulateDirective
+// parsedConfig.Subscriptions -- []SubscribeDirective
+```
+
+**Directive syntax:**
+
+```graphql
+# Declare an index with a backend
+extend schema @index(name: "products", backend: "pgvector", config: "{}")
+
+# Mark an entity as searchable
+type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") {
+  id: ID!
+  name: String @indexed(type: TEXT, filterable: true, sortable: true)
+  description: String @indexed(type: TEXT)
+  category: String @indexed(type: KEYWORD, filterable: true, sortable: true)
+  price: Float @indexed(type: NUMERIC, filterable: true, sortable: true)
+  inStock: Boolean @indexed(type: BOOL, filterable: true)
+  _embedding: [Float!] @embedding(fields: "name description", template: "{{name}}. {{description}}", model: "text-embedding-3-small")
+}
+```
+
+**`@indexed` types:** `TEXT`, `KEYWORD`, `NUMERIC`, `BOOL`, `VECTOR`, `GEO`
+
+**`@embedding` directive:**
+- `fields` is a **space-separated string** (NOT an array), parsed internally by `strings.Fields()`
+- `template` uses Go template syntax with field names as variables
+- `model` is the key used to look up the embedder in the `EmbedderRegistry`
+
+**`@searchable` options:**
+- `resultsMetaInformation: false` -- flat array results instead of wrapper types (no `hits`, `score`, `totalCount`)
+- `@index(cursorBasedPagination: true)` -- enables Relay-style cursor pagination
+
+### Step 2: Generate the Subgraph SDL
+
+```go
+searchSDL, err := search_datasource.GenerateSubgraphSDL(parsedConfig)
+```
+
+This produces a complete federation-compliant SDL. The shape depends on the entity configuration:
+
+**Text-only entity (with wrapper):**
+```graphql
+type Query {
+  searchProducts(query: String!, fuzziness: Fuzziness, filter: ProductFilter, sort: [ProductSort!], limit: Int, offset: Int, facets: [String!]): SearchProductResult!
+}
+```
+
+The `Fuzziness` enum (`EXACT`, `LOW`, `HIGH`) controls typo tolerance at query time; its values map to edit distances 0, 1, and 2, respectively. Omitting it uses the backend default.
+
+**Vector-enabled entity (with `@embedding`):**
+```graphql
+input SearchProductInput @oneOf {
+  query: String
+  vector: [Float!]
+}
+
+type SearchProductHit {
+  score: Float!
+  distance: Float
+  node: Product!
+}
+
+type Query {
+  searchProducts(search: SearchProductInput!, fuzziness: Fuzziness, filter: ProductFilter, sort: [ProductSort!], limit: Int, offset: Int): SearchProductResult!
+}
+```
+
+Key differences when `HasVectorSearch()` is true:
+- The query argument changes from `query: String!` to `search: SearchProductInput!` (a `@oneOf` input with `query`/`vector`)
+- Hits include a `distance: Float` field
+- There is no `facets` argument and no facet types
+
+**Inline style (`resultsMetaInformation: false`):**
+```graphql
+type Query {
+  searchProducts(query: String!, fuzziness: Fuzziness, ...): [Product!]!
+}
+```
+
+**Cursor pagination (`cursorBasedPagination: true`):**
+```graphql
+type Query {
+  searchProducts(query: String!, fuzziness: Fuzziness, first: Int, after: String, last: Int, before: String): SearchProductConnection!
+}
+```
+
+### Step 3: Compose with Other Subgraphs
+
+Use `composition-go` to compose the generated search SDL with entity subgraphs. The search subgraph uses the sentinel URL `"http://search.local"`:
+
+```go
+import "github.com/wundergraph/cosmo/composition-go"
+
+subgraphs := []*composition.Subgraph{
+    {
+        Name:   "search",
+        URL:    "http://search.local", // sentinel -- no real HTTP server
+        Schema: searchSDL,
+    },
+    {
+        Name:   "productdetails",
+        URL:    entitySubgraphURL,
+        Schema: entitySDL,
+    },
+}
+
+routerConfigJSON, err := composition.BuildRouterConfiguration(subgraphs...)
+```
+
+### Step 4: Register Backend Factories
+
+Create registries and register the backends you want to support:
+
+```go
+import (
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/pgvector"
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/elasticsearch"
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/bleve"
+)
+
+indexRegistry := searchindex.NewIndexFactoryRegistry()
+indexRegistry.Register("pgvector", pgvector.NewFactory())
+indexRegistry.Register("elasticsearch", elasticsearch.NewFactory())
+indexRegistry.Register("bleve", bleve.NewFactory())
+// Register any of the 8 backends you need
+```
+
+### Step 5: Register Embedding Providers
+
+If any entity uses `@embedding`, register the corresponding embedder under its model name:
+
+```go
+embedderRegistry := searchindex.NewEmbedderRegistry()
+embedderRegistry.Register("text-embedding-3-small", openaiEmbedder)
+embedderRegistry.Register("nomic-embed-text", ollamaEmbedder)
+```
+
+The model name in the registry must match the `model` argument in `@embedding(model: "...")`.
+
+### Step 6: Wire the Plan Configuration
+
+When building the `plan.Configuration` from the composition output, identify the search datasource by checking whether the fetch URL is `"http://search.local"`, and use `search_datasource.Factory` instead of `graphql_datasource.Factory`:
+
+```go
+searchFactory := search_datasource.NewFactory(ctx, indexRegistry, embedderRegistry)
+
+for _, ds := range engineConfig.DatasourceConfigurations {
+    fetchURL := ds.CustomGraphql.Fetch.GetUrl().GetStaticVariableContent()
+
+    if fetchURL == "http://search.local" {
+        // Assumes a single @searchable entity; with several, match by type name.
+        entity := &parsedConfig.Entities[0]
+        searchConfig := entityToConfiguration(entity)
+
+        searchDS, err := plan.NewDataSourceConfiguration[search_datasource.Configuration](
+            ds.Id,
+            searchFactory,
+            metadata, // plan.DataSourceMetadata for this datasource
+            searchConfig,
+        )
+        if err != nil {
+            return err
+        }
+        planConfig.DataSources = append(planConfig.DataSources, searchDS)
+    } else {
+        // Standard GraphQL datasource
+    }
+}
+```
+
+**Converting a `SearchableEntity` to `Configuration`:**
+
+```go
+func entityToConfiguration(entity *search_datasource.SearchableEntity) search_datasource.Configuration {
+    cfg := search_datasource.Configuration{
+        IndexName:              entity.IndexName,
+        SearchField:            entity.SearchField,
+        EntityTypeName:         entity.TypeName,
+        KeyFields:              entity.KeyFields,
+        HasTextSearch:          entity.HasTextSearch(),
+        HasVectorSearch:        entity.HasVectorSearch(),
+        ResultsMetaInformation: entity.ResultsMetaInformation,
+        CursorBasedPagination:  entity.CursorBasedPagination,
+        CursorBidirectional:    entity.CursorBidirectional,
+    }
+    for _, f := range entity.Fields {
+        cfg.Fields = append(cfg.Fields, search_datasource.IndexedFieldConfig{
+            FieldName:   f.FieldName,
+            GraphQLType: f.GraphQLType,
+            IndexType:   f.IndexType,
+            Filterable:  f.Filterable,
+            Sortable:    f.Sortable,
+            Dimensions:  f.Dimensions,
+        })
+    }
+    for _, ef := range entity.EmbeddingFields {
+        cfg.EmbeddingFields = append(cfg.EmbeddingFields, search_datasource.EmbeddingFieldConfig{
+            FieldName:    ef.FieldName,
+            SourceFields: ef.SourceFields,
+            Template:     ef.Template,
+            Model:        ef.Model,
+        })
+    }
+    return cfg
+}
+```
+
+### Step 7: Lifecycle Management
+
+The `Manager` handles index creation, initial data population, embedding pipelines, and live subscriptions:
+
+```go
+manager := search_datasource.NewManager(
+    searchFactory,
+    indexRegistry,
+    embedderRegistry,
+    executor, // implements the GraphQLExecutor interface
+    parsedConfig,
+)
+
+if err := manager.Start(ctx); err != nil {
+    return err
+}
+defer manager.Stop()
+```
+
+**`GraphQLExecutor` interface:**
+```go
+type GraphQLExecutor interface {
+    Execute(ctx context.Context, operation string) ([]byte, error)
+}
+```
+
+The executor runs GraphQL operations against the federated graph itself. It is used for:
+- **Population queries** (`@populate`): fetches all entities and indexes them
+- **Subscription updates** (`@subscribe`): receives entity changes for live re-indexing
+
+A sample executor implementation is sketched at the end of this step.
+
+**What `Manager.Start()` does:**
+1. Creates indices via `IndexFactoryRegistry.Get(backend).CreateIndex(ctx, name, schema, configJSON)`
+2. Registers each index with `Factory.RegisterIndex(name, idx)` so the planner can find them
+3. Sets up an `EmbeddingPipeline` for each entity with `@embedding` fields (template transformer + embedder from the registry)
+4. Runs population queries -- executes GraphQL operations, extracts entities with `ExtractEntities()`, computes embeddings with the pipeline, and calls `idx.IndexDocuments()`
+5. Starts subscription goroutines for live updates
+
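+The router is free to implement `GraphQLExecutor` however it fits its architecture. A minimal sketch backed by a plain HTTP POST to the router's own GraphQL endpoint (the `httpExecutor` name, endpoint URL, and client wiring are illustrative assumptions; an in-process executor would avoid the network hop entirely):
+
+```go
+import (
+    "bytes"
+    "context"
+    "encoding/json"
+    "io"
+    "net/http"
+)
+
+type httpExecutor struct {
+    endpoint string // e.g. the router's own /graphql endpoint
+    client   *http.Client
+}
+
+func (e *httpExecutor) Execute(ctx context.Context, operation string) ([]byte, error) {
+    // Wrap the operation in a standard GraphQL-over-HTTP request body.
+    body, err := json.Marshal(map[string]string{"query": operation})
+    if err != nil {
+        return nil, err
+    }
+    req, err := http.NewRequestWithContext(ctx, http.MethodPost, e.endpoint, bytes.NewReader(body))
+    if err != nil {
+        return nil, err
+    }
+    req.Header.Set("Content-Type", "application/json")
+    resp, err := e.client.Do(req)
+    if err != nil {
+        return nil, err
+    }
+    defer resp.Body.Close()
+    // The Manager consumes the raw GraphQL response bytes.
+    return io.ReadAll(resp.Body)
+}
+```
+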
+### Step 8: Query Execution Flow
+
+At query time, the flow is:
+
+1. **Planner** (`Planner.EnterField`) detects the search field and collects which arguments are present
+2. **Planner** (`Planner.ConfigureFetch`) builds a JSON template using `{{.arguments.X}}` syntax, creates a `Source` via `Factory.CreateSourceForConfig(config)`, and returns a `FetchConfiguration` with `PostProcessing.SelectResponseDataPath: ["data"]`
+3. **Resolver** resolves the template variables and calls `Source.Load(ctx, headers, input)`
+4. **Source** parses the input JSON, builds a `SearchRequest`, calls `index.Search(ctx, req)`, and formats the response
+
+**Auto-embedding flow in `Source.Load()`:**
+- If `search.query` is provided AND the source has an embedder: `embedder.EmbedSingle(query)` produces `req.Vector`
+- If `search.vector` is provided: it is used as `req.Vector` directly
+- Otherwise: `req.TextQuery` is used for full-text search
+
+**Response wrapping:**
+- The Source wraps results in `{"data": {"": {...}}}`, which matches `PostProcessing.SelectResponseDataPath: ["data"]`
+- After the resolver extracts `"data"`, the result is keyed by the search field name, aligning with the plan visitor's response tree
+
+## Backend Support Matrix
+
+| Backend | Vector | Text | Facets | Cursor | Fuzziness | Package |
+|---------|--------|------|--------|--------|-----------|---------|
+| pgvector | native + hybrid RRF | tsvector | yes | bidirectional | no | `searchindex/pgvector` |
+| Elasticsearch | dense_vector/kNN | yes | yes | forward only | yes (`multi_match.fuzziness`) | `searchindex/elasticsearch` |
+| Weaviate | nearVector | BM25 | no | no | no | `searchindex/weaviate` |
+| Qdrant | native | payload filter only | no | no | no | `searchindex/qdrant` |
+| Bleve | no (silently ignores) | yes | yes | bidirectional | yes (`SetFuzziness()`) | `searchindex/bleve` |
+| Algolia | no | yes | yes | no | EXACT only (`typoTolerance: false`) | `searchindex/algolia` |
+| Typesense | no | yes | yes | no | yes (`num_typos`) | `searchindex/typesense` |
+| Meilisearch | no | yes | yes | no | no (built-in, not per-query) | `searchindex/meilisearch` |
+
+## Core Types Reference
+
+### `searchindex.Index`
+```go
+type Index interface {
+    IndexDocument(ctx context.Context, doc EntityDocument) error
+    IndexDocuments(ctx context.Context, docs []EntityDocument) error
+    DeleteDocument(ctx context.Context, id DocumentIdentity) error
+    DeleteDocuments(ctx context.Context, ids []DocumentIdentity) error
+    Search(ctx context.Context, req SearchRequest) (*SearchResult, error)
+    Close() error
+}
+```
+
+### `searchindex.IndexFactory`
+```go
+type IndexFactory interface {
+    CreateIndex(ctx context.Context, name string, schema IndexConfig, configJSON []byte) (Index, error)
+}
+```
+
+### `searchindex.Embedder`
+```go
+type Embedder interface {
+    Embed(ctx context.Context, texts []string) ([][]float32, error)
+    EmbedSingle(ctx context.Context, text string) ([]float32, error)
+    Dimensions() int
+}
+```
+
+### `searchindex.IndexConfig`
+```go
+type IndexConfig struct {
+    Name   string
+    Fields []FieldConfig
+}
+
+type FieldConfig struct {
+    Name       string
+    Type       FieldType // TEXT, KEYWORD, NUMERIC, BOOL, VECTOR, GEO
+    Filterable bool
+    Sortable   bool
+    Dimensions int // required for VECTOR fields
+}
+```
+
+### `searchindex.SearchRequest`
+```go
+type SearchRequest struct {
+    TextQuery       string
+    TextFields      []TextFieldWeight // field name + optional boost weight
+    Vector          []float32
+    VectorField     string
+    Filter          *Filter
+    Sort            []SortField
+    Limit           int
+    Offset          int
+    Facets          []FacetRequest
+    TypeName        string
+    GeoDistanceSort *GeoDistanceSort
+    Fuzziness       *Fuzziness // nil = backend default; EXACT(0), LOW(1), HIGH(2)
+    SearchAfter     []string   // cursor pagination
+    SearchBefore    []string   // cursor pagination (backward)
+}
+```
+
+### `search_datasource.Configuration`
+```go
+type Configuration struct {
+    IndexName              string
+    SearchField            string
+    EntityTypeName         string
+    KeyFields              []string
+    Fields                 []IndexedFieldConfig
+    EmbeddingFields        []EmbeddingFieldConfig
+    HasVectorSearch        bool
+    HasTextSearch          bool
+    ResultsMetaInformation bool
+    CursorBasedPagination  bool
+    CursorBidirectional    bool
+}
+```
+
+### `search_datasource.ParsedConfig`
+```go
+type ParsedConfig struct {
+    Indices       []IndexDirective
+    Entities      []SearchableEntity
+    Populations   []PopulateDirective
+    Subscriptions []SubscribeDirective
+}
+```
+
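+To make the request shape concrete, here is a hypothetical text search the `Source` might issue against an `Index`. The field values are illustrative only, and the conversion assumes `Fuzziness` is an integer-backed enum following the `EXACT(0)`/`LOW(1)`/`HIGH(2)` mapping noted in the struct comment above:
+
+```go
+fz := searchindex.Fuzziness(1) // LOW: allow edit distance 1 (assumed integer-backed enum)
+req := searchindex.SearchRequest{
+    TextQuery: "wireless headphones",
+    TypeName:  "Product",
+    Limit:     10,
+    Offset:    0,
+    Fuzziness: &fz,
+}
+result, err := idx.Search(ctx, req)
+```
+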
+## Known Gaps
+
+1. **Vector dimensions from `@embedding`**: `Manager.buildIndexSchema()` does NOT set `Dimensions` on vector fields created from `@embedding`. The dimensions come from `embedder.Dimensions()` at runtime. You must patch the `IndexConfig` after building it:
+
+```go
+for i, f := range schema.Fields {
+    if f.Type == searchindex.FieldTypeVector && f.Dimensions == 0 {
+        // modelName is the value from @embedding(model: "...")
+        embedder, _ := embedderRegistry.Get(modelName)
+        schema.Fields[i].Dimensions = embedder.Dimensions()
+    }
+}
+```
+
+2. **Population queries**: `Manager.populate()` calls `executor.Execute(ctx, "")` with an empty operation string. The actual population query mechanism needs wiring based on how the router provides the `GraphQLExecutor`.
+
+3. **Subscription handlers**: `Manager.startSubscriptions()` is a placeholder -- it creates cancellable contexts but does not yet process subscription events.
+
+## Reference Implementation
+
+The e2e test framework in `execution/searchtesting/` is the authoritative reference:
+
+- **`framework.go`** -- `setupTestEnv()` performs Steps 1-7 (parse, generate, compose, create index, populate, build plan config)
+- **`framework.go`** -- `buildPlanConfiguration()` shows how to identify the search datasource by its sentinel URL and wire the factory
+- **`framework.go`** -- `setupVectorTestEnv()` extends this for vector search (patches dimensions, wires the embedder registry)
+- **Backend test files** (`pgvector_test.go`, etc.) -- per-backend factory creation with Docker testcontainers
+- **`mock_embedder.go`** -- deterministic mock embedder for testing without external services
+- **`testdata.go`** -- `testProducts()` and `testVectorProducts()` show the document structure
+
+### Running Tests
+
+```bash
+# Bleve (offline):
+cd execution && go test ./searchtesting/ -run TestBleve -count=1
+
+# Integration backends (requires Docker):
+cd execution && go test -tags integration ./searchtesting/ -run TestPgvector -count=1 -timeout 120s
+cd execution && go test -tags integration ./searchtesting/ -run TestElasticsearch -count=1 -timeout 120s
+cd execution && go test -tags integration ./searchtesting/ -run TestWeaviate -count=1 -timeout 120s
+cd execution && go test -tags integration ./searchtesting/ -run TestQdrant -count=1 -timeout 120s
+
+# Vector search tests (requires Docker):
+cd execution && go test -tags integration ./searchtesting/ -run TestPgvectorVector -count=1 -timeout 120s
+```
+
+## Integration Checklist
+
+- [ ] Parse the config schema SDL with `search_datasource.ParseConfigSchema()`
+- [ ] Generate the search subgraph SDL with `search_datasource.GenerateSubgraphSDL()`
+- [ ] Compose with other subgraphs (search subgraph URL = `"http://search.local"`)
+- [ ] Create an `IndexFactoryRegistry` and register all desired backends
+- [ ] Create an `EmbedderRegistry` and register embedding providers (if using `@embedding`)
+- [ ] Create a `search_datasource.Factory` with both registries
+- [ ] Detect the search datasource by fetch URL `"http://search.local"` and use `search_datasource.Factory`
+- [ ] Convert each `SearchableEntity` to a `search_datasource.Configuration`
+- [ ] Patch vector field dimensions from `embedder.Dimensions()` if using `@embedding`
+- [ ] Create the `Manager` with the factory, registries, executor, and parsed config
+- [ ] Call `Manager.Start(ctx)` during router startup
+- [ ] Call `Manager.Stop()` during router shutdown
+- [ ] Implement the `GraphQLExecutor` interface for population/subscription queries
diff --git a/execution/go.mod b/execution/go.mod
index 54ad85b621..ee920302a8 100644
--- a/execution/go.mod
+++ b/execution/go.mod
@@ -11,28 +11,78 @@ require (
 	github.com/hashicorp/go-plugin v1.6.3
 	github.com/hashicorp/golang-lru v0.5.4
 	github.com/jensneuse/abstractlogger v0.0.4
+	github.com/lib/pq v1.11.2
 	github.com/sebdah/goldie/v2 v2.7.1
 	github.com/stretchr/testify v1.11.1
+	github.com/testcontainers/testcontainers-go v0.40.0
 	github.com/vektah/gqlparser/v2 v2.5.30
-	github.com/wundergraph/astjson v0.0.0-20250106123708-be463c97e083
+	github.com/wundergraph/astjson v1.0.0
 	github.com/wundergraph/cosmo/composition-go v0.0.0-20241020204711-78f240a77c99
 	github.com/wundergraph/cosmo/router v0.0.0-20251013094319-c611abf26b17
 	github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.231
 	go.uber.org/atomic v1.11.0
-	google.golang.org/grpc v1.68.1
-	google.golang.org/protobuf v1.36.9
+	google.golang.org/grpc v1.75.1
+	google.golang.org/protobuf v1.36.11
 )
 
 require (
+	github.com/RoaringBitmap/roaring/v2 v2.4.5 // indirect
+	github.com/bits-and-blooms/bitset v1.22.0 // indirect
+	github.com/blevesearch/bleve/v2 v2.5.7 // indirect
+	github.com/blevesearch/bleve_index_api v1.2.11 // indirect
+	github.com/blevesearch/geo v0.2.4 // indirect
+	github.com/blevesearch/go-faiss v1.0.26 // indirect
+	github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
+	github.com/blevesearch/gtreap v0.1.1 // indirect
+	github.com/blevesearch/mmap-go v1.0.4 // indirect
+	github.com/blevesearch/scorch_segment_api/v2 v2.3.13 // indirect
+
github.com/blevesearch/segment v0.9.1 // indirect + github.com/blevesearch/snowballstem v0.9.0 // indirect + github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect + github.com/blevesearch/vellum v1.1.0 // indirect + github.com/blevesearch/zapx/v11 v11.4.2 // indirect + github.com/blevesearch/zapx/v12 v12.4.2 // indirect + github.com/blevesearch/zapx/v13 v13.4.2 // indirect + github.com/blevesearch/zapx/v14 v14.4.2 // indirect + github.com/blevesearch/zapx/v15 v15.4.2 // indirect + github.com/blevesearch/zapx/v16 v16.2.8 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/json-iterator/go v0.0.0-20171115153421-f7279a603ede // indirect + github.com/mschoch/smat v0.2.0 // indirect + github.com/wundergraph/go-arena v1.1.0 // indirect + go.etcd.io/bbolt v1.4.0 // indirect +) + +replace github.com/wundergraph/graphql-go-tools/v2 => ../v2 + +require ( + dario.cat/mergo v1.0.2 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect github.com/agnivade/levenshtein v1.2.1 // indirect github.com/bufbuild/protocompile v0.14.1 // indirect github.com/buger/jsonparser v1.1.1 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v0.2.1 // indirect + github.com/cpuguy83/dockercfg v0.3.2 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/distribution/reference v0.6.0 // indirect github.com/dlclark/regexp2 v1.11.0 // indirect + github.com/docker/docker v28.5.1+incompatible // indirect + github.com/docker/go-connections v0.6.0 // indirect + github.com/docker/go-units v0.5.0 // indirect github.com/dop251/goja v0.0.0-20230906160731-9410bcaa81d2 // indirect + github.com/ebitengine/purego v0.8.4 // indirect github.com/fatih/color v1.18.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobwas/httphead v0.1.0 // indirect @@ -44,33 +94,57 @@ require ( github.com/hashicorp/yamux v0.1.1 // indirect github.com/jensneuse/byte-template v0.0.0-20231025215717-69252eb3ed56 // indirect github.com/kingledion/go-tools v0.6.0 // indirect + github.com/klauspost/compress v1.18.0 // indirect github.com/logrusorgru/aurora/v4 v4.0.0 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.10 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/go-archive v0.1.0 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/morikuni/aec v1.0.0 // indirect github.com/oklog/run v1.0.0 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect github.com/phf/go-queue 
v0.0.0-20170504031614-9abe38d0371d // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/r3labs/sse/v2 v2.8.1 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sergi/go-diff v1.3.1 // indirect + github.com/shirou/gopsutil/v4 v4.25.6 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sosodev/duration v1.3.1 // indirect github.com/tidwall/gjson v1.18.0 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/tidwall/sjson v1.2.5 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect github.com/urfave/cli/v2 v2.27.7 // indirect github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 // indirect + go.opentelemetry.io/otel v1.40.0 // indirect + go.opentelemetry.io/otel/metric v1.40.0 // indirect + go.opentelemetry.io/otel/trace v1.40.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect + golang.org/x/crypto v0.43.0 // indirect golang.org/x/mod v0.29.0 // indirect golang.org/x/net v0.46.0 // indirect golang.org/x/sync v0.17.0 // indirect - golang.org/x/sys v0.37.0 // indirect + golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.30.0 // indirect golang.org/x/tools v0.38.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rogchap.com/v8go v0.9.0 // indirect diff --git a/execution/go.sum b/execution/go.sum index 33e73afb60..26828c6c4f 100644 --- a/execution/go.sum +++ b/execution/go.sum @@ -1,16 +1,67 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= github.com/99designs/gqlgen v0.17.76 h1:YsJBcfACWmXWU2t1yCjoGdOmqcTfOFpjbLAE443fmYI= github.com/99designs/gqlgen v0.17.76/go.mod h1:miiU+PkAnTIDKMQ1BseUOIVeQHoiwYDZGCswoxl7xec= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/RoaringBitmap/roaring/v2 v2.4.5 h1:uGrrMreGjvAtTBobc0g5IrW1D5ldxDQYe2JW2gggRdg= +github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0= github.com/agnivade/levenshtein v1.2.1 h1:EHBY3UOn1gwdy/VbFwgo4cxecRznFk7fKWN1KOX7eoM= github.com/agnivade/levenshtein v1.2.1/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU= github.com/andreyvit/diff 
v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= +github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.22.0 h1:Tquv9S8+SGaS3EhyA+up3FXzmkhxPGjQQCkcs2uw7w4= +github.com/bits-and-blooms/bitset v1.22.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/blevesearch/bleve/v2 v2.5.7 h1:2d9YrL5zrX5EBBW++GOaEKjE+NPWeZGaX77IM26m1Z8= +github.com/blevesearch/bleve/v2 v2.5.7/go.mod h1:yj0NlS7ocGC4VOSAedqDDMktdh2935v2CSWOCDMHdSA= +github.com/blevesearch/bleve_index_api v1.2.11 h1:bXQ54kVuwP8hdrXUSOnvTQfgK0KI1+f9A0ITJT8tX1s= +github.com/blevesearch/bleve_index_api v1.2.11/go.mod h1:rKQDl4u51uwafZxFrPD1R7xFOwKnzZW7s/LSeK4lgo0= +github.com/blevesearch/geo v0.2.4 h1:ECIGQhw+QALCZaDcogRTNSJYQXRtC8/m8IKiA706cqk= +github.com/blevesearch/geo v0.2.4/go.mod h1:K56Q33AzXt2YExVHGObtmRSFYZKYGv0JEN5mdacJJR8= +github.com/blevesearch/go-faiss v1.0.26 h1:4dRLolFgjPyjkaXwff4NfbZFdE/dfywbzDqporeQvXI= +github.com/blevesearch/go-faiss v1.0.26/go.mod h1:OMGQwOaRRYxrmeNdMrXJPvVx8gBnvE5RYrr0BahNnkk= +github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo= +github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M= +github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y= +github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk= +github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc= +github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs= +github.com/blevesearch/scorch_segment_api/v2 v2.3.13 h1:ZPjv/4VwWvHJZKeMSgScCapOy8+DdmsmRyLmSB88UoY= +github.com/blevesearch/scorch_segment_api/v2 v2.3.13/go.mod h1:ENk2LClTehOuMS8XzN3UxBEErYmtwkE7MAArFTXs9Vc= +github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU= +github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw= +github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s= +github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs= +github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A= +github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ= +github.com/blevesearch/vellum v1.1.0 h1:CinkGyIsgVlYf8Y2LUQHvdelgXr6PYuvoDIajq6yR9w= +github.com/blevesearch/vellum v1.1.0/go.mod h1:QgwWryE8ThtNPxtgWJof5ndPfx0/YMBh+W2weHKPw8Y= +github.com/blevesearch/zapx/v11 v11.4.2 h1:l46SV+b0gFN+Rw3wUI1YdMWdSAVhskYuvxlcgpQFljs= +github.com/blevesearch/zapx/v11 v11.4.2/go.mod h1:4gdeyy9oGa/lLa6D34R9daXNUvfMPZqUYjPwiLmekwc= +github.com/blevesearch/zapx/v12 v12.4.2 h1:fzRbhllQmEMUuAQ7zBuMvKRlcPA5ESTgWlDEoB9uQNE= +github.com/blevesearch/zapx/v12 v12.4.2/go.mod h1:TdFmr7afSz1hFh/SIBCCZvcLfzYvievIH6aEISCte58= +github.com/blevesearch/zapx/v13 v13.4.2 h1:46PIZCO/ZuKZYgxI8Y7lOJqX3Irkc3N8W82QTK3MVks= +github.com/blevesearch/zapx/v13 v13.4.2/go.mod h1:knK8z2NdQHlb5ot/uj8wuvOq5PhDGjNYQQy0QDnopZk= 
+github.com/blevesearch/zapx/v14 v14.4.2 h1:2SGHakVKd+TrtEqpfeq8X+So5PShQ5nW6GNxT7fWYz0= +github.com/blevesearch/zapx/v14 v14.4.2/go.mod h1:rz0XNb/OZSMjNorufDGSpFpjoFKhXmppH9Hi7a877D8= +github.com/blevesearch/zapx/v15 v15.4.2 h1:sWxpDE0QQOTjyxYbAVjt3+0ieu8NCE0fDRaFxEsp31k= +github.com/blevesearch/zapx/v15 v15.4.2/go.mod h1:1pssev/59FsuWcgSnTa0OeEpOzmhtmr/0/11H0Z8+Nw= +github.com/blevesearch/zapx/v16 v16.2.8 h1:SlnzF0YGtSlrsOE3oE7EgEX6BIepGpeqxs1IjMbHLQI= +github.com/blevesearch/zapx/v16 v16.2.8/go.mod h1:murSoCJPCk25MqURrcJaBQ1RekuqSCSfMjXH4rHyA14= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY= @@ -18,26 +69,57 @@ github.com/chzyer/readline v1.5.0/go.mod h1:x22KAscuvRqlLoK9CsoYsmxoXZMMFVyOl86c github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/coder/websocket v1.8.12 h1:5bUXkEPPIbewrnkU8LTCLVaxi4N4J8ahufH2vlo4NAo= github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= +github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo= github.com/cpuguy83/go-md2man/v2 v2.0.7/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54 h1:SG7nF6SRlWhcT7cNTs5R6Hk4V2lcmLz2NsG2VnInyNo= github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54/go.mod 
h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM= +github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= github.com/dop251/goja v0.0.0-20230906160731-9410bcaa81d2 h1:3J+RqSTu+JuyCYjoe82vvUUljEfgp8i6+nyhUsaYAbg= github.com/dop251/goja v0.0.0-20230906160731-9410bcaa81d2/go.mod h1:QMWlm50DNe14hD7t24KEqZuUdC9sOTy8W6XbCU1mlw4= github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= +github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= +github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= @@ -52,8 +134,13 @@ github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod 
h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/pprof v0.0.0-20230207041349-798e818bf904 h1:4/hN5RUoecvl+RmJRE2YxKWtnnQls6rQjjW5oV7qg2U= github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= @@ -61,6 +148,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 h1:TmHmbvxPmaegwhDubVz0lICL0J5Ka2vwTzhoePEXsGE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0/go.mod h1:qztMSjm835F2bXf+5HKAPIS5qsmQDqZna/PgVt4rWtI= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg= @@ -80,9 +169,13 @@ github.com/jensneuse/diffview v1.0.0 h1:4b6FQJ7y3295JUHU3tRko6euyEboL825ZsXeZZM4 github.com/jensneuse/diffview v1.0.0/go.mod h1:i6IacuD8LnEaPuiyzMHA+Wfz5mAuycMOf3R/orUY9y4= github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= +github.com/json-iterator/go v0.0.0-20171115153421-f7279a603ede h1:YrgBGwxMRK0Vq0WSCWFaZUnTsrA/PZE/xs1QZh+/edg= +github.com/json-iterator/go v0.0.0-20171115153421-f7279a603ede/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/kingledion/go-tools v0.6.0 h1:y8C/4mWoHgLkO45dB+Y/j0o4Y4WUB5lDTAcMPMtFpTg= github.com/kingledion/go-tools v0.6.0/go.mod h1:qcDJQxBui/H/hterGb90GMlLs9Yi7QrwaJL8OGdbsms= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= @@ -95,8 +188,14 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= 
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.11.2 h1:x6gxUeu39V0BHZiugWe8LXZYZ+Utk7hSJGThs8sdzfs= +github.com/lib/pq v1.11.2/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA= github.com/logrusorgru/aurora/v4 v4.0.0 h1:sRjfPpun/63iADiSvGGjgA1cAYegEWMPCJdUpJYn9JA= github.com/logrusorgru/aurora/v4 v4.0.0/go.mod h1:lP0iIa2nrnT/qoFXcOZSrZQpJ1o6n2CUf/hyHi2Q4ZQ= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= @@ -105,8 +204,32 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= +github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM= +github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw= github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod 
h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/phf/go-queue v0.0.0-20170504031614-9abe38d0371d h1:U+PMnTlV2tu7RuMK5etusZG3Cf+rpow5hqQByeCzJ2g= github.com/phf/go-queue v0.0.0-20170504031614-9abe38d0371d/go.mod h1:lXfE4PvvTW5xOjO6Mba8zDPyw8M93B6AQ7frTGnMlA8= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -114,12 +237,14 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/r3labs/sse/v2 v2.8.1 h1:lZH+W4XOLIq88U5MIHOsLec7+R62uhz3bIi2yn0Sg8o= github.com/r3labs/sse/v2 v2.8.1/go.mod h1:Igau6Whc+F17QUgML1fYe1VPZzTV6EMCnYktEmkNJ7I= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sebdah/goldie/v2 v2.7.1 h1:PkBHymaYdtvEkZV7TmyqKxdmn5/Vcj+8TpATWZjnG5E= @@ -127,6 +252,8 @@ github.com/sebdah/goldie/v2 v2.7.1/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvK github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= +github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= @@ -134,6 +261,8 @@ github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -141,6 +270,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU= +github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY= github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= @@ -151,22 +282,50 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/urfave/cli/v2 v2.27.7 h1:bH59vdhbjLv3LAvIu6gd0usJHgoTTPhCFib8qqOwXYU= github.com/urfave/cli/v2 v2.27.7/go.mod h1:CyNAG/xg+iAOg0N4MPGZqVmv2rCoP267496AOXUZjA4= github.com/vektah/gqlparser/v2 v2.5.30 h1:EqLwGAFLIzt1wpx1IPpY67DwUujF1OfzgEyDsLrN6kE= github.com/vektah/gqlparser/v2 v2.5.30/go.mod h1:D1/VCZtV3LPnQrcPBeR/q5jkSQIPti0uYCP/RI0gIeo= -github.com/wundergraph/astjson v0.0.0-20250106123708-be463c97e083 h1:8/D7f8gKxTBjW+SZK4mhxTTBVpxcqeBgWF1Rfmltbfk= -github.com/wundergraph/astjson v0.0.0-20250106123708-be463c97e083/go.mod h1:eOTL6acwctsN4F3b7YE+eE2t8zcJ/doLm9sZzsxxxrE= +github.com/wundergraph/astjson v1.0.0 h1:rETLJuQkMWWW03HCF6WBttEBOu8gi5vznj5KEUPVV2Q= +github.com/wundergraph/astjson v1.0.0/go.mod h1:h12D/dxxnedtLzsKyBLK7/Oe4TAoGpRVC9nDpDrZSWw= github.com/wundergraph/cosmo/composition-go v0.0.0-20241020204711-78f240a77c99 h1:TGXDYfDhwFLFTuNuCwkuqXT5aXGz47zcurXLfTBS9w4= github.com/wundergraph/cosmo/composition-go v0.0.0-20241020204711-78f240a77c99/go.mod h1:fUuOAUAXUFB/mlSkAaImGeE4A841AKR5dTMWhV4ibxI= github.com/wundergraph/cosmo/router v0.0.0-20251013094319-c611abf26b17 h1:GjO2E8LTf3U5JiQJCY4MmlRcAjVt7IvAbWFSgEjQdl8= github.com/wundergraph/cosmo/router v0.0.0-20251013094319-c611abf26b17/go.mod h1:7kt64e0LOLMBqOzrfu9PuLRn9cVT9YN1Bb3EennVtws= -github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.231 h1:2C8LNFGs8MtI2yPy2/a2WRf9/X2FoMqXlEJkpTjvsTg= -github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.231/go.mod h1:ErOQH1ki2+SZB8JjpTyGVnoBpg5picIyjvuWQJP4abg= +github.com/wundergraph/go-arena v1.1.0 h1:9+wSRkJAkA2vbYHp6s8tEGhPViRGQNGXqPHT0QzhdIc= +github.com/wundergraph/go-arena v1.1.0/go.mod h1:ROOysEHWJjLQ8FSfNxZCziagb7Qw2nXY3/vgKRh7eWw= github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 
h1:FnBeRrxr7OU4VvAzt5X7s6266i6cSVkkFPS0TuXWbIg= github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.etcd.io/bbolt v1.4.0 h1:TU77id3TnN/zKr7CO/uk+fBCwF2jGcMuw2B/FMAzYIk= +go.etcd.io/bbolt v1.4.0/go.mod h1:AsD+OCi/qPN1giOX1aiLAha3o1U8rAz65bvN4j0sRuk= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 h1:yd02MEjBdJkG3uabWP9apV+OuWRIXGDuJEUJbOHmCFU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:umTcuxiv1n/s/S6/c2AT/g2CQ7u5C59sHDNmfSwgz7Q= +go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms= +go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.40.0 h1:QKdN8ly8zEMrByybbQgv8cWBcdAarwmIPZ6FThrWXJs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.40.0/go.mod h1:bTdK1nhqF76qiPoCCdyFIV+N/sRHYXYCTQc+3VCi3MI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= +go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g= +go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc= +go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI= +go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= +go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw= +go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg= +go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw= +go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA= +go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A= +go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= @@ -183,6 +342,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod 
h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= @@ -206,12 +367,15 @@ golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -220,10 +384,14 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -231,6 +399,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text 
v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= +golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= @@ -243,15 +413,18 @@ golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gonum.org/v1/gonum v0.14.0 h1:2NiG67LD1tEH0D7kM+ps2V+fXmsAnpUeec7n8tcr4S0= -gonum.org/v1/gonum v0.14.0/go.mod h1:AoWeoz0becf9QMWtE8iWXNXc27fK4fNeHNf/oMejGfU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f h1:OxYkA3wjPsZyBylwymxSHa7ViiW1Sml4ToBrncvFehI= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f/go.mod h1:+2Yz8+CLJbIfL9z73EW45avw8Lmge3xVElCP9zEKi50= -google.golang.org/grpc v1.68.1 h1:oI5oTa11+ng8r8XMMN7jAOmWfPZWbYpCFaMUTACxkM0= -google.golang.org/grpc v1.68.1/go.mod h1:+q1XYFJjShcqn0QZHvCyeR4CXPA+llXIeUIfIe00waw= -google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= -google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/genproto/googleapis/api v0.0.0-20250707201910-8d1bb00bc6a7 h1:FiusG7LWj+4byqhbvmB+Q93B/mOxJLN2DTozDuZm4EU= +google.golang.org/genproto/googleapis/api v0.0.0-20250707201910-8d1bb00bc6a7/go.mod h1:kXqgZtrWaf6qS3jZOCnCH7WYfrvFjkC51bM8fz3RsCA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5/go.mod h1:M4/wBTSeyLxupu3W3tJtOgB14jILAS/XWPSSa3TAlJc= +google.golang.org/grpc v1.75.1 h1:/ODCNEuf9VghjgO3rqLcfg8fiOP0nSluljWFlDxELLI= +google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y= gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -264,8 +437,11 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 
h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= rogchap.com/v8go v0.9.0 h1:wYbUCO4h6fjTamziHrzyrPnpFNuzPpjZY+nfmZjNaew= rogchap.com/v8go v0.9.0/go.mod h1:MxgP3pL2MW4dpme/72QRs8sgNMmM0pRc8DPhcuLWPAs= diff --git a/execution/searchtesting/algolia_test.go b/execution/searchtesting/algolia_test.go new file mode 100644 index 0000000000..e115888cad --- /dev/null +++ b/execution/searchtesting/algolia_test.go @@ -0,0 +1,108 @@ +//go:build integration + +package searchtesting + +import ( + "context" + "encoding/json" + "fmt" + "os" + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/algolia" +) + +const algoliaConfigSDL = ` +extend schema @index(name: "products", backend: "algolia", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func TestAlgolia(t *testing.T) { + t.Parallel() + + appID := os.Getenv("ALGOLIA_APP_ID") + apiKey := os.Getenv("ALGOLIA_API_KEY") + if appID == "" || apiKey == "" { + t.Skip("ALGOLIA_APP_ID and ALGOLIA_API_KEY environment variables are required") + } + + makeSetup := func(name, configSDL string) BackendSetup { + return BackendSetup{ + Name: name, + ConfigSDL: configSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := &algolia.Factory{} + cfg := algolia.Config{ + AppID: appID, + APIKey: apiKey, + } + cfgJSON, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("marshal config: %v", err) + } + indexName := fmt.Sprintf("%s_%d", name, time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, schema, cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + Hooks: BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(2 * time.Second) + }, + }, + } + } + + t.Run("standard", func(t *testing.T) { + t.Parallel() + // ExpectedResponses must be populated after running with valid ALGOLIA_APP_ID and ALGOLIA_API_KEY. 
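+ // Once captured (e.g. by running this subtest against a real Algolia app and
+ // recording the responses), wire them in the same way the "date" subtest below does:
+ //   setup := makeSetup("algolia", algoliaConfigSDL)
+ //   setup.ExpectedResponses = map[string]string{ /* recorded responses */ }
+ //   RunAllScenarios(t, setup)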
+ RunAllScenarios(t, makeSetup("algolia", algoliaConfigSDL)) + }) + + t.Run("date", func(t *testing.T) { + t.Parallel() + setup := makeSetup("algolia_date", dateConfigSDL("algolia", "{}")) + setup.ExpectedResponses = map[string]string{ + "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`, + "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "date_gt_lt": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + } + RunDateScenarios(t, setup) + }) + + t.Run("suggest", func(t *testing.T) { + t.Parallel() + RunSuggestScenarios(t, makeSetup("algolia_suggest", suggestConfigSDL("algolia", "{}"))) + }) + + t.Run("fuzzy", func(t *testing.T) { + t.Parallel() + RunFuzzyScenarios(t, makeSetup("algolia_fuzzy", algoliaConfigSDL)) + }) +} diff --git a/execution/searchtesting/bleve_test.go b/execution/searchtesting/bleve_test.go new file mode 100644 index 0000000000..6785ab04a5 --- /dev/null +++ b/execution/searchtesting/bleve_test.go @@ -0,0 +1,318 @@ +package searchtesting + +import ( + "context" + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/bleve" +) + +const 
bleveInlineConfigSDL = ` +extend schema @index(name: "products", backend: "bleve", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts", resultsMetaInformation: false) { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +const bleveConfigSDL = ` +extend schema @index(name: "products", backend: "bleve", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +const bleveCursorConfigSDL = ` +extend schema @index(name: "products", backend: "bleve", config: "{}", cursorBasedPagination: true) + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func TestBleve(t *testing.T) { + t.Parallel() + RunAllScenarios(t, BackendSetup{ + Name: "bleve", + ConfigSDL: bleveConfigSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + ExpectedResponses: map[string]string{ + "supergraph_sdl": expectedSupergraphSDL, + "basic_search_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_keyword_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","rating":4.7}},{"node":{"id":"1","name":"Running Shoes","rating":4.5}},{"node":{"id":"2","name":"Basketball Shoes","rating":4.2}}]}}}`, + "filter_boolean": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "filter_numeric_range": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}}],"totalCount":2}}}`, + "filter_AND": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`, + "filter_OR": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_NOT": 
`{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "sort_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}]}}}`, + "pagination_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","reviews":[{"text":"Nice belt","stars":3}]}},{"node":{"id":"1","reviews":[{"text":"Great shoes","stars":5}]}}],"totalCount":4}}}`, + "score_and_totalCount": `{"data":{"searchProducts":{"hits":[{"score":0.7768564486857903,"node":{"id":"4","manufacturer":"Smartwool"}},{"score":0.7768564486857903,"node":{"id":"3","manufacturer":"Gucci"}},{"score":0.7768564486857903,"node":{"id":"1","manufacturer":"Nike"}},{"score":0.7768564486857903,"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`, + "facets_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"facets":[{"field":"category","values":[{"value":"Footwear","count":3},{"value":"Accessories","count":1}]}]}}}`, + }, + }) +} + +func TestBleveInline(t *testing.T) { + t.Parallel() + RunInlineScenarios(t, BackendSetup{ + Name: "bleve_inline", + ConfigSDL: bleveInlineConfigSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: false, // inline style has no facets + }, + ExpectedResponses: map[string]string{ + "supergraph_sdl": expectedInlineSupergraphSDL, + "basic_search_inline": `{"data":{"searchProducts":[{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"},{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"},{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"},{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}]}}`, + "filter_keyword_inline": `{"data":{"searchProducts":[{"id":"4","name":"Wool Socks"},{"id":"1","name":"Running Shoes"},{"id":"2","name":"Basketball Shoes"}]}}`, + "filter_boolean_inline": `{"data":{"searchProducts":[{"id":"3","manufacturer":"Gucci"}]}}`, + "filter_numeric_range_inline": `{"data":{"searchProducts":[{"id":"3","manufacturer":"Gucci"},{"id":"1","manufacturer":"Nike"}]}}`, + "filter_AND_inline": `{"data":{"searchProducts":[{"id":"4","manufacturer":"Smartwool"},{"id":"1","manufacturer":"Nike"},{"id":"2","manufacturer":"Adidas"}]}}`, + "filter_OR_inline": `{"data":{"searchProducts":[{"id":"3","manufacturer":"Gucci"},{"id":"2","manufacturer":"Adidas"}]}}`, + "filter_NOT_inline": `{"data":{"searchProducts":[{"id":"3","manufacturer":"Gucci"}]}}`, + "sort_inline": `{"data":{"searchProducts":[{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"},{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"},{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"},{"id":"2","name":"Basketball 
Shoes","price":129.99,"manufacturer":"Adidas"}]}}`, + "pagination_inline": `{"data":{"searchProducts":[{"id":"3","name":"Leather Belt"},{"id":"1","name":"Running Shoes"}]}}`, + }, + }) +} + +func TestBleveHybrid(t *testing.T) { + t.Parallel() + // Bleve doesn't support vectors, but the hybrid pipeline sets both TextQuery + // and Vector on SearchRequest. Bleve silently ignores the vector and performs + // text-only search. This test validates the pipeline doesn't break. + RunHybridScenarios(t, VectorBackendSetup{ + BackendSetup: BackendSetup{ + Name: "bleve_hybrid", + ConfigSDL: vectorConfigSDL("bleve", "{}"), + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + }, + Embedder: &MockEmbedder{}, + }) +} + +func TestBleveHighlights(t *testing.T) { + t.Parallel() + RunHighlightScenarios(t, BackendSetup{ + Name: "bleve_highlights", + ConfigSDL: bleveConfigSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + }) +} + +func TestBleveAdditionalFilters(t *testing.T) { + t.Parallel() + RunAdditionalFilterScenarios(t, BackendSetup{ + Name: "bleve_additional_filters", + ConfigSDL: bleveConfigSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + ExpectedResponses: map[string]string{ + "filter_string_ne": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "filter_string_in": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`, + "filter_string_startsWith": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`, + }, + }) +} + +func TestBleveBoosting(t *testing.T) { + t.Parallel() + RunBoostingScenarios(t, BackendSetup{ + Name: "bleve_boosting", + ConfigSDL: boostConfigSDL("bleve", "{}"), + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + }) +} + +func TestBleveFuzzy(t *testing.T) { + t.Parallel() + 
RunFuzzyScenarios(t, BackendSetup{ + Name: "bleve_fuzzy", + ConfigSDL: bleveConfigSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + }) +} + +func TestBleveSuggest(t *testing.T) { + t.Parallel() + RunSuggestScenarios(t, BackendSetup{ + Name: "bleve_suggest", + ConfigSDL: suggestConfigSDL("bleve", "{}"), + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + }) +} + +func TestBleveDate(t *testing.T) { + t.Parallel() + RunDateScenarios(t, BackendSetup{ + Name: "bleve_date", + ConfigSDL: dateConfigSDL("bleve", "{}"), + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + ExpectedResponses: map[string]string{ + "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`, + "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "date_gt_lt": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather 
Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + }, + }) +} + +func TestBleveCursor(t *testing.T) { + t.Parallel() + RunCursorScenarios(t, BackendSetup{ + Name: "bleve_cursor", + ConfigSDL: bleveCursorConfigSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + }) +} diff --git a/execution/searchtesting/elasticsearch_test.go b/execution/searchtesting/elasticsearch_test.go new file mode 100644 index 0000000000..4165d9314a --- /dev/null +++ b/execution/searchtesting/elasticsearch_test.go @@ -0,0 +1,192 @@ +//go:build integration + +package searchtesting + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/elasticsearch" +) + +const elasticsearchConfigSDL = ` +extend schema @index(name: "products", backend: "elasticsearch", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func startElasticsearch(t *testing.T) string { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "docker.elastic.co/elasticsearch/elasticsearch:8.13.4", + ExposedPorts: []string{"9200/tcp"}, + Env: map[string]string{ + "discovery.type": "single-node", + "xpack.security.enabled": "false", + "ES_JAVA_OPTS": "-Xms512m -Xmx512m", + }, + WaitingFor: wait.ForHTTP("/"). + WithPort("9200/tcp"). 
+ WithStartupTimeout(120 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start elasticsearch container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "9200/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return fmt.Sprintf("http://%s:%s", host, port.Port()) +} + +func TestElasticsearch(t *testing.T) { + t.Parallel() + baseURL := startElasticsearch(t) + + makeSetup := func(name, configSDL string) BackendSetup { + return BackendSetup{ + Name: name, + ConfigSDL: configSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := elasticsearch.NewFactory() + cfg := elasticsearch.Config{ + Addresses: []string{baseURL}, + } + cfgJSON, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("marshal config: %v", err) + } + idx, err := factory.CreateIndex(context.Background(), name, schema, cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + Hooks: BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(2 * time.Second) + }, + }, + } + } + + t.Run("standard", func(t *testing.T) { + t.Parallel() + setup := makeSetup("elasticsearch", elasticsearchConfigSDL) + setup.ExpectedResponses = map[string]string{ + "supergraph_sdl": expectedSupergraphSDL, + "basic_search_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_keyword_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","rating":4.7}},{"node":{"id":"1","name":"Running Shoes","rating":4.5}},{"node":{"id":"2","name":"Basketball Shoes","rating":4.2}}]}}}`, + "filter_boolean": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "filter_numeric_range": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}}],"totalCount":2}}}`, + "filter_AND": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`, + "filter_OR": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_NOT": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "sort_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}]}}}`, + "pagination_with_entity_join": 
`{"data":{"searchProducts":{"hits":[{"node":{"id":"3","reviews":[{"text":"Nice belt","stars":3}]}},{"node":{"id":"1","reviews":[{"text":"Great shoes","stars":5}]}}],"totalCount":4}}}`, + "score_and_totalCount": `{"data":{"searchProducts":{"hits":[{"score":0,"node":{"id":"4","manufacturer":"Smartwool"}},{"score":0,"node":{"id":"3","manufacturer":"Gucci"}},{"score":0,"node":{"id":"1","manufacturer":"Nike"}},{"score":0,"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`, + "facets_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"facets":[{"field":"category","values":[{"value":"Footwear","count":3},{"value":"Accessories","count":1}]}]}}}`, + } + RunAllScenarios(t, setup) + }) + + t.Run("vector", func(t *testing.T) { + t.Parallel() + RunVectorScenarios(t, VectorBackendSetup{ + BackendSetup: makeSetup("elasticsearch_vector", vectorConfigSDL("elasticsearch", "{}")), + Embedder: &MockEmbedder{}, + }) + }) + + t.Run("hybrid", func(t *testing.T) { + t.Parallel() + RunHybridScenarios(t, VectorBackendSetup{ + BackendSetup: makeSetup("elasticsearch_hybrid", vectorConfigSDL("elasticsearch", "{}")), + Embedder: &MockEmbedder{}, + }) + }) + + t.Run("geo", func(t *testing.T) { + t.Parallel() + RunGeoScenarios(t, GeoBackendSetup{ + BackendSetup: makeSetup("elasticsearch_geo", geoConfigSDL("elasticsearch", "{}")), + }) + }) + + t.Run("highlights", func(t *testing.T) { + t.Parallel() + RunHighlightScenarios(t, makeSetup("elasticsearch_highlights", elasticsearchConfigSDL)) + }) + + t.Run("boosting", func(t *testing.T) { + t.Parallel() + RunBoostingScenarios(t, makeSetup("elasticsearch_boosting", boostConfigSDL("elasticsearch", "{}"))) + }) + + t.Run("fuzzy", func(t *testing.T) { + t.Parallel() + RunFuzzyScenarios(t, makeSetup("elasticsearch_fuzzy", elasticsearchConfigSDL)) + }) + + t.Run("suggest", func(t *testing.T) { + t.Parallel() + RunSuggestScenarios(t, makeSetup("elasticsearch_suggest", suggestConfigSDL("elasticsearch", "{}"))) + }) + + t.Run("date", func(t *testing.T) { + t.Parallel() + setup := makeSetup("elasticsearch_date", dateConfigSDL("elasticsearch", "{}")) + setup.ExpectedResponses = map[string]string{ + "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`, + "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "date_gt_lt": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + 
"datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + } + RunDateScenarios(t, setup) + }) +} diff --git a/execution/searchtesting/framework.go b/execution/searchtesting/framework.go new file mode 100644 index 0000000000..a0e4d2ff4e --- /dev/null +++ b/execution/searchtesting/framework.go @@ -0,0 +1,2546 @@ +package searchtesting + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "runtime" + "strings" + "testing" + + "google.golang.org/protobuf/encoding/protojson" + + "github.com/wundergraph/cosmo/composition-go" + nodev1 "github.com/wundergraph/cosmo/router/gen/proto/wg/cosmo/node/v1" + + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/productdetails" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astparser" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/graphql_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/plan" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// expectedSupergraphSDL is the expected composed supergraph schema for the standard Product test setup. +// All backends use the same config SDL structure, so the composed supergraph is identical. +var expectedSupergraphSDL = `directive @tag(name: String!) repeatable on ARGUMENT_DEFINITION | ENUM | ENUM_VALUE | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +input StringFilter { + eq: String + ne: String + in: [String!] + contains: String + startsWith: String +} + +input FloatFilter { + eq: Float + gt: Float + gte: Float + lt: Float + lte: Float +} + +input IntFilter { + eq: Int + gt: Int + gte: Int + lt: Int + lte: Int +} + +enum SortDirection { + ASC + DESC +} + +enum Fuzziness { + EXACT + LOW + HIGH +} + +type SearchFacet { + field: String! + values: [SearchFacetValue!]! +} + +type SearchFacetValue { + value: String! + count: Int! +} + +type SearchHighlight { + field: String! + fragments: [String!]! 
+} + +input ProductFilter { + name: StringFilter + category: StringFilter + price: FloatFilter + inStock: Boolean + AND: [ProductFilter!] + OR: [ProductFilter!] + NOT: ProductFilter +} + +enum ProductSortField { + RELEVANCE + NAME + CATEGORY + PRICE +} + +input ProductSort { + field: ProductSortField! + direction: SortDirection! +} + +type SearchProductResult { + hits: [SearchProductHit!]! + totalCount: Int! + facets: [SearchFacet!] +} + +type SearchProductHit { + score: Float! + highlights: [SearchHighlight!] + node: Product! +} + +type Query { + searchProducts(query: String!, fuzziness: Fuzziness, filter: ProductFilter, sort: [ProductSort!], limit: Int, offset: Int, facets: [String!]): SearchProductResult! +} + +type Product { + id: ID! + name: String + description: String + category: String + price: Float + inStock: Boolean + reviews: [Review!]! + rating: Float + manufacturer: String +} + +type Review { + text: String! + stars: Int! +}` + +// expectedInlineSupergraphSDL is the expected composed supergraph schema for inline style (no wrapper types). +var expectedInlineSupergraphSDL = `directive @tag(name: String!) repeatable on ARGUMENT_DEFINITION | ENUM | ENUM_VALUE | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +input StringFilter { + eq: String + ne: String + in: [String!] + contains: String + startsWith: String +} + +input FloatFilter { + eq: Float + gt: Float + gte: Float + lt: Float + lte: Float +} + +input IntFilter { + eq: Int + gt: Int + gte: Int + lt: Int + lte: Int +} + +enum SortDirection { + ASC + DESC +} + +enum Fuzziness { + EXACT + LOW + HIGH +} + +input ProductFilter { + name: StringFilter + category: StringFilter + price: FloatFilter + inStock: Boolean + AND: [ProductFilter!] + OR: [ProductFilter!] + NOT: ProductFilter +} + +enum ProductSortField { + RELEVANCE + NAME + CATEGORY + PRICE +} + +input ProductSort { + field: ProductSortField! + direction: SortDirection! +} + +type Query { + searchProducts(query: String!, fuzziness: Fuzziness, filter: ProductFilter, sort: [ProductSort!], limit: Int, offset: Int): [Product!]! +} + +type Product { + id: ID! + name: String + description: String + category: String + price: Float + inStock: Boolean + reviews: [Review!]! + rating: Float + manufacturer: String +} + +type Review { + text: String! + stars: Int! +}` + +// BackendCaps describes what capabilities a backend supports. +type BackendCaps struct { + HasTextSearch bool + HasFacets bool +} + +// BackendHooks provides hooks for backend-specific behavior. +type BackendHooks struct { + WaitForIndex func(t *testing.T) +} + +// BackendSetup holds the configuration for a single backend's e2e test. +type BackendSetup struct { + // Name is the backend identifier (e.g. "bleve", "elasticsearch"). + Name string + // ConfigSDL is the complete configuration schema SDL with @index, @searchable, @indexed directives. + ConfigSDL string + // CreateIndex creates a search index for the backend. + CreateIndex func(t *testing.T, name string, schema searchindex.IndexConfig, configJSON []byte) searchindex.Index + // Caps describes the backend's capabilities. + Caps BackendCaps + // Hooks provides backend-specific behavior. + Hooks BackendHooks + // ExpectedResponses maps test scenario names to their expected JSON response strings. + ExpectedResponses map[string]string +} + +// entitySubgraphSDL returns the SDL of the entity subgraph by reading the schema file. 
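+// The path is resolved relative to this source file via runtime.Caller, so the
+// lookup works regardless of the working directory the tests run from.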
+func entitySubgraphSDL(t *testing.T) string { + t.Helper() + _, thisFile, _, _ := runtime.Caller(0) + schemaPath := filepath.Join(filepath.Dir(thisFile), "productdetails", "graph", "schema.graphqls") + data, err := os.ReadFile(schemaPath) + if err != nil { + t.Fatalf("read entity subgraph schema: %v", err) + } + return string(data) +} + +// buildIndexSchema converts parsed entity fields into a searchindex.IndexConfig. +func buildIndexSchema(indexName string, entity *search_datasource.SearchableEntity) searchindex.IndexConfig { + schema := searchindex.IndexConfig{Name: indexName} + for _, f := range entity.Fields { + schema.Fields = append(schema.Fields, searchindex.FieldConfig{ + Name: f.FieldName, + Type: f.IndexType, + Filterable: f.Filterable, + Sortable: f.Sortable, + Dimensions: f.Dimensions, + Weight: f.Weight, + Autocomplete: f.Autocomplete, + }) + } + for _, ef := range entity.EmbeddingFields { + schema.Fields = append(schema.Fields, searchindex.FieldConfig{ + Name: ef.FieldName, + Type: searchindex.FieldTypeVector, + }) + } + return schema +} + +// entityToConfiguration converts a parsed SearchableEntity to a search_datasource.Configuration. +func entityToConfiguration(entity *search_datasource.SearchableEntity) search_datasource.Configuration { + cfg := search_datasource.Configuration{ + IndexName: entity.IndexName, + SearchField: entity.SearchField, + EntityTypeName: entity.TypeName, + KeyFields: entity.KeyFields, + HasTextSearch: entity.HasTextSearch(), + HasVectorSearch: entity.HasVectorSearch(), + ResultsMetaInformation: entity.ResultsMetaInformation, + CursorBasedPagination: entity.CursorBasedPagination, + CursorBidirectional: entity.CursorBidirectional, + } + for _, f := range entity.Fields { + cfg.Fields = append(cfg.Fields, search_datasource.IndexedFieldConfig{ + FieldName: f.FieldName, + GraphQLType: f.GraphQLType, + IndexType: f.IndexType, + Filterable: f.Filterable, + Sortable: f.Sortable, + Dimensions: f.Dimensions, + Weight: f.Weight, + Autocomplete: f.Autocomplete, + }) + } + for _, ef := range entity.EmbeddingFields { + cfg.EmbeddingFields = append(cfg.EmbeddingFields, search_datasource.EmbeddingFieldConfig{ + FieldName: ef.FieldName, + SourceFields: ef.SourceFields, + Template: ef.Template, + Model: ef.Model, + }) + } + return cfg +} + +// composeSubgraphs composes the search and entity subgraph SDLs using cosmo composition-go. +func composeSubgraphs(t *testing.T, searchSDL, entitySDL, entityURL string) *nodev1.RouterConfig { + t.Helper() + + subgraphs := []*composition.Subgraph{ + { + Name: "search", + URL: "http://search.local", + Schema: searchSDL, + SubscriptionProtocol: "ws", + }, + { + Name: "productdetails", + URL: entityURL, + Schema: entitySDL, + SubscriptionProtocol: "ws", + }, + } + + resultJSON, err := composition.BuildRouterConfiguration(subgraphs...) + if err != nil { + t.Fatalf("composition failed: %v", err) + } + + var routerConfig nodev1.RouterConfig + if err := protojson.Unmarshal([]byte(resultJSON), &routerConfig); err != nil { + t.Fatalf("unmarshal router config: %v", err) + } + + return &routerConfig +} + +// loadInternedString resolves an interned string from the engine config. +func loadInternedString(engineConfig *nodev1.EngineConfiguration, str *nodev1.InternedString) (string, error) { + key := str.GetKey() + s, ok := engineConfig.StringStorage[key] + if !ok { + return "", fmt.Errorf("no string found for key %q", key) + } + return s, nil +} + +// noopSubscriptionClient satisfies graphql_datasource.GraphQLSubscriptionClient for tests. 
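+// None of the test scenarios open subscriptions, so every method is a no-op
+// that exists only to satisfy the interface.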
+type noopSubscriptionClient struct{} + +func (n *noopSubscriptionClient) Subscribe(_ *resolve.Context, _ graphql_datasource.GraphQLSubscriptionOptions, _ resolve.SubscriptionUpdater) error { + return nil +} +func (n *noopSubscriptionClient) SubscribeAsync(_ *resolve.Context, _ uint64, _ graphql_datasource.GraphQLSubscriptionOptions, _ resolve.SubscriptionUpdater) error { + return nil +} +func (n *noopSubscriptionClient) Unsubscribe(_ uint64) {} + +// buildPlanConfiguration builds a plan.Configuration from the composition output, +// replacing the search subgraph's datasource with a search_datasource. +func buildPlanConfiguration( + t *testing.T, + routerConfig *nodev1.RouterConfig, + idx searchindex.Index, + searchConfig search_datasource.Configuration, + entityServerURL string, + embedderRegistry *searchindex.EmbedderRegistry, +) plan.Configuration { + t.Helper() + + engineConfig := routerConfig.EngineConfig + var planConfig plan.Configuration + planConfig.DefaultFlushIntervalMillis = engineConfig.DefaultFlushInterval + + // Extract field configurations from composition output. + for _, fc := range engineConfig.FieldConfigurations { + var args []plan.ArgumentConfiguration + for _, ac := range fc.ArgumentsConfiguration { + arg := plan.ArgumentConfiguration{ + Name: ac.Name, + RenderConfig: plan.RenderArgumentAsJSONValue, + } + switch ac.SourceType { + case nodev1.ArgumentSource_FIELD_ARGUMENT: + arg.SourceType = plan.FieldArgumentSource + case nodev1.ArgumentSource_OBJECT_FIELD: + arg.SourceType = plan.ObjectFieldSource + } + args = append(args, arg) + } + planConfig.Fields = append(planConfig.Fields, plan.FieldConfiguration{ + TypeName: fc.TypeName, + FieldName: fc.FieldName, + Arguments: args, + }) + } + + // Extract type configurations. + for _, tc := range engineConfig.TypeConfigurations { + planConfig.Types = append(planConfig.Types, plan.TypeConfiguration{ + TypeName: tc.TypeName, + RenameTo: tc.RenameTo, + }) + } + + // Build datasources from composition output. + for _, ds := range engineConfig.DatasourceConfigurations { + metadata := extractDataSourceMetadata(ds) + + fetchURL := "" + if ds.CustomGraphql != nil && ds.CustomGraphql.Fetch != nil { + fetchURL = ds.CustomGraphql.Fetch.GetUrl().GetStaticVariableContent() + } + + if fetchURL == "http://search.local" { + // Search datasource — use search_datasource.Factory. + searchFactory := search_datasource.NewFactory(context.Background(), nil, embedderRegistry) + searchFactory.RegisterIndex(searchConfig.IndexName, idx) + + searchDS, err := plan.NewDataSourceConfiguration[search_datasource.Configuration]( + ds.Id, + searchFactory, + metadata, + searchConfig, + ) + if err != nil { + t.Fatalf("NewDataSourceConfiguration (search): %v", err) + } + planConfig.DataSources = append(planConfig.DataSources, searchDS) + } else { + // Entity datasource — use graphql_datasource.Factory. 
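+ // The upstream schema and federation metadata come straight from the
+ // composition output; only the fetch URL is swapped for the local
+ // httptest server started in setupTestEnv.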
+ graphqlSchema, err := loadInternedString(engineConfig, ds.CustomGraphql.GetUpstreamSchema()) + if err != nil { + t.Fatalf("load upstream schema: %v", err) + } + + schemaConfig, err := graphql_datasource.NewSchemaConfiguration( + graphqlSchema, + &graphql_datasource.FederationConfiguration{ + Enabled: ds.CustomGraphql.Federation.Enabled, + ServiceSDL: ds.CustomGraphql.Federation.ServiceSdl, + }, + ) + if err != nil { + t.Fatalf("NewSchemaConfiguration (entity): %v", err) + } + + entityConfig, err := graphql_datasource.NewConfiguration(graphql_datasource.ConfigurationInput{ + Fetch: &graphql_datasource.FetchConfiguration{ + URL: entityServerURL, + }, + SchemaConfiguration: schemaConfig, + }) + if err != nil { + t.Fatalf("NewConfiguration (entity): %v", err) + } + + entityFactory, err := graphql_datasource.NewFactory(context.Background(), http.DefaultClient, &noopSubscriptionClient{}) + if err != nil { + t.Fatalf("NewFactory (entity): %v", err) + } + + entityDS, err := plan.NewDataSourceConfiguration[graphql_datasource.Configuration]( + ds.Id, + entityFactory, + metadata, + entityConfig, + ) + if err != nil { + t.Fatalf("NewDataSourceConfiguration (entity): %v", err) + } + planConfig.DataSources = append(planConfig.DataSources, entityDS) + } + } + + planConfig.DisableResolveFieldPositions = true + + return planConfig +} + +// extractDataSourceMetadata extracts plan.DataSourceMetadata from a composition datasource config. +func extractDataSourceMetadata(ds *nodev1.DataSourceConfiguration) *plan.DataSourceMetadata { + meta := &plan.DataSourceMetadata{ + RootNodes: make([]plan.TypeField, 0, len(ds.RootNodes)), + ChildNodes: make([]plan.TypeField, 0, len(ds.ChildNodes)), + FederationMetaData: plan.FederationMetaData{ + Keys: make([]plan.FederationFieldConfiguration, 0, len(ds.Keys)), + Requires: make([]plan.FederationFieldConfiguration, 0, len(ds.Requires)), + Provides: make([]plan.FederationFieldConfiguration, 0, len(ds.Provides)), + }, + } + + for _, node := range ds.RootNodes { + meta.RootNodes = append(meta.RootNodes, plan.TypeField{ + TypeName: node.TypeName, + FieldNames: node.FieldNames, + }) + } + for _, node := range ds.ChildNodes { + meta.ChildNodes = append(meta.ChildNodes, plan.TypeField{ + TypeName: node.TypeName, + FieldNames: node.FieldNames, + }) + } + for _, key := range ds.Keys { + meta.FederationMetaData.Keys = append(meta.FederationMetaData.Keys, plan.FederationFieldConfiguration{ + TypeName: key.TypeName, + FieldName: key.FieldName, + SelectionSet: key.SelectionSet, + }) + } + for _, req := range ds.Requires { + meta.FederationMetaData.Requires = append(meta.FederationMetaData.Requires, plan.FederationFieldConfiguration{ + TypeName: req.TypeName, + FieldName: req.FieldName, + SelectionSet: req.SelectionSet, + }) + } + for _, prov := range ds.Provides { + meta.FederationMetaData.Provides = append(meta.FederationMetaData.Provides, plan.FederationFieldConfiguration{ + TypeName: prov.TypeName, + FieldName: prov.FieldName, + SelectionSet: prov.SelectionSet, + }) + } + for _, ei := range ds.EntityInterfaces { + meta.FederationMetaData.EntityInterfaces = append(meta.FederationMetaData.EntityInterfaces, plan.EntityInterfaceConfiguration{ + InterfaceTypeName: ei.InterfaceTypeName, + ConcreteTypeNames: ei.ConcreteTypeNames, + }) + } + for _, io := range ds.InterfaceObjects { + meta.FederationMetaData.InterfaceObjects = append(meta.FederationMetaData.InterfaceObjects, plan.EntityInterfaceConfiguration{ + InterfaceTypeName: io.InterfaceTypeName, + ConcreteTypeNames: 
io.ConcreteTypeNames, + }) + } + + if len(ds.Directives) > 0 { + d := make(plan.DirectiveConfigurations, 0, len(ds.Directives)) + for _, dir := range ds.Directives { + d = append(d, plan.DirectiveConfiguration{ + DirectiveName: dir.DirectiveName, + RenameTo: dir.DirectiveName, + }) + } + meta.Directives = &d + } + + return meta +} + +// testEnv holds the shared test environment built by setupTestEnv. +type testEnv struct { + Pipeline *testPipeline + SupergraphDef string + DefaultSort string +} + +// setupTestEnv orchestrates steps 1-7 of the e2e test pipeline and returns a reusable testEnv. +func setupTestEnv(t *testing.T, setup BackendSetup) testEnv { + t.Helper() + + // 1. Parse the config schema SDL. + doc, parseReport := astparser.ParseGraphqlDocumentString(setup.ConfigSDL) + if parseReport.HasErrors() { + t.Fatalf("parse config schema: %s", parseReport.Error()) + } + parsedConfig, err := search_datasource.ParseConfigSchema(&doc) + if err != nil { + t.Fatalf("ParseConfigSchema: %v", err) + } + if len(parsedConfig.Entities) == 0 { + t.Fatal("no entities found in config schema") + } + + // 2. Generate the search subgraph SDL. + searchSDL, err := search_datasource.GenerateSubgraphSDL(parsedConfig) + if err != nil { + t.Fatalf("GenerateSubgraphSDL: %v", err) + } + + entity := &parsedConfig.Entities[0] + indexDirective := parsedConfig.Indices[0] + + // 3. Build index schema and create the search index. + indexSchema := buildIndexSchema(indexDirective.Name, entity) + idx := setup.CreateIndex(t, fmt.Sprintf("test_%s", setup.Name), indexSchema, []byte(indexDirective.ConfigJSON)) + + // 4. Populate with test data. + if err := idx.IndexDocuments(context.Background(), testProducts()); err != nil { + t.Fatalf("populate test data: %v", err) + } + if setup.Hooks.WaitForIndex != nil { + setup.Hooks.WaitForIndex(t) + } + + // 5. Start the entity subgraph server. + entityServer := httptest.NewServer(productdetails.Handler()) + t.Cleanup(entityServer.Close) + + // 6. Compose the subgraphs. + entitySDL := entitySubgraphSDL(t) + routerConfig := composeSubgraphs(t, searchSDL, entitySDL, entityServer.URL) + + // 7. Build the plan configuration. + searchConfig := entityToConfiguration(entity) + supergraphDef := routerConfig.EngineConfig.GraphqlSchema + planConfig := buildPlanConfiguration(t, routerConfig, idx, searchConfig, entityServer.URL, nil) + + return testEnv{ + Pipeline: &testPipeline{ + PlanConfig: planConfig, + SupergraphDef: supergraphDef, + }, + SupergraphDef: supergraphDef, + DefaultSort: `[{"field": "PRICE", "direction": "ASC"}]`, + } +} + +// RunAllScenarios orchestrates the full e2e test pipeline for a given backend. +func RunAllScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + // Run test scenarios. 
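+ // Most scenarios execute a GraphQL query through the shared pipeline as a
+ // parallel subtest and compare the raw JSON response against
+ // ExpectedResponses; supergraph_sdl instead checks the composed schema itself.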
+ t.Run("supergraph_sdl", func(t *testing.T) { + t.Parallel() + assertResponse(t, "supergraph_sdl", setup.ExpectedResponses, env.SupergraphDef) + }) + + t.Run("basic_search_with_entity_join", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { searchProducts(query: "*", sort: $s) { hits { node { id name price manufacturer } } totalCount } }` + if setup.Caps.HasTextSearch { + query = `query($s: [ProductSort!]) { searchProducts(query: "shoes", sort: $s) { hits { node { id name price manufacturer } } totalCount } }` + } + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "basic_search_with_entity_join", setup.ExpectedResponses, raw) + }) + + t.Run("filter_keyword_with_entity_join", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { hits { node { id name rating } } } }` + vars := `{"f": {"category": {"eq": "Footwear"}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_keyword_with_entity_join", setup.ExpectedResponses, raw) + }) + + t.Run("filter_boolean", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { hits { node { id manufacturer } } totalCount } }` + vars := `{"f": {"inStock": false}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_boolean", setup.ExpectedResponses, raw) + }) + + t.Run("filter_numeric_range", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { hits { node { id manufacturer } } totalCount } }` + vars := `{"f": {"price": {"gte": 30, "lte": 100}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_numeric_range", setup.ExpectedResponses, raw) + }) + + t.Run("filter_AND", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { hits { node { id manufacturer } } totalCount } }` + vars := `{"f": {"AND": [{"category": {"eq": "Footwear"}}, {"inStock": true}]}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_AND", setup.ExpectedResponses, raw) + }) + + t.Run("filter_OR", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { hits { node { id manufacturer } } totalCount } }` + vars := `{"f": {"OR": [{"category": {"eq": "Accessories"}}, {"price": {"gte": 100}}]}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_OR", setup.ExpectedResponses, raw) + }) + + t.Run("filter_NOT", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { hits { node { id manufacturer } } totalCount } }` + vars := `{"f": {"NOT": {"category": {"eq": "Footwear"}}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_NOT", setup.ExpectedResponses, raw) + }) + + t.Run("sort_with_entity_join", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { searchProducts(query: "*", sort: $s) { hits { node { id name price manufacturer } } } }` + vars := `{"s": 
[{"field": "PRICE", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "sort_with_entity_join", setup.ExpectedResponses, raw) + }) + + t.Run("pagination_with_entity_join", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!], $lim: Int, $off: Int) { + searchProducts(query: "*", sort: $s, limit: $lim, offset: $off) { + hits { node { id reviews { text stars } } } + totalCount + } + }` + vars := `{"s": [{"field": "PRICE", "direction": "ASC"}], "lim": 2, "off": 1}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "pagination_with_entity_join", setup.ExpectedResponses, raw) + }) + + t.Run("score_and_totalCount", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { searchProducts(query: "*", sort: $s) { hits { score node { id manufacturer } } totalCount } }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "score_and_totalCount", setup.ExpectedResponses, raw) + }) + + if setup.Caps.HasFacets { + t.Run("facets_with_entity_join", func(t *testing.T) { + t.Parallel() + query := `query($fac: [String!], $s: [ProductSort!]) { searchProducts(query: "*", facets: $fac, sort: $s) { hits { node { id manufacturer } } facets { field values { value count } } } }` + vars := `{"fac": ["category"], "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "facets_with_entity_join", setup.ExpectedResponses, raw) + }) + } +} + +// RunInlineScenarios runs e2e scenarios for inline style (no wrapper types). +func RunInlineScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + t.Run("supergraph_sdl", func(t *testing.T) { + t.Parallel() + assertResponse(t, "supergraph_sdl", setup.ExpectedResponses, env.SupergraphDef) + }) + + t.Run("basic_search_inline", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { searchProducts(query: "*", sort: $s) { id name price manufacturer } }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "basic_search_inline", setup.ExpectedResponses, raw) + }) + + t.Run("filter_keyword_inline", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { id name } }` + vars := `{"f": {"category": {"eq": "Footwear"}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_keyword_inline", setup.ExpectedResponses, raw) + }) + + t.Run("filter_boolean_inline", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { id manufacturer } }` + vars := `{"f": {"inStock": false}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_boolean_inline", setup.ExpectedResponses, raw) + }) + + t.Run("filter_numeric_range_inline", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { id manufacturer } }` + vars := `{"f": {"price": {"gte": 30, "lte": 100}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_numeric_range_inline", setup.ExpectedResponses, raw) + }) + + t.Run("filter_AND_inline", func(t *testing.T) { + t.Parallel() + query 
:= `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { id manufacturer } }` + vars := `{"f": {"AND": [{"category": {"eq": "Footwear"}}, {"inStock": true}]}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_AND_inline", setup.ExpectedResponses, raw) + }) + + t.Run("filter_OR_inline", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { id manufacturer } }` + vars := `{"f": {"OR": [{"category": {"eq": "Accessories"}}, {"price": {"gte": 100}}]}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_OR_inline", setup.ExpectedResponses, raw) + }) + + t.Run("filter_NOT_inline", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { searchProducts(query: "*", filter: $f, sort: $s) { id manufacturer } }` + vars := `{"f": {"NOT": {"category": {"eq": "Footwear"}}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "filter_NOT_inline", setup.ExpectedResponses, raw) + }) + + t.Run("sort_inline", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { searchProducts(query: "*", sort: $s) { id name price manufacturer } }` + vars := `{"s": [{"field": "PRICE", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "sort_inline", setup.ExpectedResponses, raw) + }) + + t.Run("pagination_inline", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!], $lim: Int, $off: Int) { searchProducts(query: "*", sort: $s, limit: $lim, offset: $off) { id name } }` + vars := `{"s": [{"field": "PRICE", "direction": "ASC"}], "lim": 2, "off": 1}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "pagination_inline", setup.ExpectedResponses, raw) + }) +} + +// RunCursorScenarios runs cursor-based pagination e2e scenarios for a given backend. +// Validates structure and content dynamically because cursor values depend on backend internals. +func RunCursorScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupTestEnv(t, setup) + pipeline := env.Pipeline + + t.Run("cursor_forward_page1", func(t *testing.T) { + t.Parallel() + gqlQuery := `query($s: [ProductSort!], $first: Int) { + searchProducts(query: "*", sort: $s, first: $first) { + edges { cursor node { id name price manufacturer } } + pageInfo { hasNextPage hasPreviousPage startCursor endCursor } + totalCount + } + }` + vars := `{"s": [{"field": "PRICE", "direction": "ASC"}], "first": 2}` + raw := executeQuery(t, pipeline, gqlQuery, vars) + assertCursorResponse(t, raw, cursorExpectation{ + edgeCount: 2, + firstNodeID: "4", // Wool Socks + lastNodeID: "3", // Leather Belt + hasNextPage: true, + hasPreviousPage: false, + totalCount: 4, + checkCursors: true, + }) + }) + + t.Run("cursor_forward_page2", func(t *testing.T) { + t.Parallel() + // Get page 1 to obtain cursor. 
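+		// This follows the Relay pattern: page 1's pageInfo.endCursor is passed
+		// verbatim as `after` on the second request, so the effective variables
+		// look like the following (cursor value illustrative, since the encoding
+		// is backend-specific):
+		//
+		//	{"s": [{"field": "PRICE", "direction": "ASC"}], "first": 2, "after": "<opaque cursor>"}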
+ gqlQuery1 := `query($s: [ProductSort!], $first: Int) { + searchProducts(query: "*", sort: $s, first: $first) { + edges { cursor node { id } } + pageInfo { endCursor } + } + }` + vars1 := `{"s": [{"field": "PRICE", "direction": "ASC"}], "first": 2}` + raw1 := executeQuery(t, pipeline, gqlQuery1, vars1) + endCursor := extractEndCursor(t, raw1) + + gqlQuery2 := `query($s: [ProductSort!], $first: Int, $after: String) { + searchProducts(query: "*", sort: $s, first: $first, after: $after) { + edges { cursor node { id name manufacturer } } + pageInfo { hasNextPage hasPreviousPage } + totalCount + } + }` + vars2 := fmt.Sprintf(`{"s": [{"field": "PRICE", "direction": "ASC"}], "first": 2, "after": %q}`, endCursor) + raw2 := executeQuery(t, pipeline, gqlQuery2, vars2) + assertCursorResponse(t, raw2, cursorExpectation{ + edgeCount: 2, + firstNodeID: "1", // Running Shoes + lastNodeID: "2", // Basketball Shoes + hasNextPage: false, + hasPreviousPage: true, + totalCount: 4, + }) + }) + + t.Run("cursor_entity_join", func(t *testing.T) { + t.Parallel() + gqlQuery := `query($s: [ProductSort!], $first: Int) { + searchProducts(query: "*", sort: $s, first: $first) { + edges { node { id manufacturer } } + } + }` + vars := `{"s": [{"field": "PRICE", "direction": "ASC"}], "first": 1}` + raw := executeQuery(t, pipeline, gqlQuery, vars) + assertCursorResponse(t, raw, cursorExpectation{ + edgeCount: 1, + firstNodeID: "4", + }) + assertContainsJSON(t, raw, `"manufacturer":"Smartwool"`) + }) +} + +type cursorExpectation struct { + edgeCount int + firstNodeID string + lastNodeID string + hasNextPage bool + hasPreviousPage bool + totalCount int + checkCursors bool // when true, asserts that each edge has a non-empty cursor +} + +func assertCursorResponse(t *testing.T, raw string, expect cursorExpectation) { + t.Helper() + var resp struct { + Data struct { + SearchProducts struct { + Edges []struct { + Cursor string `json:"cursor"` + Node struct { + ID string `json:"id"` + } `json:"node"` + } `json:"edges"` + PageInfo struct { + HasNextPage bool `json:"hasNextPage"` + HasPreviousPage bool `json:"hasPreviousPage"` + StartCursor *string `json:"startCursor"` + EndCursor *string `json:"endCursor"` + } `json:"pageInfo"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal response: %v\nraw: %s", err, raw) + } + sp := resp.Data.SearchProducts + if len(sp.Edges) != expect.edgeCount { + t.Errorf("expected %d edges, got %d\nraw: %s", expect.edgeCount, len(sp.Edges), raw) + return + } + if expect.firstNodeID != "" && len(sp.Edges) > 0 && sp.Edges[0].Node.ID != expect.firstNodeID { + t.Errorf("first edge id=%q, want %q\nraw: %s", sp.Edges[0].Node.ID, expect.firstNodeID, raw) + } + if expect.lastNodeID != "" && len(sp.Edges) > 1 && sp.Edges[len(sp.Edges)-1].Node.ID != expect.lastNodeID { + t.Errorf("last edge id=%q, want %q\nraw: %s", sp.Edges[len(sp.Edges)-1].Node.ID, expect.lastNodeID, raw) + } + if sp.PageInfo.HasNextPage != expect.hasNextPage { + t.Errorf("hasNextPage=%v, want %v\nraw: %s", sp.PageInfo.HasNextPage, expect.hasNextPage, raw) + } + if sp.PageInfo.HasPreviousPage != expect.hasPreviousPage { + t.Errorf("hasPreviousPage=%v, want %v\nraw: %s", sp.PageInfo.HasPreviousPage, expect.hasPreviousPage, raw) + } + if expect.totalCount > 0 && sp.TotalCount != expect.totalCount { + t.Errorf("totalCount=%d, want %d\nraw: %s", sp.TotalCount, expect.totalCount, raw) + } + if expect.checkCursors { + for i, edge := 
range sp.Edges { + if edge.Cursor == "" { + t.Errorf("edge[%d] has empty cursor\nraw: %s", i, raw) + } + } + } +} + +func extractEndCursor(t *testing.T, raw string) string { + t.Helper() + var resp struct { + Data struct { + SearchProducts struct { + PageInfo struct { + EndCursor *string `json:"endCursor"` + } `json:"pageInfo"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal response for cursor extraction: %v\nraw: %s", err, raw) + } + if resp.Data.SearchProducts.PageInfo.EndCursor == nil { + t.Fatalf("endCursor is null\nraw: %s", raw) + } + return *resp.Data.SearchProducts.PageInfo.EndCursor +} + +func assertContainsJSON(t *testing.T, raw, substr string) { + t.Helper() + if !strings.Contains(raw, substr) { + t.Errorf("expected response to contain %q\nraw: %s", substr, raw) + } +} + +func assertResponse(t *testing.T, testName string, expected map[string]string, got string) { + t.Helper() + want, ok := expected[testName] + if !ok { + t.Fatalf("no expected response for %q (got: %s)", testName, got) + return + } + if got != want { + t.Fatalf("response mismatch\ngot: %s\nwant: %s", got, want) + } +} + +// --- Vector search test infrastructure --- + +// vectorConfigSDL returns a config SDL with @embedding for the given backend. +func vectorConfigSDL(backend, configJSON string) string { + return fmt.Sprintf(` +extend schema @index(name: "products", backend: "%s", config: "%s") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) + _embedding: [Float!] @embedding(fields: "name description", template: "{{name}}. {{description}}", model: "test-model") +} +`, backend, configJSON) +} + +// VectorBackendSetup extends BackendSetup with an embedder for vector search tests. +type VectorBackendSetup struct { + BackendSetup + Embedder searchindex.Embedder +} + +// setupVectorTestEnv is like setupTestEnv but populates vector data and wires up the embedder. +func setupVectorTestEnv(t *testing.T, setup VectorBackendSetup) testEnv { + t.Helper() + + // 1. Parse the config schema SDL. + doc, parseReport := astparser.ParseGraphqlDocumentString(setup.ConfigSDL) + if parseReport.HasErrors() { + t.Fatalf("parse config schema: %s", parseReport.Error()) + } + parsedConfig, err := search_datasource.ParseConfigSchema(&doc) + if err != nil { + t.Fatalf("ParseConfigSchema: %v", err) + } + if len(parsedConfig.Entities) == 0 { + t.Fatal("no entities found in config schema") + } + + // 2. Generate the search subgraph SDL. + searchSDL, err := search_datasource.GenerateSubgraphSDL(parsedConfig) + if err != nil { + t.Fatalf("GenerateSubgraphSDL: %v", err) + } + + entity := &parsedConfig.Entities[0] + indexDirective := parsedConfig.Indices[0] + + // 3. Build index schema and set vector dimensions from embedder. 
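+	// The @embedding directive declares no dimension, so VECTOR fields arrive
+	// here with Dimensions == 0 and inherit the embedder's output size before
+	// CreateIndex runs, letting backends size their vector storage up front.
+	// For example, a mock embedder reporting Dimensions() == 8 implies every
+	// document vector is a []float32 of length 8 (the value 8 is illustrative).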
+ indexSchema := buildIndexSchema(indexDirective.Name, entity) + for i, f := range indexSchema.Fields { + if f.Type == searchindex.FieldTypeVector && f.Dimensions == 0 { + indexSchema.Fields[i].Dimensions = setup.Embedder.Dimensions() + } + } + + idx := setup.CreateIndex(t, fmt.Sprintf("test_%s_vector", setup.Name), indexSchema, []byte(indexDirective.ConfigJSON)) + + // 4. Populate with vector test data. + if err := idx.IndexDocuments(context.Background(), testVectorProducts(setup.Embedder)); err != nil { + t.Fatalf("populate vector test data: %v", err) + } + if setup.Hooks.WaitForIndex != nil { + setup.Hooks.WaitForIndex(t) + } + + // 5. Start the entity subgraph server. + entityServer := httptest.NewServer(productdetails.Handler()) + t.Cleanup(entityServer.Close) + + // 6. Compose the subgraphs. + entitySDL := entitySubgraphSDL(t) + routerConfig := composeSubgraphs(t, searchSDL, entitySDL, entityServer.URL) + + // 7. Build the plan configuration with embedder registry. + searchConfig := entityToConfiguration(entity) + supergraphDef := routerConfig.EngineConfig.GraphqlSchema + + embedderRegistry := searchindex.NewEmbedderRegistry() + if len(searchConfig.EmbeddingFields) > 0 { + embedderRegistry.Register(searchConfig.EmbeddingFields[0].Model, setup.Embedder) + } + + planConfig := buildPlanConfiguration(t, routerConfig, idx, searchConfig, entityServer.URL, embedderRegistry) + + return testEnv{ + Pipeline: &testPipeline{ + PlanConfig: planConfig, + SupergraphDef: supergraphDef, + }, + SupergraphDef: supergraphDef, + DefaultSort: `[{"field": "PRICE", "direction": "ASC"}]`, + } +} + +// RunVectorScenarios runs e2e vector search scenarios for a given backend. +// Uses structural assertions since distance values are backend-specific. +func RunVectorScenarios(t *testing.T, setup VectorBackendSetup) { + t.Helper() + + env := setupVectorTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + t.Run("vector_text_query_auto_embed", func(t *testing.T) { + t.Parallel() + // Uses search: {query: "..."} which Source auto-embeds via the mock embedder. + query := `query($s: [ProductSort!]) { + searchProducts(search: {query: "shoes for running"}, sort: $s) { + hits { score distance node { id name price manufacturer } } + totalCount + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + totalCount: 4, + hasEntityJoin: map[string]string{ + "manufacturer": "", + }, + }) + }) + + t.Run("vector_raw_vector_query", func(t *testing.T) { + t.Parallel() + // Use product 1's exact vector — it should be the closest match (distance ≈ 0). + vec, err := setup.Embedder.EmbedSingle(context.Background(), "Running Shoes. 
Great for jogging and marathons") + if err != nil { + t.Fatalf("embed: %v", err) + } + vecJSON := formatVectorJSON(vec) + query := `query($search: SearchProductInput!, $s: [ProductSort!]) { + searchProducts(search: $search, sort: $s) { + hits { distance node { id name } } + totalCount + } + }` + vars := fmt.Sprintf(`{"search": {"vector": %s}, "s": %s}`, vecJSON, defaultSort) + raw := executeQuery(t, pipeline, query, vars) + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + totalCount: 4, + }) + }) + + t.Run("vector_with_filter", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(search: {query: "shoes"}, filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"category": {"eq": "Footwear"}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + maxTotalCount: 3, // at most 3 footwear products + allMatchFilter: func(node map[string]any) bool { + // All returned nodes should be footwear (resolved via entity join) + return node["id"] != "3" // id=3 is the Leather Belt (Accessories) + }, + }) + }) + + t.Run("vector_distance_populated", func(t *testing.T) { + t.Parallel() + query := `query { + searchProducts(search: {query: "running shoes"}) { + hits { distance node { id } } + } + }` + raw := executeQuery(t, pipeline, query, "") + assertVectorDistances(t, raw) + }) + + t.Run("vector_entity_join", func(t *testing.T) { + t.Parallel() + // Verify federation entity join works with vector search results. + query := `query($s: [ProductSort!]) { + searchProducts(search: {query: "socks"}, sort: $s) { + hits { node { id manufacturer rating } } + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + hasEntityJoin: map[string]string{ + "manufacturer": "", + "rating": "", + }, + }) + }) +} + +// vectorExpectation defines structural assertions for vector search responses. 
+type vectorExpectation struct {
+	minHits        int
+	totalCount     int                       // exact expected totalCount (0 = skip check)
+	maxTotalCount  int                       // max expected totalCount (0 = skip check)
+	hasEntityJoin  map[string]string         // fields that should be present in nodes (value ignored)
+	allMatchFilter func(map[string]any) bool // if set, all nodes must pass this
+}
+
+func assertVectorResponse(t *testing.T, raw string, expect vectorExpectation) {
+	t.Helper()
+	var resp struct {
+		Data struct {
+			SearchProducts struct {
+				Hits []struct {
+					Score    float64        `json:"score"`
+					Distance float64        `json:"distance"`
+					Node     map[string]any `json:"node"`
+				} `json:"hits"`
+				TotalCount int `json:"totalCount"`
+			} `json:"searchProducts"`
+		} `json:"data"`
+	}
+	if err := json.Unmarshal([]byte(raw), &resp); err != nil {
+		t.Fatalf("unmarshal: %v\nraw: %s", err, raw)
+	}
+	sp := resp.Data.SearchProducts
+	if len(sp.Hits) < expect.minHits {
+		t.Errorf("expected at least %d hits, got %d\nraw: %s", expect.minHits, len(sp.Hits), raw)
+	}
+	if expect.totalCount > 0 && sp.TotalCount != expect.totalCount {
+		t.Errorf("totalCount=%d, want %d\nraw: %s", sp.TotalCount, expect.totalCount, raw)
+	}
+	if expect.maxTotalCount > 0 && sp.TotalCount > expect.maxTotalCount {
+		t.Errorf("totalCount=%d, want at most %d\nraw: %s", sp.TotalCount, expect.maxTotalCount, raw)
+	}
+	for field := range expect.hasEntityJoin {
+		for i, hit := range sp.Hits {
+			if _, ok := hit.Node[field]; !ok {
+				t.Errorf("hit[%d] missing entity join field %q\nraw: %s", i, field, raw)
+			}
+		}
+	}
+	if expect.allMatchFilter != nil {
+		for i, hit := range sp.Hits {
+			if !expect.allMatchFilter(hit.Node) {
+				t.Errorf("hit[%d] failed filter assertion\nnode: %v\nraw: %s", i, hit.Node, raw)
+			}
+		}
+	}
+}
+
+func assertVectorDistances(t *testing.T, raw string) {
+	t.Helper()
+	var resp struct {
+		Data struct {
+			SearchProducts struct {
+				Hits []struct {
+					Distance float64 `json:"distance"`
+					Node     struct {
+						ID string `json:"id"`
+					} `json:"node"`
+				} `json:"hits"`
+			} `json:"searchProducts"`
+		} `json:"data"`
+	}
+	if err := json.Unmarshal([]byte(raw), &resp); err != nil {
+		t.Fatalf("unmarshal: %v\nraw: %s", err, raw)
+	}
+	if len(resp.Data.SearchProducts.Hits) == 0 {
+		t.Fatalf("expected hits, got 0\nraw: %s", raw)
+	}
+}
+
+func formatVectorJSON(vec []float32) string {
+	b, _ := json.Marshal(vec)
+	return string(b)
+}
+
+// --- Boosting test infrastructure ---
+
+// boostConfigSDL returns a config SDL with name field boosted to weight 2.0 for the given backend.
+func boostConfigSDL(backend, configJSON string) string {
+	return fmt.Sprintf(`
+extend schema @index(name: "products", backend: "%s", config: "%s")
+
+type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") {
+  id: ID!
+  name: String @indexed(type: TEXT, filterable: true, sortable: true, weight: 2.0)
+  description: String @indexed(type: TEXT)
+  category: String @indexed(type: KEYWORD, filterable: true, sortable: true)
+  price: Float @indexed(type: NUMERIC, filterable: true, sortable: true)
+  inStock: Boolean @indexed(type: BOOL, filterable: true)
+}
+`, backend, configJSON)
+}
+
+// --- Geo search test infrastructure ---
+
+// geoConfigSDL returns a config SDL with a location GEO field for the given backend.
+func geoConfigSDL(backend, configJSON string) string {
+	return fmt.Sprintf(`
+extend schema @index(name: "products", backend: "%s", config: "%s")
+
+type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") {
+  id: ID!
+ name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) + location: GeoPoint @indexed(type: GEO, filterable: true, sortable: true) +} +`, backend, configJSON) +} + +// GeoBackendSetup extends BackendSetup for geo tests. +type GeoBackendSetup struct { + BackendSetup +} + +// setupGeoTestEnv is like setupTestEnv but populates with geo data. +func setupGeoTestEnv(t *testing.T, setup GeoBackendSetup) testEnv { + t.Helper() + + doc, parseReport := astparser.ParseGraphqlDocumentString(setup.ConfigSDL) + if parseReport.HasErrors() { + t.Fatalf("parse config schema: %s", parseReport.Error()) + } + parsedConfig, err := search_datasource.ParseConfigSchema(&doc) + if err != nil { + t.Fatalf("ParseConfigSchema: %v", err) + } + if len(parsedConfig.Entities) == 0 { + t.Fatal("no entities found in config schema") + } + + searchSDL, err := search_datasource.GenerateSubgraphSDL(parsedConfig) + if err != nil { + t.Fatalf("GenerateSubgraphSDL: %v", err) + } + + entity := &parsedConfig.Entities[0] + indexDirective := parsedConfig.Indices[0] + + indexSchema := buildIndexSchema(indexDirective.Name, entity) + idx := setup.CreateIndex(t, fmt.Sprintf("test_%s_geo", setup.Name), indexSchema, []byte(indexDirective.ConfigJSON)) + + if err := idx.IndexDocuments(context.Background(), testGeoProducts()); err != nil { + t.Fatalf("populate geo test data: %v", err) + } + if setup.Hooks.WaitForIndex != nil { + setup.Hooks.WaitForIndex(t) + } + + entityServer := httptest.NewServer(productdetails.Handler()) + t.Cleanup(entityServer.Close) + + entitySDL := entitySubgraphSDL(t) + routerConfig := composeSubgraphs(t, searchSDL, entitySDL, entityServer.URL) + + searchConfig := entityToConfiguration(entity) + supergraphDef := routerConfig.EngineConfig.GraphqlSchema + planConfig := buildPlanConfiguration(t, routerConfig, idx, searchConfig, entityServer.URL, nil) + + return testEnv{ + Pipeline: &testPipeline{ + PlanConfig: planConfig, + SupergraphDef: supergraphDef, + }, + SupergraphDef: supergraphDef, + DefaultSort: `[{"field": "PRICE", "direction": "ASC"}]`, + } +} + +// RunGeoScenarios runs full-stack e2e geo-spatial search scenarios with federation entity joins. +// Products are at: +// +// #1 Running Shoes: New York (40.7128, -74.0060) +// #2 Basketball Shoes: Midtown Manhattan (40.7580, -73.9855) — ~5km from #1 +// #3 Leather Belt: Los Angeles (34.0522, -118.2437) — ~3,940km from #1 +// #4 Wool Socks: London (51.5074, -0.1278) — ~5,570km from #1 +func RunGeoScenarios(t *testing.T, setup GeoBackendSetup) { + t.Helper() + + env := setupGeoTestEnv(t, setup) + pipeline := env.Pipeline + + t.Run("geo_distance_filter_with_entity_join", func(t *testing.T) { + t.Parallel() + // Search within 10km of New York — should find #1 and #2. 
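+		// The filter key below is derived from the GEO field name in the config
+		// SDL: a field "location" is exposed on ProductFilter as
+		// "location_distance" (and "location_boundingBox", exercised further
+		// down), with the shape:
+		//
+		//	location_distance: {center: {lat: Float, lon: Float}, distance: String}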
+ query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"location_distance": {"center": {"lat": 40.7128, "lon": -74.0060}, "distance": "10km"}}, "s": [{"field": "PRICE", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertGeoResponse(t, raw, geoExpectation{hitCount: 2}) + }) + + t.Run("geo_distance_filter_wide", func(t *testing.T) { + t.Parallel() + // Search within 5000km of New York — should find #1, #2, #3 (not London). + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id manufacturer } } + totalCount + } + }` + vars := `{"f": {"location_distance": {"center": {"lat": 40.7128, "lon": -74.0060}, "distance": "5000km"}}, "s": [{"field": "PRICE", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertGeoResponse(t, raw, geoExpectation{hitCount: 3}) + }) + + t.Run("geo_bounding_box_filter", func(t *testing.T) { + t.Parallel() + // Bounding box around NYC area — should find #1 and #2. + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"location_boundingBox": {"topLeft": {"lat": 41.0, "lon": -74.5}, "bottomRight": {"lat": 40.5, "lon": -73.5}}}, "s": [{"field": "PRICE", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertGeoResponse(t, raw, geoExpectation{hitCount: 2}) + }) + + t.Run("geo_distance_sort_with_entity_join", func(t *testing.T) { + t.Parallel() + // Sort by distance from NYC ASC. + query := `query($gs: GeoDistanceSortInput) { + searchProducts(query: "*", geoSort: $gs) { + hits { geoDistance node { id name manufacturer } } + totalCount + } + }` + vars := `{"gs": {"field": "location", "center": {"lat": 40.7128, "lon": -74.0060}, "direction": "ASC", "unit": "km"}}` + raw := executeQuery(t, pipeline, query, vars) + assertGeoSortResponse(t, raw) + }) + + t.Run("geo_filter_combined_with_keyword", func(t *testing.T) { + t.Parallel() + // Footwear within 100km of NYC. 
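+		// Sibling keys on ProductFilter are expected to combine as AND, so this
+		// should behave like the explicit form (compare the filter_AND scenario
+		// in RunAllScenarios):
+		//
+		//	{"AND": [{"location_distance": {...}}, {"category": {"eq": "Footwear"}}]}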
+ query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"location_distance": {"center": {"lat": 40.7128, "lon": -74.0060}, "distance": "100km"}, "category": {"eq": "Footwear"}}, "s": [{"field": "PRICE", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertGeoResponse(t, raw, geoExpectation{hitCount: 2}) + }) +} + +type geoExpectation struct { + hitCount int +} + +func assertGeoResponse(t *testing.T, raw string, expect geoExpectation) { + t.Helper() + var resp struct { + Data struct { + SearchProducts struct { + Hits []any `json:"hits"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal: %v\nraw: %s", err, raw) + } + if resp.Data.SearchProducts.TotalCount != expect.hitCount { + t.Errorf("expected totalCount=%d, got %d\nraw: %s", expect.hitCount, resp.Data.SearchProducts.TotalCount, raw) + } +} + +func assertGeoSortResponse(t *testing.T, raw string) { + t.Helper() + var resp struct { + Data struct { + SearchProducts struct { + Hits []struct { + GeoDistance *float64 `json:"geoDistance"` + Node map[string]any `json:"node"` + } `json:"hits"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal: %v\nraw: %s", err, raw) + } + sp := resp.Data.SearchProducts + if len(sp.Hits) < 4 { + t.Fatalf("expected >= 4 hits, got %d\nraw: %s", len(sp.Hits), raw) + } + // Nearest to NYC should be Running Shoes. + name, _ := sp.Hits[0].Node["name"].(string) + if name != "Running Shoes" { + t.Errorf("expected first hit to be Running Shoes (nearest to NYC), got %q\nraw: %s", name, raw) + } + // Farthest should be Wool Socks (London). + lastName, _ := sp.Hits[3].Node["name"].(string) + if lastName != "Wool Socks" { + t.Errorf("expected last hit to be Wool Socks (London, farthest), got %q\nraw: %s", lastName, raw) + } + // All hits should have geoDistance populated. + for i, hit := range sp.Hits { + if hit.GeoDistance == nil { + t.Errorf("hit[%d]: expected geoDistance to be populated\nraw: %s", i, raw) + } + } + // Entity join should have resolved manufacturer. + for i, hit := range sp.Hits { + if _, ok := hit.Node["manufacturer"]; !ok { + t.Errorf("hit[%d]: missing entity join field 'manufacturer'\nraw: %s", i, raw) + } + } +} + +// --- Date/DateTime test infrastructure --- + +// dateConfigSDL returns a config SDL with Date and DateTime fields for the given backend. +func dateConfigSDL(backend, configJSON string) string { + return fmt.Sprintf(` +extend schema @index(name: "products", backend: "%s", config: "%s") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) + createdAt: Date @indexed(type: DATE, filterable: true, sortable: true) + updatedAt: DateTime @indexed(type: DATETIME, filterable: true, sortable: true) +} +`, backend, configJSON) +} + +// setupDateTestEnv is like setupTestEnv but populates with date data. 
+func setupDateTestEnv(t *testing.T, setup BackendSetup) testEnv { + t.Helper() + + doc, parseReport := astparser.ParseGraphqlDocumentString(setup.ConfigSDL) + if parseReport.HasErrors() { + t.Fatalf("parse config schema: %s", parseReport.Error()) + } + parsedConfig, err := search_datasource.ParseConfigSchema(&doc) + if err != nil { + t.Fatalf("ParseConfigSchema: %v", err) + } + if len(parsedConfig.Entities) == 0 { + t.Fatal("no entities found in config schema") + } + + searchSDL, err := search_datasource.GenerateSubgraphSDL(parsedConfig) + if err != nil { + t.Fatalf("GenerateSubgraphSDL: %v", err) + } + + entity := &parsedConfig.Entities[0] + indexDirective := parsedConfig.Indices[0] + + indexSchema := buildIndexSchema(indexDirective.Name, entity) + idx := setup.CreateIndex(t, fmt.Sprintf("test_%s_date", setup.Name), indexSchema, []byte(indexDirective.ConfigJSON)) + + if err := idx.IndexDocuments(context.Background(), testDateProducts()); err != nil { + t.Fatalf("populate date test data: %v", err) + } + if setup.Hooks.WaitForIndex != nil { + setup.Hooks.WaitForIndex(t) + } + + entityServer := httptest.NewServer(productdetails.Handler()) + t.Cleanup(entityServer.Close) + + entitySDL := entitySubgraphSDL(t) + routerConfig := composeSubgraphs(t, searchSDL, entitySDL, entityServer.URL) + + searchConfig := entityToConfiguration(entity) + supergraphDef := routerConfig.EngineConfig.GraphqlSchema + planConfig := buildPlanConfiguration(t, routerConfig, idx, searchConfig, entityServer.URL, nil) + + return testEnv{ + Pipeline: &testPipeline{ + PlanConfig: planConfig, + SupergraphDef: supergraphDef, + }, + SupergraphDef: supergraphDef, + DefaultSort: `[{"field": "PRICE", "direction": "ASC"}]`, + } +} + +// RunDateScenarios runs full-stack e2e date/datetime filter and sort scenarios. 
+// Products have: +// +// #1 Running Shoes: CreatedAt 2024-01-15, UpdatedAt 2024-01-15T10:30:00Z +// #2 Basketball Shoes: CreatedAt 2024-03-20, UpdatedAt 2024-03-20T14:00:00Z +// #3 Leather Belt: CreatedAt 2024-06-01, UpdatedAt 2024-06-01T09:00:00Z +// #4 Wool Socks: CreatedAt 2024-09-10, UpdatedAt 2024-09-10T16:45:00Z +func RunDateScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupDateTestEnv(t, setup) + pipeline := env.Pipeline + + t.Run("date_eq_filter", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"createdAt": {"eq": "2024-01-15"}}, "s": [{"field": "CREATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "date_eq_filter", setup.ExpectedResponses, raw) + }) + + t.Run("date_range_gte_lte", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"createdAt": {"gte": "2024-01-15", "lte": "2024-06-01"}}, "s": [{"field": "CREATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "date_range_gte_lte", setup.ExpectedResponses, raw) + }) + + t.Run("date_gt_lt", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"createdAt": {"gt": "2024-01-15", "lt": "2024-09-10"}}, "s": [{"field": "CREATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "date_gt_lt", setup.ExpectedResponses, raw) + }) + + t.Run("date_after_before", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"createdAt": {"after": "2024-03-20", "before": "2024-09-10"}}, "s": [{"field": "CREATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "date_after_before", setup.ExpectedResponses, raw) + }) + + t.Run("datetime_eq_filter", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"updatedAt": {"eq": "2024-03-20T14:00:00Z"}}, "s": [{"field": "UPDATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "datetime_eq_filter", setup.ExpectedResponses, raw) + }) + + t.Run("datetime_range_gte_lte", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"updatedAt": {"gte": "2024-01-15T10:30:00Z", "lte": "2024-06-01T09:00:00Z"}}, "s": [{"field": "UPDATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "datetime_range_gte_lte", setup.ExpectedResponses, raw) + }) + + t.Run("datetime_after_before", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + 
searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"updatedAt": {"after": "2024-06-01T09:00:00Z"}}, "s": [{"field": "UPDATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "datetime_after_before", setup.ExpectedResponses, raw) + }) + + t.Run("date_sort_asc", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { + searchProducts(query: "*", sort: $s) { + hits { node { id name manufacturer } } + } + }` + vars := `{"s": [{"field": "CREATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "date_sort_asc", setup.ExpectedResponses, raw) + }) + + t.Run("date_sort_desc", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { + searchProducts(query: "*", sort: $s) { + hits { node { id name manufacturer } } + } + }` + vars := `{"s": [{"field": "CREATEDAT", "direction": "DESC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "date_sort_desc", setup.ExpectedResponses, raw) + }) + + t.Run("datetime_sort_asc", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { + searchProducts(query: "*", sort: $s) { + hits { node { id name manufacturer } } + } + }` + vars := `{"s": [{"field": "UPDATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "datetime_sort_asc", setup.ExpectedResponses, raw) + }) + + t.Run("date_combined_filter", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"AND": [{"category": {"eq": "Footwear"}}, {"createdAt": {"gte": "2024-03-01"}}]}, "s": [{"field": "CREATEDAT", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + assertResponse(t, "date_combined_filter", setup.ExpectedResponses, raw) + }) +} + +// --- Highlight test infrastructure --- + +// RunHighlightScenarios runs e2e scenarios that exercise search highlights. +// Highlights are backend-dependent — uses structural assertions. +func RunHighlightScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + t.Run("highlights_returned_for_text_match", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { + searchProducts(query: "shoes", sort: $s) { + hits { highlights { field fragments } node { id name } } + totalCount + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertHighlightResponse(t, raw) + }) +} + +func assertHighlightResponse(t *testing.T, raw string) { + t.Helper() + var resp struct { + Data struct { + SearchProducts struct { + Hits []struct { + Highlights []struct { + Field string `json:"field"` + Fragments []string `json:"fragments"` + } `json:"highlights"` + Node map[string]any `json:"node"` + } `json:"hits"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal: %v\nraw: %s", err, raw) + } + sp := resp.Data.SearchProducts + if len(sp.Hits) == 0 { + t.Fatalf("expected at least 1 hit\nraw: %s", raw) + } + // At least one hit should have highlights. 
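+	// The shape being asserted, with fragment markup left to the backend (the
+	// <em> tags are only an example of a common convention, not a guarantee):
+	//
+	//	"highlights": [{"field": "name", "fragments": ["Running <em>Shoes</em>"]}]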
+ hasHighlights := false + for _, hit := range sp.Hits { + if len(hit.Highlights) > 0 { + hasHighlights = true + for _, hl := range hit.Highlights { + if hl.Field == "" { + t.Errorf("highlight has empty field\nraw: %s", raw) + } + if len(hl.Fragments) == 0 { + t.Errorf("highlight for field %q has no fragments\nraw: %s", hl.Field, raw) + } + } + } + } + if !hasHighlights { + t.Errorf("expected at least one hit to have highlights\nraw: %s", raw) + } +} + +// --- Additional filter test infrastructure --- + +// RunAdditionalFilterScenarios runs e2e scenarios for filter operators not covered +// by RunAllScenarios (ne, IN for strings, startsWith). +func RunAdditionalFilterScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + t.Run("filter_string_ne", func(t *testing.T) { + t.Parallel() + // "ne" on keyword — should exclude category "Footwear". + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id name manufacturer } } + totalCount + } + }` + vars := `{"f": {"category": {"ne": "Footwear"}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertFilterResponse(t, raw, "filter_string_ne", setup.ExpectedResponses) + }) + + t.Run("filter_string_in", func(t *testing.T) { + t.Parallel() + // "in" on keyword — should match Footwear OR Accessories = all 4. + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id manufacturer } } + totalCount + } + }` + vars := `{"f": {"category": {"in": ["Footwear", "Accessories"]}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertFilterResponse(t, raw, "filter_string_in", setup.ExpectedResponses) + }) + + if setup.Caps.HasTextSearch { + t.Run("filter_string_startsWith", func(t *testing.T) { + t.Parallel() + // "startsWith" on keyword — should match "Foot*". + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(query: "*", filter: $f, sort: $s) { + hits { node { id manufacturer } } + totalCount + } + }` + vars := `{"f": {"category": {"startsWith": "Foot"}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertFilterResponse(t, raw, "filter_string_startsWith", setup.ExpectedResponses) + }) + } +} + +func assertFilterResponse(t *testing.T, raw, testName string, expected map[string]string) { + t.Helper() + want, ok := expected[testName] + if !ok { + // If no exact expected response, at least verify it parses. + var resp struct { + Data struct { + SearchProducts struct { + Hits []any `json:"hits"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("response doesn't parse as JSON: %v\nraw: %s", err, raw) + } + return + } + if raw != want { + t.Fatalf("response mismatch\ngot: %s\nwant: %s", raw, want) + } +} + +// RunHybridScenarios runs e2e hybrid search scenarios (text + vector combined). +// It uses the vector test environment where Source auto-embeds text queries +// and sets both TextQuery and Vector on the SearchRequest. 
+func RunHybridScenarios(t *testing.T, setup VectorBackendSetup) { + t.Helper() + + env := setupVectorTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + t.Run("hybrid_text_query_returns_results", func(t *testing.T) { + t.Parallel() + // With an embedder configured, search: {query: "..."} sets both TextQuery and Vector. + query := `query($s: [ProductSort!]) { + searchProducts(search: {query: "shoes"}, sort: $s) { + hits { score node { id name } } + totalCount + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + }) + }) + + t.Run("hybrid_text_relevance", func(t *testing.T) { + t.Parallel() + // Search for "running" — Running Shoes should appear in results. + query := `query { + searchProducts(search: {query: "running"}) { + hits { score node { id name } } + totalCount + } + }` + raw := executeQuery(t, pipeline, query, "") + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + }) + // Verify "Running Shoes" (id=1) is in the results. + assertContainsJSON(t, raw, `"id":"1"`) + }) + + t.Run("hybrid_with_filter", func(t *testing.T) { + t.Parallel() + // Hybrid search + category filter. + query := `query($f: ProductFilter, $s: [ProductSort!]) { + searchProducts(search: {query: "shoes"}, filter: $f, sort: $s) { + hits { node { id name } } + totalCount + } + }` + vars := `{"f": {"category": {"eq": "Footwear"}}, "s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + maxTotalCount: 3, + allMatchFilter: func(node map[string]any) bool { + return node["id"] != "3" // id=3 is Leather Belt (Accessories) + }, + }) + }) + + t.Run("hybrid_entity_join", func(t *testing.T) { + t.Parallel() + // Hybrid search with federation entity join. + query := `query($s: [ProductSort!]) { + searchProducts(search: {query: "leather"}, sort: $s) { + hits { node { id name manufacturer rating } } + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertVectorResponse(t, raw, vectorExpectation{ + minHits: 1, + hasEntityJoin: map[string]string{ + "manufacturer": "", + "rating": "", + }, + }) + }) +} + +// RunBoostingScenarios runs e2e scenarios that verify field boosting/weights +// flow through the full pipeline (config parsing → SDL generation → composition → execution). +func RunBoostingScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + t.Run("boosted_search_returns_results", func(t *testing.T) { + t.Parallel() + // Verify that a search with weighted fields produces results. + query := `query($s: [ProductSort!]) { + searchProducts(query: "shoes", sort: $s) { + hits { score node { id name } } + totalCount + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertBoostingResponse(t, raw, 2) // "shoes" in name of products 1 and 2 + }) + + t.Run("boosted_search_without_sort", func(t *testing.T) { + t.Parallel() + // Verify that a boosted search without explicit sort returns results. 
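+		// With no sort argument, ordering is left to the backend's relevance
+		// score. boostConfigSDL gives the name field weight 2.0, so a name match
+		// should outscore an otherwise comparable description-only match; only
+		// the hit count is asserted because absolute scores differ per backend.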
+ query := `query { + searchProducts(query: "leather") { + hits { score node { id name } } + totalCount + } + }` + raw := executeQuery(t, pipeline, query, "") + assertBoostingResponse(t, raw, 1) // "leather" in product 3's name and description + }) +} + +func assertBoostingResponse(t *testing.T, raw string, minHits int) { + t.Helper() + var resp struct { + Data struct { + SearchProducts struct { + Hits []struct { + Score float64 `json:"score"` + Node map[string]any `json:"node"` + } `json:"hits"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal response: %v\nraw: %s", err, raw) + } + hits := resp.Data.SearchProducts.Hits + if len(hits) < minHits { + t.Fatalf("expected at least %d hits, got %d\nraw: %s", minHits, len(hits), raw) + } +} + +// RunFuzzyScenarios runs fuzzy matching / typo tolerance tests through the full +// composition + plan + resolve pipeline. +func RunFuzzyScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupTestEnv(t, setup) + pipeline := env.Pipeline + defaultSort := env.DefaultSort + + t.Run("fuzzy_low_finds_typo", func(t *testing.T) { + t.Parallel() + // "runing" is 1 edit away from "running" — fuzziness LOW should find it. + query := `query($s: [ProductSort!]) { + searchProducts(query: "runing", fuzziness: LOW, sort: $s) { + hits { score node { id name } } + totalCount + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertFuzzyResponse(t, raw, 1) + }) + + t.Run("fuzzy_exact_misses_typo", func(t *testing.T) { + t.Parallel() + // "runing" with fuzziness EXACT should find nothing. + query := `query($s: [ProductSort!]) { + searchProducts(query: "runing", fuzziness: EXACT, sort: $s) { + hits { score node { id name } } + totalCount + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertFuzzyResponse(t, raw, 0) + }) + + t.Run("fuzzy_high_finds_typo", func(t *testing.T) { + t.Parallel() + // "runnin" with fuzziness HIGH should still find results. + query := `query($s: [ProductSort!]) { + searchProducts(query: "runnin", fuzziness: HIGH, sort: $s) { + hits { score node { id name } } + totalCount + } + }` + vars := `{"s": ` + defaultSort + `}` + raw := executeQuery(t, pipeline, query, vars) + assertFuzzyResponse(t, raw, 1) + }) +} + +func assertFuzzyResponse(t *testing.T, raw string, expectedMinHits int) { + t.Helper() + var resp struct { + Data struct { + SearchProducts struct { + Hits []struct { + Score float64 `json:"score"` + Node map[string]any `json:"node"` + } `json:"hits"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal response: %v\nraw: %s", err, raw) + } + hits := resp.Data.SearchProducts.Hits + if expectedMinHits == 0 { + if len(hits) != 0 { + t.Fatalf("expected 0 hits, got %d\nraw: %s", len(hits), raw) + } + return + } + if len(hits) < expectedMinHits { + t.Fatalf("expected at least %d hits, got %d\nraw: %s", expectedMinHits, len(hits), raw) + } +} + +// --- Suggest / autocomplete test infrastructure --- + +// suggestConfigSDL returns a config SDL with suggestField and autocomplete: true on TEXT fields. 
+func suggestConfigSDL(backend, configJSON string) string { + return fmt.Sprintf(` +extend schema @index(name: "products", backend: "%s", config: "%s") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts", suggestField: "suggestProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true, autocomplete: true) + description: String @indexed(type: TEXT, autocomplete: true) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +`, backend, configJSON) +} + +// filterMetadataRootFields returns a copy of metadata with only the specified field +// kept in the Query root node. All other root nodes and child nodes are preserved. +func filterMetadataRootFields(meta *plan.DataSourceMetadata, keepField string) *plan.DataSourceMetadata { + clone := &plan.DataSourceMetadata{ + FederationMetaData: meta.FederationMetaData, + Directives: meta.Directives, + } + for _, rn := range meta.RootNodes { + if rn.TypeName == "Query" { + var filtered []string + for _, fn := range rn.FieldNames { + if fn == keepField { + filtered = append(filtered, fn) + } + } + if len(filtered) > 0 { + clone.RootNodes = append(clone.RootNodes, plan.TypeField{ + TypeName: rn.TypeName, + FieldNames: filtered, + }) + } + } else { + clone.RootNodes = append(clone.RootNodes, rn) + } + } + clone.ChildNodes = make([]plan.TypeField, len(meta.ChildNodes)) + copy(clone.ChildNodes, meta.ChildNodes) + return clone +} + +// setupSuggestTestEnv builds a test environment with both search and suggest datasource entries. +func setupSuggestTestEnv(t *testing.T, setup BackendSetup) testEnv { + t.Helper() + + // 1. Parse the config schema SDL. + doc, parseReport := astparser.ParseGraphqlDocumentString(setup.ConfigSDL) + if parseReport.HasErrors() { + t.Fatalf("parse config schema: %s", parseReport.Error()) + } + parsedConfig, err := search_datasource.ParseConfigSchema(&doc) + if err != nil { + t.Fatalf("ParseConfigSchema: %v", err) + } + if len(parsedConfig.Entities) == 0 { + t.Fatal("no entities found in config schema") + } + + // 2. Generate the search subgraph SDL. + searchSDL, err := search_datasource.GenerateSubgraphSDL(parsedConfig) + if err != nil { + t.Fatalf("GenerateSubgraphSDL: %v", err) + } + + entity := &parsedConfig.Entities[0] + indexDirective := parsedConfig.Indices[0] + + // 3. Build index schema and create the search index. + indexSchema := buildIndexSchema(indexDirective.Name, entity) + idx := setup.CreateIndex(t, fmt.Sprintf("test_%s", setup.Name), indexSchema, []byte(indexDirective.ConfigJSON)) + + // 4. Populate with test data. + if err := idx.IndexDocuments(context.Background(), testProducts()); err != nil { + t.Fatalf("populate test data: %v", err) + } + if setup.Hooks.WaitForIndex != nil { + setup.Hooks.WaitForIndex(t) + } + + // 5. Start the entity subgraph server. + entityServer := httptest.NewServer(productdetails.Handler()) + t.Cleanup(entityServer.Close) + + // 6. Compose the subgraphs. + entitySDL := entitySubgraphSDL(t) + routerConfig := composeSubgraphs(t, searchSDL, entitySDL, entityServer.URL) + + // 7. Build plan configuration with split datasources. 
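+	// The one generated search subgraph becomes two plan datasources that share
+	// a factory and the live index but expose disjoint Query root fields, so
+	// the planner routes each field independently. Rough shape of the result:
+	//
+	//	<id>_search:  RootNodes Query{searchProducts}  -> searchConfig
+	//	<id>_suggest: RootNodes Query{suggestProducts} -> suggestConfig (IsSuggest: true)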
+ searchConfig := entityToConfiguration(entity) + + suggestConfig := entityToConfiguration(entity) + suggestConfig.SearchField = entity.SuggestField + suggestConfig.IsSuggest = true + suggestConfig.ResultsMetaInformation = false + suggestConfig.CursorBasedPagination = false + + supergraphDef := routerConfig.EngineConfig.GraphqlSchema + planConfig := buildSuggestPlanConfiguration(t, routerConfig, idx, searchConfig, suggestConfig, entityServer.URL) + + return testEnv{ + Pipeline: &testPipeline{ + PlanConfig: planConfig, + SupergraphDef: supergraphDef, + }, + SupergraphDef: supergraphDef, + DefaultSort: `[{"field": "PRICE", "direction": "ASC"}]`, + } +} + +// buildSuggestPlanConfiguration builds a plan.Configuration that splits the search datasource +// into two entries: one for searchProducts and one for suggestProducts. +func buildSuggestPlanConfiguration( + t *testing.T, + routerConfig *nodev1.RouterConfig, + idx searchindex.Index, + searchConfig search_datasource.Configuration, + suggestConfig search_datasource.Configuration, + entityServerURL string, +) plan.Configuration { + t.Helper() + + engineConfig := routerConfig.EngineConfig + var planConfig plan.Configuration + planConfig.DefaultFlushIntervalMillis = engineConfig.DefaultFlushInterval + + for _, fc := range engineConfig.FieldConfigurations { + var args []plan.ArgumentConfiguration + for _, ac := range fc.ArgumentsConfiguration { + arg := plan.ArgumentConfiguration{ + Name: ac.Name, + RenderConfig: plan.RenderArgumentAsJSONValue, + } + switch ac.SourceType { + case nodev1.ArgumentSource_FIELD_ARGUMENT: + arg.SourceType = plan.FieldArgumentSource + case nodev1.ArgumentSource_OBJECT_FIELD: + arg.SourceType = plan.ObjectFieldSource + } + args = append(args, arg) + } + planConfig.Fields = append(planConfig.Fields, plan.FieldConfiguration{ + TypeName: fc.TypeName, + FieldName: fc.FieldName, + Arguments: args, + }) + } + + for _, tc := range engineConfig.TypeConfigurations { + planConfig.Types = append(planConfig.Types, plan.TypeConfiguration{ + TypeName: tc.TypeName, + RenameTo: tc.RenameTo, + }) + } + + for _, ds := range engineConfig.DatasourceConfigurations { + metadata := extractDataSourceMetadata(ds) + + fetchURL := "" + if ds.CustomGraphql != nil && ds.CustomGraphql.Fetch != nil { + fetchURL = ds.CustomGraphql.Fetch.GetUrl().GetStaticVariableContent() + } + + if fetchURL == "http://search.local" { + // Split into search and suggest datasources sharing the same factory and index. + searchFactory := search_datasource.NewFactory(context.Background(), nil, nil) + searchFactory.RegisterIndex(searchConfig.IndexName, idx) + + // Search datasource — only searchProducts in root nodes. + searchMeta := filterMetadataRootFields(metadata, searchConfig.SearchField) + searchDS, err := plan.NewDataSourceConfiguration[search_datasource.Configuration]( + ds.Id+"_search", + searchFactory, + searchMeta, + searchConfig, + ) + if err != nil { + t.Fatalf("NewDataSourceConfiguration (search): %v", err) + } + planConfig.DataSources = append(planConfig.DataSources, searchDS) + + // Suggest datasource — only suggestProducts in root nodes. + suggestMeta := filterMetadataRootFields(metadata, suggestConfig.SearchField) + // Add SuggestTerm as a child node for the suggest datasource. 
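+			// filterMetadataRootFields copies child nodes verbatim and SuggestTerm
+			// only appears under suggestProducts, so it is appended here; without
+			// it the planner would have no datasource owning the term/count
+			// selections.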
+ suggestMeta.ChildNodes = append(suggestMeta.ChildNodes, plan.TypeField{ + TypeName: "SuggestTerm", + FieldNames: []string{"term", "count"}, + }) + + suggestDS, err := plan.NewDataSourceConfiguration[search_datasource.Configuration]( + ds.Id+"_suggest", + searchFactory, + suggestMeta, + suggestConfig, + ) + if err != nil { + t.Fatalf("NewDataSourceConfiguration (suggest): %v", err) + } + planConfig.DataSources = append(planConfig.DataSources, suggestDS) + } else { + // Entity datasource — same as buildPlanConfiguration. + graphqlSchema, err := loadInternedString(engineConfig, ds.CustomGraphql.GetUpstreamSchema()) + if err != nil { + t.Fatalf("load upstream schema: %v", err) + } + + schemaConfig, err := graphql_datasource.NewSchemaConfiguration( + graphqlSchema, + &graphql_datasource.FederationConfiguration{ + Enabled: ds.CustomGraphql.Federation.Enabled, + ServiceSDL: ds.CustomGraphql.Federation.ServiceSdl, + }, + ) + if err != nil { + t.Fatalf("NewSchemaConfiguration (entity): %v", err) + } + + entityConfig, err := graphql_datasource.NewConfiguration(graphql_datasource.ConfigurationInput{ + Fetch: &graphql_datasource.FetchConfiguration{ + URL: entityServerURL, + }, + SchemaConfiguration: schemaConfig, + }) + if err != nil { + t.Fatalf("NewConfiguration (entity): %v", err) + } + + entityFactory, err := graphql_datasource.NewFactory(context.Background(), http.DefaultClient, &noopSubscriptionClient{}) + if err != nil { + t.Fatalf("NewFactory (entity): %v", err) + } + + entityDS, err := plan.NewDataSourceConfiguration[graphql_datasource.Configuration]( + ds.Id, + entityFactory, + metadata, + entityConfig, + ) + if err != nil { + t.Fatalf("NewDataSourceConfiguration (entity): %v", err) + } + planConfig.DataSources = append(planConfig.DataSources, entityDS) + } + } + + planConfig.DisableResolveFieldPositions = true + + return planConfig +} + +// RunSuggestScenarios runs suggest/autocomplete e2e scenarios for a given backend. +func RunSuggestScenarios(t *testing.T, setup BackendSetup) { + t.Helper() + + env := setupSuggestTestEnv(t, setup) + pipeline := env.Pipeline + + t.Run("suggest_basic", func(t *testing.T) { + t.Parallel() + // "shoe" should match terms from name/description tokens. + query := `{ suggestProducts(prefix: "shoe") { term count } }` + raw := executeQuery(t, pipeline, query, "") + assertSuggestResponse(t, raw, suggestExpectation{ + minResults: 1, + }) + }) + + t.Run("suggest_short_prefix_returns_empty", func(t *testing.T) { + t.Parallel() + // Single character is below minPrefixLength (2). 
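+		// Either an empty array or null unmarshals to zero results, so both
+		// satisfy the exactCount assertion; the expected wire shape is roughly:
+		//
+		//	{"data": {"suggestProducts": []}}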
+ query := `{ suggestProducts(prefix: "s") { term count } }` + raw := executeQuery(t, pipeline, query, "") + assertSuggestResponse(t, raw, suggestExpectation{exactCount: intPtr(0)}) + }) + + t.Run("suggest_with_valid_prefix_and_limit", func(t *testing.T) { + t.Parallel() + query := `{ suggestProducts(prefix: "sh", limit: 1) { term count } }` + raw := executeQuery(t, pipeline, query, "") + assertSuggestResponse(t, raw, suggestExpectation{ + minResults: 1, + maxResults: 1, + }) + }) + + t.Run("suggest_no_match", func(t *testing.T) { + t.Parallel() + query := `{ suggestProducts(prefix: "zzzzz") { term count } }` + raw := executeQuery(t, pipeline, query, "") + assertSuggestResponse(t, raw, suggestExpectation{exactCount: intPtr(0)}) + }) + + t.Run("suggest_has_term_and_count", func(t *testing.T) { + t.Parallel() + query := `{ suggestProducts(prefix: "shoe") { term count } }` + raw := executeQuery(t, pipeline, query, "") + assertSuggestResponse(t, raw, suggestExpectation{ + minResults: 1, + requireFields: []string{"term", "count"}, + }) + }) + + t.Run("suggest_case_insensitive", func(t *testing.T) { + t.Parallel() + query := `{ suggestProducts(prefix: "SHOE") { term count } }` + raw := executeQuery(t, pipeline, query, "") + assertSuggestResponse(t, raw, suggestExpectation{minResults: 1}) + }) + + t.Run("suggest_only_autocomplete_fields", func(t *testing.T) { + t.Parallel() + // "footwear" is a category value (KEYWORD field, no autocomplete). + // It should not appear in suggest results. + query := `{ suggestProducts(prefix: "footwear") { term count } }` + raw := executeQuery(t, pipeline, query, "") + assertSuggestResponse(t, raw, suggestExpectation{exactCount: intPtr(0)}) + }) + + t.Run("suggest_search_still_works", func(t *testing.T) { + t.Parallel() + // Verify the search datasource still works alongside suggest. 
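+ // A wildcard query sorted by price should return all four seeded products.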
+ query := `query($s: [ProductSort!]) { searchProducts(query: "*", sort: $s) { hits { node { id name } } totalCount } }` + vars := `{"s": [{"field": "PRICE", "direction": "ASC"}]}` + raw := executeQuery(t, pipeline, query, vars) + var resp struct { + Data struct { + SearchProducts struct { + Hits []any `json:"hits"` + TotalCount int `json:"totalCount"` + } `json:"searchProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal: %v\nraw: %s", err, raw) + } + if resp.Data.SearchProducts.TotalCount != 4 { + t.Fatalf("expected 4 products, got %d\nraw: %s", resp.Data.SearchProducts.TotalCount, raw) + } + }) +} + +type suggestExpectation struct { + minResults int + maxResults int // 0 = no max + exactCount *int // if non-nil, assert exact count + requireFields []string // fields that must exist in each result + containsTerm string // at least one result must have this term +} + +func assertSuggestResponse(t *testing.T, raw string, expect suggestExpectation) { + t.Helper() + var resp struct { + Data struct { + SuggestProducts []struct { + Term string `json:"term"` + Count int `json:"count"` + } `json:"suggestProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal suggest response: %v\nraw: %s", err, raw) + } + + results := resp.Data.SuggestProducts + if expect.exactCount != nil { + if len(results) != *expect.exactCount { + t.Fatalf("expected exactly %d results, got %d: %s", *expect.exactCount, len(results), raw) + } + return + } + if len(results) < expect.minResults { + t.Fatalf("expected at least %d results, got %d: %s", expect.minResults, len(results), raw) + } + if expect.maxResults > 0 && len(results) > expect.maxResults { + t.Fatalf("expected at most %d results, got %d: %s", expect.maxResults, len(results), raw) + } + if expect.containsTerm != "" { + found := false + for _, r := range results { + if r.Term == expect.containsTerm { + found = true + break + } + } + if !found { + t.Fatalf("expected term %q not found in results: %s", expect.containsTerm, raw) + } + } + if len(expect.requireFields) > 0 { + // For structural checks, verify via raw JSON that each result has the required fields. 
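+ // Decoding into maps (unlike the typed struct above) distinguishes keys
+ // that are absent from keys that are present with zero values.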
+ var rawResp struct { + Data struct { + SuggestProducts []map[string]any `json:"suggestProducts"` + } `json:"data"` + } + if err := json.Unmarshal([]byte(raw), &rawResp); err != nil { + t.Fatalf("unmarshal raw suggest response: %v", err) + } + for i, r := range rawResp.Data.SuggestProducts { + for _, field := range expect.requireFields { + if _, ok := r[field]; !ok { + t.Errorf("result[%d] missing required field %q: %s", i, field, raw) + } + } + } + } +} + +func intPtr(i int) *int { return &i } diff --git a/execution/searchtesting/meilisearch_test.go b/execution/searchtesting/meilisearch_test.go new file mode 100644 index 0000000000..ac60107b5d --- /dev/null +++ b/execution/searchtesting/meilisearch_test.go @@ -0,0 +1,148 @@ +//go:build integration + +package searchtesting + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/meilisearch" +) + +const meilisearchMasterKey = "test-master-key" + +const meilisearchConfigSDL = ` +extend schema @index(name: "products", backend: "meilisearch", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func startMeilisearch(t *testing.T) string { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "getmeili/meilisearch:v1.6", + ExposedPorts: []string{"7700/tcp"}, + Env: map[string]string{ + "MEILI_MASTER_KEY": meilisearchMasterKey, + }, + WaitingFor: wait.ForHTTP("/health").WithPort("7700/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start meilisearch container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "7700/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return fmt.Sprintf("http://%s:%s", host, port.Port()) +} + +func TestMeilisearch(t *testing.T) { + t.Parallel() + meiliHost := startMeilisearch(t) + + makeSetup := func(name, configSDL string) BackendSetup { + return BackendSetup{ + Name: name, + ConfigSDL: configSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := meilisearch.NewFactory() + cfg := meilisearch.Config{ + Host: meiliHost, + APIKey: meilisearchMasterKey, + } + cfgJSON, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("marshal config: %v", err) + } + idx, err := factory.CreateIndex(context.Background(), name, schema, cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, 
+ } + } + + t.Run("standard", func(t *testing.T) { + t.Parallel() + setup := makeSetup("meilisearch", meilisearchConfigSDL) + setup.ExpectedResponses = map[string]string{ + "supergraph_sdl": expectedSupergraphSDL, + "basic_search_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_keyword_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","rating":4.7}},{"node":{"id":"1","name":"Running Shoes","rating":4.5}},{"node":{"id":"2","name":"Basketball Shoes","rating":4.2}}]}}}`, + "filter_boolean": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "filter_numeric_range": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}}],"totalCount":2}}}`, + "filter_AND": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`, + "filter_OR": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_NOT": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "sort_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}]}}}`, + "pagination_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","reviews":[{"text":"Nice belt","stars":3}]}},{"node":{"id":"1","reviews":[{"text":"Great shoes","stars":5}]}}],"totalCount":4}}}`, + "score_and_totalCount": `{"data":{"searchProducts":{"hits":[{"score":0,"node":{"id":"4","manufacturer":"Smartwool"}},{"score":0,"node":{"id":"3","manufacturer":"Gucci"}},{"score":0,"node":{"id":"1","manufacturer":"Nike"}},{"score":0,"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`, + "facets_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"facets":[{"field":"category","values":[{"value":"Footwear","count":3},{"value":"Accessories","count":1}]}]}}}`, + } + RunAllScenarios(t, setup) + }) + + t.Run("suggest", func(t *testing.T) { + t.Parallel() + RunSuggestScenarios(t, makeSetup("meilisearch_suggest", suggestConfigSDL("meilisearch", "{}"))) + }) + + t.Run("date", func(t *testing.T) { + t.Parallel() + setup := makeSetup("meilisearch_date", dateConfigSDL("meilisearch", "{}")) + setup.ExpectedResponses = map[string]string{ + "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`, + "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + 
"date_gt_lt": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + } + RunDateScenarios(t, setup) + }) +} diff --git a/execution/searchtesting/mock_embedder.go b/execution/searchtesting/mock_embedder.go new file mode 100644 index 0000000000..2a7394375c --- /dev/null +++ b/execution/searchtesting/mock_embedder.go @@ -0,0 +1,59 @@ +package searchtesting + +import ( + "context" + "hash/fnv" + "math" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +const mockDimensions = 4 + +// MockEmbedder implements searchindex.Embedder with deterministic hash-based vectors. +// Given the same text, it always produces the same 4-dimensional unit vector. +type MockEmbedder struct{} + +var _ searchindex.Embedder = (*MockEmbedder)(nil) + +func (m *MockEmbedder) Embed(_ context.Context, texts []string) ([][]float32, error) { + result := make([][]float32, len(texts)) + for i, text := range texts { + result[i] = hashToVector(text) + } + return result, nil +} + +func (m *MockEmbedder) EmbedSingle(_ context.Context, text string) ([]float32, error) { + return hashToVector(text), nil +} + +func (m *MockEmbedder) Dimensions() int { + return mockDimensions +} + +// hashToVector produces a deterministic 4-dimensional unit vector from any string. 
+func hashToVector(text string) []float32 { + h := fnv.New64a() + h.Write([]byte(text)) + seed := h.Sum64() + + vec := make([]float32, mockDimensions) + var norm float64 + for i := 0; i < mockDimensions; i++ { + // Mix bits for each dimension using different shifts + mixed := seed ^ (seed >> uint(16+i*8)) + // Map to [-1, 1] + val := float64(int64(mixed)) / float64(math.MaxInt64) + vec[i] = float32(val) + norm += val * val + } + // Normalize to unit vector for cosine distance consistency + norm = math.Sqrt(norm) + if norm > 0 { + for i := range vec { + vec[i] = float32(float64(vec[i]) / norm) + } + } + return vec +} diff --git a/execution/searchtesting/pgvector_test.go b/execution/searchtesting/pgvector_test.go new file mode 100644 index 0000000000..3b52ae6d85 --- /dev/null +++ b/execution/searchtesting/pgvector_test.go @@ -0,0 +1,162 @@ +//go:build integration + +package searchtesting + +import ( + "context" + "database/sql" + "fmt" + "testing" + "time" + + _ "github.com/lib/pq" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/pgvector" +) + +const pgvectorConfigSDL = ` +extend schema @index(name: "products", backend: "pgvector", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! + name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func startPgvector(t *testing.T) *sql.DB { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "pgvector/pgvector:pg16", + ExposedPorts: []string{"5432/tcp"}, + Env: map[string]string{ + "POSTGRES_USER": "test", + "POSTGRES_PASSWORD": "test", + "POSTGRES_DB": "testdb", + }, + WaitingFor: wait.ForListeningPort("5432/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start pgvector container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "5432/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + dsn := fmt.Sprintf("postgres://test:test@%s:%s/testdb?sslmode=disable", host, port.Port()) + + db, err := sql.Open("postgres", dsn) + if err != nil { + t.Fatalf("failed to open database: %v", err) + } + t.Cleanup(func() { db.Close() }) + + // Wait for database to be ready. 
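+ // sql.Open only validates the DSN and does not connect, so ping in a retry
+ // loop (30 attempts x 500ms, up to 15s) until Postgres accepts connections.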
+ for i := 0; i < 30; i++ { + if err := db.PingContext(ctx); err == nil { + return db + } + time.Sleep(500 * time.Millisecond) + } + t.Fatal("database did not become ready after 30 ping attempts") + return nil +} + +func TestPgvector(t *testing.T) { + t.Parallel() + db := startPgvector(t) + + makeSetup := func(name, configSDL string) BackendSetup { + return BackendSetup{ + Name: name, + ConfigSDL: configSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := pgvector.NewFactory(db) + idx, err := factory.CreateIndex(context.Background(), name, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: true, + }, + } + } + + // Pgvector subtests run sequentially because CREATE EXTENSION races on the same DB. + t.Run("standard", func(t *testing.T) { + setup := makeSetup("pgvector", pgvectorConfigSDL) + setup.ExpectedResponses = map[string]string{ + "supergraph_sdl": expectedSupergraphSDL, + "basic_search_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_keyword_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","rating":4.7}},{"node":{"id":"1","name":"Running Shoes","rating":4.5}},{"node":{"id":"2","name":"Basketball Shoes","rating":4.2}}]}}}`, + "filter_boolean": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "filter_numeric_range": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}}],"totalCount":2}}}`, + "filter_AND": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`, + "filter_OR": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_NOT": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "sort_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}]}}}`, + "pagination_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","reviews":[{"text":"Nice belt","stars":3}]}},{"node":{"id":"1","reviews":[{"text":"Great shoes","stars":5}]}}],"totalCount":4}}}`, + "score_and_totalCount": `{"data":{"searchProducts":{"hits":[{"score":0,"node":{"id":"4","manufacturer":"Smartwool"}},{"score":0,"node":{"id":"3","manufacturer":"Gucci"}},{"score":0,"node":{"id":"1","manufacturer":"Nike"}},{"score":0,"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`, + "facets_with_entity_join": 
`{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"facets":[{"field":"category","values":[{"value":"Footwear","count":3},{"value":"Accessories","count":1}]}]}}}`, + } + RunAllScenarios(t, setup) + }) + + t.Run("vector", func(t *testing.T) { + RunVectorScenarios(t, VectorBackendSetup{ + BackendSetup: makeSetup("pgvector_vector", vectorConfigSDL("pgvector", "{}")), + Embedder: &MockEmbedder{}, + }) + }) + + t.Run("suggest", func(t *testing.T) { + RunSuggestScenarios(t, makeSetup("pgvector_suggest", suggestConfigSDL("pgvector", "{}"))) + }) + + t.Run("date", func(t *testing.T) { + setup := makeSetup("pgvector_date", dateConfigSDL("pgvector", "{}")) + setup.ExpectedResponses = map[string]string{ + "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`, + "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "date_gt_lt": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + } + RunDateScenarios(t, setup) + }) +} diff --git a/execution/searchtesting/productdetails/gqlgen.yml 
b/execution/searchtesting/productdetails/gqlgen.yml new file mode 100644 index 0000000000..333e5f22f7 --- /dev/null +++ b/execution/searchtesting/productdetails/gqlgen.yml @@ -0,0 +1,32 @@ +schema: + - graph/*.graphqls + +exec: + filename: graph/generated/generated.go + package: generated + +federation: + filename: graph/generated/federation.go + package: generated + +model: + filename: graph/model/models_gen.go + package: model + +resolver: + layout: follow-schema + dir: graph + package: graph + +models: + ID: + model: + - github.com/99designs/gqlgen/graphql.ID + - github.com/99designs/gqlgen/graphql.Int + - github.com/99designs/gqlgen/graphql.Int64 + - github.com/99designs/gqlgen/graphql.Int32 + Int: + model: + - github.com/99designs/gqlgen/graphql.Int + - github.com/99designs/gqlgen/graphql.Int64 + - github.com/99designs/gqlgen/graphql.Int32 diff --git a/execution/searchtesting/productdetails/graph/data.go b/execution/searchtesting/productdetails/graph/data.go new file mode 100644 index 0000000000..9ca373786e --- /dev/null +++ b/execution/searchtesting/productdetails/graph/data.go @@ -0,0 +1,45 @@ +package graph + +import ( + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/productdetails/graph/model" + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/shareddata" +) + +var productDetails map[string]*model.Product + +func init() { + productDetails = make(map[string]*model.Product) + for _, p := range shareddata.Products() { + p := p + reviews := make([]*model.Review, len(p.Reviews)) + for i, r := range p.Reviews { + reviews[i] = &model.Review{Text: r.Text, Stars: r.Stars} + } + name := p.Name + desc := p.Description + cat := p.Category + price := p.Price + inStock := p.InStock + rating := p.Rating + mfr := p.Manufacturer + productDetails[p.ID] = &model.Product{ + ID: p.ID, + Name: &name, + Description: &desc, + Category: &cat, + Price: &price, + InStock: &inStock, + Reviews: reviews, + Rating: &rating, + Manufacturer: &mfr, + } + } +} + +func LookupProduct(id string) *model.Product { + p, ok := productDetails[id] + if !ok { + return nil + } + return p +} diff --git a/execution/searchtesting/productdetails/graph/entity.resolvers.go b/execution/searchtesting/productdetails/graph/entity.resolvers.go new file mode 100644 index 0000000000..486233ad2c --- /dev/null +++ b/execution/searchtesting/productdetails/graph/entity.resolvers.go @@ -0,0 +1,22 @@ +package graph + +// This file will be automatically regenerated based on the schema, any resolver implementations +// will be copied through when generating and any unknown code will be moved to the end. +// Code generated by github.com/99designs/gqlgen version v0.17.76 + +import ( + "context" + + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/productdetails/graph/generated" + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/productdetails/graph/model" +) + +// FindProductByID is the resolver for the findProductByID field. +func (r *entityResolver) FindProductByID(ctx context.Context, id string) (*model.Product, error) { + return LookupProduct(id), nil +} + +// Entity returns generated.EntityResolver implementation. 
+func (r *Resolver) Entity() generated.EntityResolver { return &entityResolver{r} } + +type entityResolver struct{ *Resolver } diff --git a/execution/searchtesting/productdetails/graph/generated/federation.go b/execution/searchtesting/productdetails/graph/generated/federation.go new file mode 100644 index 0000000000..fb6884ef7e --- /dev/null +++ b/execution/searchtesting/productdetails/graph/generated/federation.go @@ -0,0 +1,234 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. + +package generated + +import ( + "context" + "errors" + "fmt" + "strings" + "sync" + + "github.com/99designs/gqlgen/plugin/federation/fedruntime" +) + +var ( + ErrUnknownType = errors.New("unknown type") + ErrTypeNotFound = errors.New("type not found") +) + +func (ec *executionContext) __resolve__service(ctx context.Context) (fedruntime.Service, error) { + if ec.DisableIntrospection { + return fedruntime.Service{}, errors.New("federated introspection disabled") + } + + var sdl []string + + for _, src := range sources { + if src.BuiltIn { + continue + } + sdl = append(sdl, src.Input) + } + + return fedruntime.Service{ + SDL: strings.Join(sdl, "\n"), + }, nil +} + +func (ec *executionContext) __resolve_entities(ctx context.Context, representations []map[string]any) []fedruntime.Entity { + list := make([]fedruntime.Entity, len(representations)) + + repsMap := ec.buildRepresentationGroups(ctx, representations) + + switch len(repsMap) { + case 0: + return list + case 1: + for typeName, reps := range repsMap { + ec.resolveEntityGroup(ctx, typeName, reps, list) + } + return list + default: + var g sync.WaitGroup + g.Add(len(repsMap)) + for typeName, reps := range repsMap { + go func(typeName string, reps []EntityWithIndex) { + ec.resolveEntityGroup(ctx, typeName, reps, list) + g.Done() + }(typeName, reps) + } + g.Wait() + return list + } +} + +type EntityWithIndex struct { + // The index in the original representation array + index int + entity EntityRepresentation +} + +// EntityRepresentation is the JSON representation of an entity sent by the Router +// used as the inputs for us to resolve. +// +// We make it a map because we know the top level JSON is always an object. +type EntityRepresentation map[string]any + +// We group entities by typename so that we can parallelize their resolution. +// This is particularly helpful when there are entity groups in multi mode. +func (ec *executionContext) buildRepresentationGroups( + ctx context.Context, + representations []map[string]any, +) map[string][]EntityWithIndex { + repsMap := make(map[string][]EntityWithIndex) + for i, rep := range representations { + typeName, ok := rep["__typename"].(string) + if !ok { + // If there is no __typename, we just skip the representation; + // we just won't be resolving these unknown types. 
+ ec.Error(ctx, errors.New("__typename must be an existing string")) + continue + } + + repsMap[typeName] = append(repsMap[typeName], EntityWithIndex{ + index: i, + entity: rep, + }) + } + + return repsMap +} + +func (ec *executionContext) resolveEntityGroup( + ctx context.Context, + typeName string, + reps []EntityWithIndex, + list []fedruntime.Entity, +) { + if isMulti(typeName) { + err := ec.resolveManyEntities(ctx, typeName, reps, list) + if err != nil { + ec.Error(ctx, err) + } + } else { + // if there are multiple entities to resolve, parallelize (similar to + // graphql.FieldSet.Dispatch) + var e sync.WaitGroup + e.Add(len(reps)) + for i, rep := range reps { + i, rep := i, rep + go func(i int, rep EntityWithIndex) { + entity, err := ec.resolveEntity(ctx, typeName, rep.entity) + if err != nil { + ec.Error(ctx, err) + } else { + list[rep.index] = entity + } + e.Done() + }(i, rep) + } + e.Wait() + } +} + +func isMulti(typeName string) bool { + switch typeName { + default: + return false + } +} + +func (ec *executionContext) resolveEntity( + ctx context.Context, + typeName string, + rep EntityRepresentation, +) (e fedruntime.Entity, err error) { + // we need to do our own panic handling, because we may be called in a + // goroutine, where the usual panic handling can't catch us + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + } + }() + + switch typeName { + case "Product": + resolverName, err := entityResolverNameForProduct(ctx, rep) + if err != nil { + return nil, fmt.Errorf(`finding resolver for Entity "Product": %w`, err) + } + switch resolverName { + + case "findProductByID": + id0, err := ec.unmarshalNID2string(ctx, rep["id"]) + if err != nil { + return nil, fmt.Errorf(`unmarshalling param 0 for findProductByID(): %w`, err) + } + entity, err := ec.resolvers.Entity().FindProductByID(ctx, id0) + if err != nil { + return nil, fmt.Errorf(`resolving Entity "Product": %w`, err) + } + + return entity, nil + } + + } + return nil, fmt.Errorf("%w: %s", ErrUnknownType, typeName) +} + +func (ec *executionContext) resolveManyEntities( + ctx context.Context, + typeName string, + reps []EntityWithIndex, + list []fedruntime.Entity, +) (err error) { + // we need to do our own panic handling, because we may be called in a + // goroutine, where the usual panic handling can't catch us + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + } + }() + + switch typeName { + + default: + return errors.New("unknown type: " + typeName) + } +} + +func entityResolverNameForProduct(ctx context.Context, rep EntityRepresentation) (string, error) { + // we collect errors because a later entity resolver may work fine + // when an entity has multiple keys + entityResolverErrs := []error{} + for { + var ( + m EntityRepresentation + val any + ok bool + ) + _ = val + // if all of the KeyFields values for this resolver are null, + // we shouldn't use use it + allNull := true + m = rep + val, ok = m["id"] + if !ok { + entityResolverErrs = append(entityResolverErrs, + fmt.Errorf("%w due to missing Key Field \"id\" for Product", ErrTypeNotFound)) + break + } + if allNull { + allNull = val == nil + } + if allNull { + entityResolverErrs = append(entityResolverErrs, + fmt.Errorf("%w due to all null value KeyFields for Product", ErrTypeNotFound)) + break + } + return "findProductByID", nil + } + return "", fmt.Errorf("%w for Product due to %v", ErrTypeNotFound, + errors.Join(entityResolverErrs...).Error()) +} diff --git 
a/execution/searchtesting/productdetails/graph/generated/generated.go b/execution/searchtesting/productdetails/graph/generated/generated.go new file mode 100644 index 0000000000..ee33c22ded --- /dev/null +++ b/execution/searchtesting/productdetails/graph/generated/generated.go @@ -0,0 +1,4768 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. + +package generated + +import ( + "bytes" + "context" + "errors" + "fmt" + "strconv" + "sync" + "sync/atomic" + + "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql/introspection" + "github.com/99designs/gqlgen/plugin/federation/fedruntime" + gqlparser "github.com/vektah/gqlparser/v2" + "github.com/vektah/gqlparser/v2/ast" + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/productdetails/graph/model" +) + +// region ************************** generated!.gotpl ************************** + +// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface. +func NewExecutableSchema(cfg Config) graphql.ExecutableSchema { + return &executableSchema{ + schema: cfg.Schema, + resolvers: cfg.Resolvers, + directives: cfg.Directives, + complexity: cfg.Complexity, + } +} + +type Config struct { + Schema *ast.Schema + Resolvers ResolverRoot + Directives DirectiveRoot + Complexity ComplexityRoot +} + +type ResolverRoot interface { + Entity() EntityResolver +} + +type DirectiveRoot struct { +} + +type ComplexityRoot struct { + Entity struct { + FindProductByID func(childComplexity int, id string) int + } + + Product struct { + Category func(childComplexity int) int + Description func(childComplexity int) int + ID func(childComplexity int) int + InStock func(childComplexity int) int + Manufacturer func(childComplexity int) int + Name func(childComplexity int) int + Price func(childComplexity int) int + Rating func(childComplexity int) int + Reviews func(childComplexity int) int + } + + Query struct { + __resolve__service func(childComplexity int) int + __resolve_entities func(childComplexity int, representations []map[string]any) int + } + + Review struct { + Stars func(childComplexity int) int + Text func(childComplexity int) int + } + + _Service struct { + SDL func(childComplexity int) int + } +} + +type EntityResolver interface { + FindProductByID(ctx context.Context, id string) (*model.Product, error) +} + +type executableSchema struct { + schema *ast.Schema + resolvers ResolverRoot + directives DirectiveRoot + complexity ComplexityRoot +} + +func (e *executableSchema) Schema() *ast.Schema { + if e.schema != nil { + return e.schema + } + return parsedSchema +} + +func (e *executableSchema) Complexity(ctx context.Context, typeName, field string, childComplexity int, rawArgs map[string]any) (int, bool) { + ec := executionContext{nil, e, 0, 0, nil} + _ = ec + switch typeName + "." 
+ field { + + case "Entity.findProductByID": + if e.complexity.Entity.FindProductByID == nil { + break + } + + args, err := ec.field_Entity_findProductByID_args(ctx, rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Entity.FindProductByID(childComplexity, args["id"].(string)), true + + case "Product.category": + if e.complexity.Product.Category == nil { + break + } + + return e.complexity.Product.Category(childComplexity), true + + case "Product.description": + if e.complexity.Product.Description == nil { + break + } + + return e.complexity.Product.Description(childComplexity), true + + case "Product.id": + if e.complexity.Product.ID == nil { + break + } + + return e.complexity.Product.ID(childComplexity), true + + case "Product.inStock": + if e.complexity.Product.InStock == nil { + break + } + + return e.complexity.Product.InStock(childComplexity), true + + case "Product.manufacturer": + if e.complexity.Product.Manufacturer == nil { + break + } + + return e.complexity.Product.Manufacturer(childComplexity), true + + case "Product.name": + if e.complexity.Product.Name == nil { + break + } + + return e.complexity.Product.Name(childComplexity), true + + case "Product.price": + if e.complexity.Product.Price == nil { + break + } + + return e.complexity.Product.Price(childComplexity), true + + case "Product.rating": + if e.complexity.Product.Rating == nil { + break + } + + return e.complexity.Product.Rating(childComplexity), true + + case "Product.reviews": + if e.complexity.Product.Reviews == nil { + break + } + + return e.complexity.Product.Reviews(childComplexity), true + + case "Query._service": + if e.complexity.Query.__resolve__service == nil { + break + } + + return e.complexity.Query.__resolve__service(childComplexity), true + + case "Query._entities": + if e.complexity.Query.__resolve_entities == nil { + break + } + + args, err := ec.field_Query__entities_args(ctx, rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.__resolve_entities(childComplexity, args["representations"].([]map[string]any)), true + + case "Review.stars": + if e.complexity.Review.Stars == nil { + break + } + + return e.complexity.Review.Stars(childComplexity), true + + case "Review.text": + if e.complexity.Review.Text == nil { + break + } + + return e.complexity.Review.Text(childComplexity), true + + case "_Service.sdl": + if e.complexity._Service.SDL == nil { + break + } + + return e.complexity._Service.SDL(childComplexity), true + + } + return 0, false +} + +func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { + opCtx := graphql.GetOperationContext(ctx) + ec := executionContext{opCtx, e, 0, 0, make(chan graphql.DeferredResult)} + inputUnmarshalMap := graphql.BuildUnmarshalerMap() + first := true + + switch opCtx.Operation.Operation { + case ast.Query: + return func(ctx context.Context) *graphql.Response { + var response graphql.Response + var data graphql.Marshaler + if first { + first = false + ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap) + data = ec._Query(ctx, opCtx.Operation.SelectionSet) + } else { + if atomic.LoadInt32(&ec.pendingDeferred) > 0 { + result := <-ec.deferredResults + atomic.AddInt32(&ec.pendingDeferred, -1) + data = result.Result + response.Path = result.Path + response.Label = result.Label + response.Errors = result.Errors + } else { + return nil + } + } + var buf bytes.Buffer + data.MarshalGQL(&buf) + response.Data = buf.Bytes() + if atomic.LoadInt32(&ec.deferred) > 0 { + hasNext := 
atomic.LoadInt32(&ec.pendingDeferred) > 0 + response.HasNext = &hasNext + } + + return &response + } + + default: + return graphql.OneShot(graphql.ErrorResponse(ctx, "unsupported GraphQL operation")) + } +} + +type executionContext struct { + *graphql.OperationContext + *executableSchema + deferred int32 + pendingDeferred int32 + deferredResults chan graphql.DeferredResult +} + +func (ec *executionContext) processDeferredGroup(dg graphql.DeferredGroup) { + atomic.AddInt32(&ec.pendingDeferred, 1) + go func() { + ctx := graphql.WithFreshResponseContext(dg.Context) + dg.FieldSet.Dispatch(ctx) + ds := graphql.DeferredResult{ + Path: dg.Path, + Label: dg.Label, + Result: dg.FieldSet, + Errors: graphql.GetErrors(ctx), + } + // null fields should bubble up + if dg.FieldSet.Invalids > 0 { + ds.Result = graphql.Null + } + ec.deferredResults <- ds + }() +} + +func (ec *executionContext) introspectSchema() (*introspection.Schema, error) { + if ec.DisableIntrospection { + return nil, errors.New("introspection disabled") + } + return introspection.WrapSchema(ec.Schema()), nil +} + +func (ec *executionContext) introspectType(name string) (*introspection.Type, error) { + if ec.DisableIntrospection { + return nil, errors.New("introspection disabled") + } + return introspection.WrapTypeFromDef(ec.Schema(), ec.Schema().Types[name]), nil +} + +var sources = []*ast.Source{ + {Name: "../schema.graphqls", Input: `type Product @key(fields: "id") { + id: ID! + name: String + description: String + category: String + price: Float + inStock: Boolean + reviews: [Review!]! + rating: Float + manufacturer: String +} + +type Review { + text: String! + stars: Int! +} +`, BuiltIn: false}, + {Name: "../../federation/directives.graphql", Input: ` + directive @key(fields: _FieldSet!) repeatable on OBJECT | INTERFACE + directive @requires(fields: _FieldSet!) on FIELD_DEFINITION + directive @provides(fields: _FieldSet!) on FIELD_DEFINITION + directive @extends on OBJECT | INTERFACE + directive @external on FIELD_DEFINITION + scalar _Any + scalar _FieldSet +`, BuiltIn: true}, + {Name: "../../federation/entity.graphql", Input: ` +# a union of all types that use the @key directive +union _Entity = Product + +# fake type to build resolver interfaces for users to implement +type Entity { + findProductByID(id: ID!,): Product! +} + +type _Service { + sdl: String +} + +extend type Query { + _entities(representations: [_Any!]!): [_Entity]! + _service: _Service! +} +`, BuiltIn: true}, +} +var parsedSchema = gqlparser.MustLoadSchema(sources...) 
+ +// endregion ************************** generated!.gotpl ************************** + +// region ***************************** args.gotpl ***************************** + +func (ec *executionContext) field_Entity_findProductByID_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { + var err error + args := map[string]any{} + arg0, err := ec.field_Entity_findProductByID_argsID(ctx, rawArgs) + if err != nil { + return nil, err + } + args["id"] = arg0 + return args, nil +} +func (ec *executionContext) field_Entity_findProductByID_argsID( + ctx context.Context, + rawArgs map[string]any, +) (string, error) { + if _, ok := rawArgs["id"]; !ok { + var zeroVal string + return zeroVal, nil + } + + ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + if tmp, ok := rawArgs["id"]; ok { + return ec.unmarshalNID2string(ctx, tmp) + } + + var zeroVal string + return zeroVal, nil +} + +func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { + var err error + args := map[string]any{} + arg0, err := ec.field_Query___type_argsName(ctx, rawArgs) + if err != nil { + return nil, err + } + args["name"] = arg0 + return args, nil +} +func (ec *executionContext) field_Query___type_argsName( + ctx context.Context, + rawArgs map[string]any, +) (string, error) { + if _, ok := rawArgs["name"]; !ok { + var zeroVal string + return zeroVal, nil + } + + ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + if tmp, ok := rawArgs["name"]; ok { + return ec.unmarshalNString2string(ctx, tmp) + } + + var zeroVal string + return zeroVal, nil +} + +func (ec *executionContext) field_Query__entities_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { + var err error + args := map[string]any{} + arg0, err := ec.field_Query__entities_argsRepresentations(ctx, rawArgs) + if err != nil { + return nil, err + } + args["representations"] = arg0 + return args, nil +} +func (ec *executionContext) field_Query__entities_argsRepresentations( + ctx context.Context, + rawArgs map[string]any, +) ([]map[string]any, error) { + if _, ok := rawArgs["representations"]; !ok { + var zeroVal []map[string]any + return zeroVal, nil + } + + ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("representations")) + if tmp, ok := rawArgs["representations"]; ok { + return ec.unmarshalN_Any2ᚕmapᚄ(ctx, tmp) + } + + var zeroVal []map[string]any + return zeroVal, nil +} + +func (ec *executionContext) field___Directive_args_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { + var err error + args := map[string]any{} + arg0, err := ec.field___Directive_args_argsIncludeDeprecated(ctx, rawArgs) + if err != nil { + return nil, err + } + args["includeDeprecated"] = arg0 + return args, nil +} +func (ec *executionContext) field___Directive_args_argsIncludeDeprecated( + ctx context.Context, + rawArgs map[string]any, +) (*bool, error) { + if _, ok := rawArgs["includeDeprecated"]; !ok { + var zeroVal *bool + return zeroVal, nil + } + + ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + if tmp, ok := rawArgs["includeDeprecated"]; ok { + return ec.unmarshalOBoolean2ᚖbool(ctx, tmp) + } + + var zeroVal *bool + return zeroVal, nil +} + +func (ec *executionContext) field___Field_args_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { + var err error + args := map[string]any{} + arg0, err := ec.field___Field_args_argsIncludeDeprecated(ctx, rawArgs) + 
if err != nil { + return nil, err + } + args["includeDeprecated"] = arg0 + return args, nil +} +func (ec *executionContext) field___Field_args_argsIncludeDeprecated( + ctx context.Context, + rawArgs map[string]any, +) (*bool, error) { + if _, ok := rawArgs["includeDeprecated"]; !ok { + var zeroVal *bool + return zeroVal, nil + } + + ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + if tmp, ok := rawArgs["includeDeprecated"]; ok { + return ec.unmarshalOBoolean2ᚖbool(ctx, tmp) + } + + var zeroVal *bool + return zeroVal, nil +} + +func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { + var err error + args := map[string]any{} + arg0, err := ec.field___Type_enumValues_argsIncludeDeprecated(ctx, rawArgs) + if err != nil { + return nil, err + } + args["includeDeprecated"] = arg0 + return args, nil +} +func (ec *executionContext) field___Type_enumValues_argsIncludeDeprecated( + ctx context.Context, + rawArgs map[string]any, +) (bool, error) { + if _, ok := rawArgs["includeDeprecated"]; !ok { + var zeroVal bool + return zeroVal, nil + } + + ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + if tmp, ok := rawArgs["includeDeprecated"]; ok { + return ec.unmarshalOBoolean2bool(ctx, tmp) + } + + var zeroVal bool + return zeroVal, nil +} + +func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArgs map[string]any) (map[string]any, error) { + var err error + args := map[string]any{} + arg0, err := ec.field___Type_fields_argsIncludeDeprecated(ctx, rawArgs) + if err != nil { + return nil, err + } + args["includeDeprecated"] = arg0 + return args, nil +} +func (ec *executionContext) field___Type_fields_argsIncludeDeprecated( + ctx context.Context, + rawArgs map[string]any, +) (bool, error) { + if _, ok := rawArgs["includeDeprecated"]; !ok { + var zeroVal bool + return zeroVal, nil + } + + ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + if tmp, ok := rawArgs["includeDeprecated"]; ok { + return ec.unmarshalOBoolean2bool(ctx, tmp) + } + + var zeroVal bool + return zeroVal, nil +} + +// endregion ***************************** args.gotpl ***************************** + +// region ************************** directives.gotpl ************************** + +// endregion ************************** directives.gotpl ************************** + +// region **************************** field.gotpl ***************************** + +func (ec *executionContext) _Entity_findProductByID(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Entity_findProductByID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Entity().FindProductByID(rctx, fc.Args["id"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*model.Product) + fc.Result = res + return ec.marshalNProduct2ᚖgithubᚗcomᚋwundergraphᚋgraphqlᚑgoᚑtoolsᚋexecutionᚋsearchtestingᚋproductdetailsᚋgraphᚋmodelᚐProduct(ctx, 
field.Selections, res) +}

[gqlgen-generated execution code elided. The remainder of this generated file (`graph/generated.go` for the `productdetails` test subgraph, produced by github.com/99designs/gqlgen) contains only the standard machine-generated field resolvers and field contexts: the `Entity.findProductByID` context; `Product` (id, name, description, category, price, inStock, reviews, rating, manufacturer); `Review` (text, stars); the federation entry points `Query._entities` and `Query._service` with `_Service.sdl`; and the GraphQL introspection types (`__Schema`, `__Type`, `__Field`, `__InputValue`, `__EnumValue`, `__Directive`).]
nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Field) + fc.Result = res + return ec.marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐFieldᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___Field_name(ctx, field) + case "description": + return ec.fieldContext___Field_description(ctx, field) + case "args": + return ec.fieldContext___Field_args(ctx, field) + case "type": + return ec.fieldContext___Field_type(ctx, field) + case "isDeprecated": + return ec.fieldContext___Field_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___Field_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Field", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_fields_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } + return fc, nil +} + +func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_interfaces(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { + ctx = rctx // use context from middleware stack in children + return obj.Interfaces(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_interfaces(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "isOneOf": + return 
ec.fieldContext___Type_isOneOf(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_possibleTypes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { + ctx = rctx // use context from middleware stack in children + return obj.PossibleTypes(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_possibleTypes(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "isOneOf": + return ec.fieldContext___Type_isOneOf(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_enumValues(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { + ctx = rctx // use context from middleware stack in children + return obj.EnumValues(fc.Args["includeDeprecated"].(bool)), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.EnumValue) + fc.Result = res + return ec.marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_enumValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + 
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___EnumValue_name(ctx, field) + case "description": + return ec.fieldContext___EnumValue_description(ctx, field) + case "isDeprecated": + return ec.fieldContext___EnumValue_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___EnumValue_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __EnumValue", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_enumValues_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } + return fc, nil +} + +func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_inputFields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { + ctx = rctx // use context from middleware stack in children + return obj.InputFields(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_inputFields(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + case "isDeprecated": + return ec.fieldContext___InputValue_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___InputValue_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_ofType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { + ctx = rctx // use context from middleware stack in children + return obj.OfType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return 
ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_ofType(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "isOneOf": + return ec.fieldContext___Type_isOneOf(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_isOneOf(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_isOneOf(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsOneOf(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalOBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_isOneOf(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +// endregion **************************** field.gotpl ***************************** + +// region **************************** input.gotpl ***************************** + +// endregion **************************** input.gotpl ***************************** + +// region ************************** interface.gotpl *************************** + +func (ec *executionContext) __Entity(ctx context.Context, sel ast.SelectionSet, obj fedruntime.Entity) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case model.Product: + return ec._Product(ctx, sel, &obj) + case *model.Product: + if obj == nil { + return graphql.Null + } + return ec._Product(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +// endregion ************************** interface.gotpl *************************** + +// region **************************** 
object.gotpl **************************** + +var entityImplementors = []string{"Entity"} + +func (ec *executionContext) _Entity(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, entityImplementors) + ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ + Object: "Entity", + }) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{ + Object: field.Name, + Field: field, + }) + + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Entity") + case "findProductByID": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Entity_findProductByID(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&fs.Invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var productImplementors = []string{"Product", "_Entity"} + +func (ec *executionContext) _Product(ctx context.Context, sel ast.SelectionSet, obj *model.Product) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, productImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Product") + case "id": + out.Values[i] = ec._Product_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "name": + out.Values[i] = ec._Product_name(ctx, field, obj) + case "description": + out.Values[i] = ec._Product_description(ctx, field, obj) + case "category": + out.Values[i] = ec._Product_category(ctx, field, obj) + case "price": + out.Values[i] = ec._Product_price(ctx, field, obj) + case "inStock": + out.Values[i] = ec._Product_inStock(ctx, field, obj) + case "reviews": + out.Values[i] = ec._Product_reviews(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "rating": + out.Values[i] = ec._Product_rating(ctx, field, obj) + case "manufacturer": + out.Values[i] = ec._Product_manufacturer(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var queryImplementors = []string{"Query"} + +func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { + fields := 
graphql.CollectFields(ec.OperationContext, sel, queryImplementors) + ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ + Object: "Query", + }) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{ + Object: field.Name, + Field: field, + }) + + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Query") + case "_entities": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query__entities(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&fs.Invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "_service": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query__service(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&fs.Invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "__type": + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___type(ctx, field) + }) + case "__schema": + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___schema(ctx, field) + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var reviewImplementors = []string{"Review"} + +func (ec *executionContext) _Review(ctx context.Context, sel ast.SelectionSet, obj *model.Review) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, reviewImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Review") + case "text": + out.Values[i] = ec._Review_text(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "stars": + out.Values[i] = ec._Review_stars(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: 
graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var _ServiceImplementors = []string{"_Service"} + +func (ec *executionContext) __Service(ctx context.Context, sel ast.SelectionSet, obj *fedruntime.Service) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, _ServiceImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("_Service") + case "sdl": + out.Values[i] = ec.__Service_sdl(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __DirectiveImplementors = []string{"__Directive"} + +func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __DirectiveImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Directive") + case "name": + out.Values[i] = ec.___Directive_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___Directive_description(ctx, field, obj) + case "isRepeatable": + out.Values[i] = ec.___Directive_isRepeatable(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "locations": + out.Values[i] = ec.___Directive_locations(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "args": + out.Values[i] = ec.___Directive_args(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __EnumValueImplementors = []string{"__EnumValue"} + +func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __EnumValueImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__EnumValue") + case "name": + out.Values[i] = ec.___EnumValue_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___EnumValue_description(ctx, field, obj) + case "isDeprecated": + out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "deprecationReason": + out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj) + default: + panic("unknown field " + 
strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __FieldImplementors = []string{"__Field"} + +func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __FieldImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Field") + case "name": + out.Values[i] = ec.___Field_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___Field_description(ctx, field, obj) + case "args": + out.Values[i] = ec.___Field_args(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "type": + out.Values[i] = ec.___Field_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "isDeprecated": + out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "deprecationReason": + out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __InputValueImplementors = []string{"__InputValue"} + +func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __InputValueImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__InputValue") + case "name": + out.Values[i] = ec.___InputValue_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___InputValue_description(ctx, field, obj) + case "type": + out.Values[i] = ec.___InputValue_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "defaultValue": + out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj) + case "isDeprecated": + out.Values[i] = ec.___InputValue_isDeprecated(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "deprecationReason": + out.Values[i] = ec.___InputValue_deprecationReason(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __SchemaImplementors = []string{"__Schema"} + +func (ec 
*executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __SchemaImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Schema") + case "description": + out.Values[i] = ec.___Schema_description(ctx, field, obj) + case "types": + out.Values[i] = ec.___Schema_types(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "queryType": + out.Values[i] = ec.___Schema_queryType(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "mutationType": + out.Values[i] = ec.___Schema_mutationType(ctx, field, obj) + case "subscriptionType": + out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj) + case "directives": + out.Values[i] = ec.___Schema_directives(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __TypeImplementors = []string{"__Type"} + +func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __TypeImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Type") + case "kind": + out.Values[i] = ec.___Type_kind(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "name": + out.Values[i] = ec.___Type_name(ctx, field, obj) + case "description": + out.Values[i] = ec.___Type_description(ctx, field, obj) + case "specifiedByURL": + out.Values[i] = ec.___Type_specifiedByURL(ctx, field, obj) + case "fields": + out.Values[i] = ec.___Type_fields(ctx, field, obj) + case "interfaces": + out.Values[i] = ec.___Type_interfaces(ctx, field, obj) + case "possibleTypes": + out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj) + case "enumValues": + out.Values[i] = ec.___Type_enumValues(ctx, field, obj) + case "inputFields": + out.Values[i] = ec.___Type_inputFields(ctx, field, obj) + case "ofType": + out.Values[i] = ec.___Type_ofType(ctx, field, obj) + case "isOneOf": + out.Values[i] = ec.___Type_isOneOf(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +// endregion **************************** object.gotpl **************************** + +// region ***************************** type.gotpl ***************************** + +func (ec *executionContext) unmarshalNBoolean2bool(ctx context.Context, v any) (bool, error) { + res, err := graphql.UnmarshalBoolean(v) + return res, 
graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + _ = sel + res := graphql.MarshalBoolean(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNID2string(ctx context.Context, v any) (string, error) { + res, err := graphql.UnmarshalID(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNID2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + _ = sel + res := graphql.MarshalID(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNInt2int(ctx context.Context, v any) (int, error) { + res, err := graphql.UnmarshalInt(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.SelectionSet, v int) graphql.Marshaler { + _ = sel + res := graphql.MarshalInt(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) marshalNProduct2githubᚗcomᚋwundergraphᚋgraphqlᚑgoᚑtoolsᚋexecutionᚋsearchtestingᚋproductdetailsᚋgraphᚋmodelᚐProduct(ctx context.Context, sel ast.SelectionSet, v model.Product) graphql.Marshaler { + return ec._Product(ctx, sel, &v) +} + +func (ec *executionContext) marshalNProduct2ᚖgithubᚗcomᚋwundergraphᚋgraphqlᚑgoᚑtoolsᚋexecutionᚋsearchtestingᚋproductdetailsᚋgraphᚋmodelᚐProduct(ctx context.Context, sel ast.SelectionSet, v *model.Product) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Product(ctx, sel, v) +} + +func (ec *executionContext) marshalNReview2ᚕᚖgithubᚗcomᚋwundergraphᚋgraphqlᚑgoᚑtoolsᚋexecutionᚋsearchtestingᚋproductdetailsᚋgraphᚋmodelᚐReviewᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.Review) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNReview2ᚖgithubᚗcomᚋwundergraphᚋgraphqlᚑgoᚑtoolsᚋexecutionᚋsearchtestingᚋproductdetailsᚋgraphᚋmodelᚐReview(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNReview2ᚖgithubᚗcomᚋwundergraphᚋgraphqlᚑgoᚑtoolsᚋexecutionᚋsearchtestingᚋproductdetailsᚋgraphᚋmodelᚐReview(ctx context.Context, sel ast.SelectionSet, v *model.Review) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + 
return graphql.Null + } + return ec._Review(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNString2string(ctx context.Context, v any) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + _ = sel + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalN_Any2map(ctx context.Context, v any) (map[string]any, error) { + res, err := graphql.UnmarshalMap(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN_Any2map(ctx context.Context, sel ast.SelectionSet, v map[string]any) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + _ = sel + res := graphql.MarshalMap(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalN_Any2ᚕmapᚄ(ctx context.Context, v any) ([]map[string]any, error) { + var vSlice []any + vSlice = graphql.CoerceList(v) + var err error + res := make([]map[string]any, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalN_Any2map(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalN_Any2ᚕmapᚄ(ctx context.Context, sel ast.SelectionSet, v []map[string]any) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalN_Any2map(ctx, sel, v[i]) + } + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN_Entity2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋpluginᚋfederationᚋfedruntimeᚐEntity(ctx context.Context, sel ast.SelectionSet, v []fedruntime.Entity) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalO_Entity2githubᚗcomᚋ99designsᚋgqlgenᚋpluginᚋfederationᚋfedruntimeᚐEntity(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + return ret +} + +func (ec *executionContext) unmarshalN_FieldSet2string(ctx context.Context, v any) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN_FieldSet2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + _ = sel + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) 
marshalN_Service2githubᚗcomᚋ99designsᚋgqlgenᚋpluginᚋfederationᚋfedruntimeᚐService(ctx context.Context, sel ast.SelectionSet, v fedruntime.Service) graphql.Marshaler { + return ec.__Service(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler { + return ec.___Directive(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirectiveᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Directive) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2string(ctx context.Context, v any) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + _ = sel + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2ᚕstringᚄ(ctx context.Context, v any) ([]string, error) { + var vSlice []any + vSlice = graphql.CoerceList(v) + var err error + res := make([]string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalN__DirectiveLocation2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalN__DirectiveLocation2ᚕstringᚄ(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__DirectiveLocation2string(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx context.Context, sel ast.SelectionSet, v introspection.EnumValue) graphql.Marshaler { + return ec.___EnumValue(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx 
context.Context, sel ast.SelectionSet, v introspection.Field) graphql.Marshaler { + return ec.___Field(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx context.Context, sel ast.SelectionSet, v introspection.InputValue) graphql.Marshaler { + return ec.___InputValue(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v introspection.Type) graphql.Marshaler { + return ec.___Type(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +func (ec *executionContext) unmarshalN__TypeKind2string(ctx context.Context, v any) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + _ = sel + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v any) (bool, error) { + res, err := graphql.UnmarshalBoolean(v) + return res, graphql.ErrorOnPath(ctx, err) +} 
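+
+// The O-prefixed (optional) marshalers below differ from their N-prefixed
+// (non-null) counterparts above: they perform no non-null enforcement,
+// returning graphql.Null for nil pointers and nil slices instead of raising
+// a field error.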
+ +func (ec *executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + _ = sel + _ = ctx + res := graphql.MarshalBoolean(v) + return res +} + +func (ec *executionContext) unmarshalOBoolean2ᚖbool(ctx context.Context, v any) (*bool, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalBoolean(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOBoolean2ᚖbool(ctx context.Context, sel ast.SelectionSet, v *bool) graphql.Marshaler { + if v == nil { + return graphql.Null + } + _ = sel + _ = ctx + res := graphql.MarshalBoolean(*v) + return res +} + +func (ec *executionContext) unmarshalOFloat2ᚖfloat64(ctx context.Context, v any) (*float64, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalFloatContext(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOFloat2ᚖfloat64(ctx context.Context, sel ast.SelectionSet, v *float64) graphql.Marshaler { + if v == nil { + return graphql.Null + } + _ = sel + res := graphql.MarshalFloatContext(*v) + return graphql.WrapContextMarshaler(ctx, res) +} + +func (ec *executionContext) unmarshalOString2string(ctx context.Context, v any) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + _ = sel + _ = ctx + res := graphql.MarshalString(v) + return res +} + +func (ec *executionContext) unmarshalOString2ᚖstring(ctx context.Context, v any) (*string, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalString(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOString2ᚖstring(ctx context.Context, sel ast.SelectionSet, v *string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + _ = sel + _ = ctx + res := graphql.MarshalString(*v) + return res +} + +func (ec *executionContext) marshalO_Entity2githubᚗcomᚋ99designsᚋgqlgenᚋpluginᚋfederationᚋfedruntimeᚐEntity(ctx context.Context, sel ast.SelectionSet, v fedruntime.Entity) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.__Entity(ctx, sel, v) +} + +func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐFieldᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Field) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + 
isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx context.Context, sel ast.SelectionSet, v *introspection.Schema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.___Schema(ctx, sel, v) +} + +func (ec *executionContext) marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +// endregion ***************************** type.gotpl ***************************** diff --git a/execution/searchtesting/productdetails/graph/handler.go b/execution/searchtesting/productdetails/graph/handler.go new file mode 100644 index 0000000000..e0c2d8a0c0 --- /dev/null +++ b/execution/searchtesting/productdetails/graph/handler.go @@ -0,0 +1,18 @@ +package graph + +import ( + "net/http" + + "github.com/99designs/gqlgen/graphql/handler" + 
"github.com/99designs/gqlgen/graphql/handler/extension" + "github.com/99designs/gqlgen/graphql/handler/transport" + + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/productdetails/graph/generated" +) + +func GraphQLEndpointHandler() http.Handler { + srv := handler.New(generated.NewExecutableSchema(generated.Config{Resolvers: &Resolver{}})) + srv.AddTransport(transport.POST{}) + srv.Use(extension.Introspection{}) + return srv +} diff --git a/execution/searchtesting/productdetails/graph/model/models_gen.go b/execution/searchtesting/productdetails/graph/model/models_gen.go new file mode 100644 index 0000000000..a566c5b884 --- /dev/null +++ b/execution/searchtesting/productdetails/graph/model/models_gen.go @@ -0,0 +1,25 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. + +package model + +type Product struct { + ID string `json:"id"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + Category *string `json:"category,omitempty"` + Price *float64 `json:"price,omitempty"` + InStock *bool `json:"inStock,omitempty"` + Reviews []*Review `json:"reviews"` + Rating *float64 `json:"rating,omitempty"` + Manufacturer *string `json:"manufacturer,omitempty"` +} + +func (Product) IsEntity() {} + +type Query struct { +} + +type Review struct { + Text string `json:"text"` + Stars int `json:"stars"` +} diff --git a/execution/searchtesting/productdetails/graph/resolver.go b/execution/searchtesting/productdetails/graph/resolver.go new file mode 100644 index 0000000000..7b272f364d --- /dev/null +++ b/execution/searchtesting/productdetails/graph/resolver.go @@ -0,0 +1,3 @@ +package graph + +type Resolver struct{} diff --git a/execution/searchtesting/productdetails/graph/schema.graphqls b/execution/searchtesting/productdetails/graph/schema.graphqls new file mode 100644 index 0000000000..80d33cb30d --- /dev/null +++ b/execution/searchtesting/productdetails/graph/schema.graphqls @@ -0,0 +1,16 @@ +type Product @key(fields: "id") { + id: ID! + name: String + description: String + category: String + price: Float + inStock: Boolean + reviews: [Review!]! + rating: Float + manufacturer: String +} + +type Review { + text: String! + stars: Int! +} diff --git a/execution/searchtesting/productdetails/handler.go b/execution/searchtesting/productdetails/handler.go new file mode 100644 index 0000000000..7890111c61 --- /dev/null +++ b/execution/searchtesting/productdetails/handler.go @@ -0,0 +1,12 @@ +//go:generate go run github.com/99designs/gqlgen generate +package productdetails + +import ( + "net/http" + + "github.com/wundergraph/graphql-go-tools/execution/searchtesting/productdetails/graph" +) + +func Handler() http.Handler { + return graph.GraphQLEndpointHandler() +} diff --git a/execution/searchtesting/qdrant_test.go b/execution/searchtesting/qdrant_test.go new file mode 100644 index 0000000000..fa0cd988fb --- /dev/null +++ b/execution/searchtesting/qdrant_test.go @@ -0,0 +1,145 @@ +//go:build integration + +package searchtesting + +import ( + "context" + "encoding/json" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/qdrant" +) + +const qdrantConfigSDL = ` +extend schema @index(name: "products", backend: "qdrant", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! 
+ name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func startQdrant(t *testing.T) (string, int) { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "qdrant/qdrant:v1.12.5", + ExposedPorts: []string{"6333/tcp"}, + WaitingFor: wait.ForHTTP("/healthz").WithPort("6333/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start qdrant container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate qdrant container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "6333/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return host, port.Int() +} + +func TestQdrant(t *testing.T) { + t.Parallel() + host, port := startQdrant(t) + + makeSetup := func(name, configSDL string) BackendSetup { + return BackendSetup{ + Name: name, + ConfigSDL: configSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := qdrant.NewFactory() + cfgJSON, err := json.Marshal(qdrant.Config{Host: host, Port: port}) + if err != nil { + t.Fatalf("marshal qdrant config: %v", err) + } + idx, err := factory.CreateIndex(context.Background(), name, schema, cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: false, + HasFacets: false, + }, + Hooks: BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(1 * time.Second) + }, + }, + } + } + + t.Run("standard", func(t *testing.T) { + t.Parallel() + setup := makeSetup("qdrant", qdrantConfigSDL) + setup.ExpectedResponses = map[string]string{ + "supergraph_sdl": expectedSupergraphSDL, + "basic_search_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}],"totalCount":4}}}`, + "filter_keyword_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","rating":4.7}},{"node":{"id":"1","name":"Running Shoes","rating":4.5}},{"node":{"id":"2","name":"Basketball Shoes","rating":4.2}}]}}}`, + "filter_boolean": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "filter_numeric_range": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}}],"totalCount":2}}}`, + "filter_AND": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`, + "filter_OR": 
`{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_NOT": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "sort_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}]}}}`, + "pagination_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","reviews":[{"text":"Nice belt","stars":3}]}},{"node":{"id":"1","reviews":[{"text":"Great shoes","stars":5}]}}],"totalCount":2}}}`, + "score_and_totalCount": `{"data":{"searchProducts":{"hits":[{"score":0,"node":{"id":"4","manufacturer":"Smartwool"}},{"score":0,"node":{"id":"3","manufacturer":"Gucci"}},{"score":0,"node":{"id":"1","manufacturer":"Nike"}},{"score":0,"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`, + } + RunAllScenarios(t, setup) + }) + + t.Run("vector", func(t *testing.T) { + t.Parallel() + RunVectorScenarios(t, VectorBackendSetup{ + BackendSetup: makeSetup("qdrant_vector", vectorConfigSDL("qdrant", "{}")), + Embedder: &MockEmbedder{}, + }) + }) + + t.Run("date", func(t *testing.T) { + t.Parallel() + setup := makeSetup("qdrant_date", dateConfigSDL("qdrant", "{}")) + setup.ExpectedResponses = map[string]string{ + "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`, + "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "date_gt_lt": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball 
Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + } + RunDateScenarios(t, setup) + }) +} diff --git a/execution/searchtesting/response.go b/execution/searchtesting/response.go new file mode 100644 index 0000000000..91a184f53a --- /dev/null +++ b/execution/searchtesting/response.go @@ -0,0 +1,104 @@ +package searchtesting + +import ( + "bytes" + "context" + "testing" + + "github.com/wundergraph/astjson" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astnormalization" + "github.com/wundergraph/graphql-go-tools/v2/pkg/asttransform" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astvalidation" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/plan" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/postprocess" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astparser" + "github.com/wundergraph/graphql-go-tools/v2/pkg/operationreport" +) + +type testPipeline struct { + PlanConfig plan.Configuration + SupergraphDef string +} + +func executeQuery(t *testing.T, pipeline *testPipeline, query string, variables string) string { + t.Helper() + + def, parseReport := astparser.ParseGraphqlDocumentString(pipeline.SupergraphDef) + if parseReport.HasErrors() { + t.Fatalf("parse supergraph definition: %s", parseReport.Error()) + } + op, parseReport := astparser.ParseGraphqlDocumentString(query) + if parseReport.HasErrors() { + t.Fatalf("parse query: %s", parseReport.Error()) + } + + // Set variables before normalization so that inline arguments (e.g. query: "shoes") + // are extracted into the variables map alongside explicit variables. 
+ if variables != "" { + op.Input.Variables = []byte(variables) + } + + if err := asttransform.MergeDefinitionWithBaseSchema(&def); err != nil { + t.Fatalf("MergeDefinitionWithBaseSchema: %v", err) + } + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + if report.HasErrors() { + t.Fatalf("normalize: %s", report.Error()) + } + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + if report.HasErrors() { + t.Fatalf("validate: %s", report.Error()) + } + + p, err := plan.NewPlanner(pipeline.PlanConfig) + if err != nil { + t.Fatalf("NewPlanner: %v", err) + } + + executionPlan := p.Plan(&op, &def, "", report) + if report.HasErrors() { + t.Fatalf("plan: %s", report.Error()) + } + + proc := postprocess.NewProcessor() + proc.Process(executionPlan) + + syncPlan, ok := executionPlan.(*plan.SynchronousResponsePlan) + if !ok { + t.Fatalf("expected SynchronousResponsePlan, got %T", executionPlan) + } + + if syncPlan.Response.Info == nil { + syncPlan.Response.Info = &resolve.GraphQLResponseInfo{ + OperationType: ast.OperationTypeQuery, + } + } + + resolver := resolve.New(context.Background(), resolve.ResolverOptions{ + MaxConcurrency: 32, + PropagateSubgraphErrors: true, + }) + + ctx := resolve.NewContext(context.Background()) + // Use op.Input.Variables (post-normalization) which includes both explicit + // variables and any inline arguments extracted during normalization. + if len(op.Input.Variables) > 0 { + ctx.Variables = astjson.MustParseBytes(op.Input.Variables) + } + + buf := &bytes.Buffer{} + _, err = resolver.ResolveGraphQLResponse(ctx, syncPlan.Response, nil, buf) + if err != nil { + t.Fatalf("ResolveGraphQLResponse: %v", err) + } + + return buf.String() +} diff --git a/execution/searchtesting/shareddata/products.go b/execution/searchtesting/shareddata/products.go new file mode 100644 index 0000000000..63016e820c --- /dev/null +++ b/execution/searchtesting/shareddata/products.go @@ -0,0 +1,87 @@ +package shareddata + +type Review struct { + Text string + Stars int +} + +type GeoLocation struct { + Lat float64 + Lon float64 +} + +type TestProduct struct { + ID string + Name string + Description string + Category string + Price float64 + InStock bool + Reviews []Review + Rating float64 + Manufacturer string + Location *GeoLocation // optional geo coordinates for the store + CreatedAt string // ISO 8601 date, e.g. "2024-01-15" + UpdatedAt string // RFC 3339 datetime, e.g. 
"2024-01-15T10:30:00Z" +} + +func Products() []TestProduct { + return []TestProduct{ + { + ID: "1", + Name: "Running Shoes", + Description: "Great for jogging and marathons", + Category: "Footwear", + Price: 89.99, + InStock: true, + Reviews: []Review{{Text: "Great shoes", Stars: 5}}, + Rating: 4.5, + Manufacturer: "Nike", + Location: &GeoLocation{Lat: 40.7128, Lon: -74.0060}, // New York + CreatedAt: "2024-01-15", + UpdatedAt: "2024-01-15T10:30:00Z", + }, + { + ID: "2", + Name: "Basketball Shoes", + Description: "High-top basketball sneakers", + Category: "Footwear", + Price: 129.99, + InStock: true, + Reviews: []Review{{Text: "Good grip", Stars: 4}}, + Rating: 4.2, + Manufacturer: "Adidas", + Location: &GeoLocation{Lat: 40.7580, Lon: -73.9855}, // Midtown Manhattan (~5km from #1) + CreatedAt: "2024-03-20", + UpdatedAt: "2024-03-20T14:00:00Z", + }, + { + ID: "3", + Name: "Leather Belt", + Description: "Genuine leather dress belt", + Category: "Accessories", + Price: 35.00, + InStock: false, + Reviews: []Review{{Text: "Nice belt", Stars: 3}}, + Rating: 3.8, + Manufacturer: "Gucci", + Location: &GeoLocation{Lat: 34.0522, Lon: -118.2437}, // Los Angeles (~3,940km from #1) + CreatedAt: "2024-06-01", + UpdatedAt: "2024-06-01T09:00:00Z", + }, + { + ID: "4", + Name: "Wool Socks", + Description: "Warm wool socks for winter", + Category: "Footwear", + Price: 12.99, + InStock: true, + Reviews: []Review{{Text: "Warm socks", Stars: 5}}, + Rating: 4.7, + Manufacturer: "Smartwool", + Location: &GeoLocation{Lat: 51.5074, Lon: -0.1278}, // London (~5,570km from #1) + CreatedAt: "2024-09-10", + UpdatedAt: "2024-09-10T16:45:00Z", + }, + } +} diff --git a/execution/searchtesting/subscription_test.go b/execution/searchtesting/subscription_test.go new file mode 100644 index 0000000000..9e2d9a3def --- /dev/null +++ b/execution/searchtesting/subscription_test.go @@ -0,0 +1,222 @@ +package searchtesting + +import ( + "context" + "encoding/json" + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/astparser" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/bleve" +) + +// mockSubscriber implements search_datasource.GraphQLSubscriber for tests. +type mockSubscriber struct { + ch chan []byte +} + +func newMockSubscriber() *mockSubscriber { + return &mockSubscriber{ch: make(chan []byte, 10)} +} + +func (m *mockSubscriber) Subscribe(_ context.Context, _ string) (<-chan []byte, error) { + return m.ch, nil +} + +// mockExecutor implements search_datasource.GraphQLExecutor that returns pre-configured data. +type mockExecutor struct { + response []byte +} + +func (m *mockExecutor) Execute(_ context.Context, _ string) ([]byte, error) { + return m.response, nil +} + +const subscriptionConfigSDL = ` +extend schema + @index(name: "products", backend: "bleve", config: "{}") + @populate(index: "products", entity: "Product", path: "data.products", query: "{ products { id name description category price inStock } }") + @subscribe(index: "products", entity: "Product", path: "data.productUpdated", deletionPath: "data.productDeleted", subscription: "subscription { productUpdated { id name description category price inStock } productDeleted { id } }") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! 
+ name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func TestSubscriptionUpdatesIndex(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // Set up registries. + indexRegistry := searchindex.NewIndexFactoryRegistry() + indexRegistry.Register("bleve", bleve.NewFactory()) + embedderRegistry := searchindex.NewEmbedderRegistry() + + // Parse config. + parsedConfig := parseConfig(t, subscriptionConfigSDL) + + // Provide initial population data (2 products). + populationResponse, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "products": []map[string]any{ + {"id": "1", "name": "Running Shoes", "description": "Great for jogging", "category": "Footwear", "price": 89.99, "inStock": true}, + {"id": "2", "name": "Basketball Shoes", "description": "High-top sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + }, + }) + + executor := &mockExecutor{response: populationResponse} + subscriber := newMockSubscriber() + + // Create Manager and start it. + factory := search_datasource.NewFactory(ctx, indexRegistry, embedderRegistry) + manager := search_datasource.NewManager(factory, indexRegistry, embedderRegistry, executor, parsedConfig) + manager.SetSubscriber(subscriber) + + if err := manager.Start(ctx); err != nil { + t.Fatalf("Manager.Start: %v", err) + } + defer manager.Stop() + + // Verify initial population: 2 documents indexed. + idx, ok := manager.GetIndex("products") + if !ok { + t.Fatal("index 'products' not found after Start") + } + assertSearchCount(t, idx, "shoes", 2) + + // --- Test 1: Subscription upsert adds a new document --- + upsertEvent, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "productUpdated": map[string]any{ + "id": "3", "name": "Leather Belt", "description": "Genuine leather", "category": "Accessories", "price": 35.0, "inStock": false, + }, + }, + }) + subscriber.ch <- upsertEvent + + // Wait for the event to be processed (empty TextQuery = match all in bleve). + waitForCondition(t, 2*time.Second, func() bool { + result, err := idx.Search(ctx, searchindex.SearchRequest{Limit: 10}) + return err == nil && result.TotalCount == 3 + }) + assertSearchCount(t, idx, "", 3) + + // Verify the new document is searchable by name. 
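+    // (Assumes bleve's default analyzer lowercases terms, so the lowercase
+    // query "leather" matches the stored value "Leather".)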
+ result, err := idx.Search(ctx, searchindex.SearchRequest{TextQuery: "leather", Limit: 10}) + if err != nil { + t.Fatalf("search for 'leather': %v", err) + } + if len(result.Hits) == 0 { + t.Fatal("expected at least 1 hit for 'leather', got 0") + } + foundBelt := false + for _, hit := range result.Hits { + if hit.Identity.KeyFields["id"] == "3" { + foundBelt = true + break + } + } + if !foundBelt { + t.Fatalf("expected to find product id=3 in results, hits: %+v", result.Hits) + } + + // --- Test 2: Subscription upsert updates an existing document --- + updateEvent, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "productUpdated": map[string]any{ + "id": "1", "name": "Trail Running Shoes", "description": "Great for trail running", "category": "Footwear", "price": 99.99, "inStock": true, + }, + }, + }) + subscriber.ch <- updateEvent + + waitForCondition(t, 2*time.Second, func() bool { + result, err := idx.Search(ctx, searchindex.SearchRequest{TextQuery: "trail", Limit: 10}) + return err == nil && len(result.Hits) > 0 + }) + + result, err = idx.Search(ctx, searchindex.SearchRequest{TextQuery: "trail", Limit: 10}) + if err != nil { + t.Fatalf("search for 'trail': %v", err) + } + if len(result.Hits) == 0 { + t.Fatal("expected at least 1 hit for 'trail' after update, got 0") + } + + // Total count should still be 3 (upsert, not insert). + assertSearchCount(t, idx, "", 3) + + // --- Test 3: Subscription deletion removes a document --- + deleteEvent, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "productDeleted": map[string]any{ + "id": "3", + }, + }, + }) + subscriber.ch <- deleteEvent + + waitForCondition(t, 2*time.Second, func() bool { + result, err := idx.Search(ctx, searchindex.SearchRequest{Limit: 10}) + return err == nil && result.TotalCount == 2 + }) + assertSearchCount(t, idx, "", 2) + + // Verify the deleted document is no longer searchable. + result, err = idx.Search(ctx, searchindex.SearchRequest{TextQuery: "leather", Limit: 10}) + if err != nil { + t.Fatalf("search for 'leather' after delete: %v", err) + } + for _, hit := range result.Hits { + if hit.Identity.KeyFields["id"] == "3" { + t.Fatal("product id=3 should have been deleted but was found in search results") + } + } +} + +// parseConfig parses the config SDL into a ParsedConfig. +func parseConfig(t *testing.T, sdl string) *search_datasource.ParsedConfig { + t.Helper() + doc, parseReport := astparser.ParseGraphqlDocumentString(sdl) + if parseReport.HasErrors() { + t.Fatalf("parse config SDL: %s", parseReport.Error()) + } + config, err := search_datasource.ParseConfigSchema(&doc) + if err != nil { + t.Fatalf("ParseConfigSchema: %v", err) + } + return config +} + +// assertSearchCount verifies the total number of documents matching a query. +func assertSearchCount(t *testing.T, idx searchindex.Index, query string, expected int) { + t.Helper() + result, err := idx.Search(context.Background(), searchindex.SearchRequest{TextQuery: query, Limit: 10}) + if err != nil { + t.Fatalf("search failed: %v", err) + } + if result.TotalCount != expected { + t.Fatalf("expected %d results, got %d", expected, result.TotalCount) + } +} + +// waitForCondition polls the condition function until it returns true or the timeout expires. 
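+// A testify-based equivalent would be assert.Eventually(t, condition, timeout, 50*time.Millisecond);
+// the hand-rolled loop below avoids the extra dependency.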
+func waitForCondition(t *testing.T, timeout time.Duration, condition func() bool) {
+    t.Helper()
+    deadline := time.Now().Add(timeout)
+    for time.Now().Before(deadline) {
+        if condition() {
+            return
+        }
+        time.Sleep(50 * time.Millisecond)
+    }
+    t.Fatal("condition not met within timeout")
+}
diff --git a/execution/searchtesting/testdata.go b/execution/searchtesting/testdata.go
new file mode 100644
index 0000000000..d0b1b3d51a
--- /dev/null
+++ b/execution/searchtesting/testdata.go
@@ -0,0 +1,67 @@
+package searchtesting
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/wundergraph/graphql-go-tools/execution/searchtesting/shareddata"
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+)
+
+func testProducts() []searchindex.EntityDocument {
+    var docs []searchindex.EntityDocument
+    for _, p := range shareddata.Products() {
+        docs = append(docs, searchindex.EntityDocument{
+            Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": p.ID}},
+            Fields:   map[string]any{"name": p.Name, "description": p.Description, "category": p.Category, "price": p.Price, "inStock": p.InStock},
+        })
+    }
+    return docs
+}
+
+// testGeoProducts returns the 4 standard test products with geo locations.
+func testGeoProducts() []searchindex.EntityDocument {
+    var docs []searchindex.EntityDocument
+    for _, p := range shareddata.Products() {
+        fields := map[string]any{"name": p.Name, "description": p.Description, "category": p.Category, "price": p.Price, "inStock": p.InStock}
+        if p.Location != nil {
+            fields["location"] = map[string]any{"lat": p.Location.Lat, "lon": p.Location.Lon}
+        }
+        docs = append(docs, searchindex.EntityDocument{
+            Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": p.ID}},
+            Fields:   fields,
+        })
+    }
+    return docs
+}
+
+// testDateProducts returns the 4 standard test products with date/datetime fields.
+func testDateProducts() []searchindex.EntityDocument {
+    var docs []searchindex.EntityDocument
+    for _, p := range shareddata.Products() {
+        fields := map[string]any{"name": p.Name, "description": p.Description, "category": p.Category, "price": p.Price, "inStock": p.InStock}
+        fields["createdAt"] = p.CreatedAt
+        fields["updatedAt"] = p.UpdatedAt
+        docs = append(docs, searchindex.EntityDocument{
+            Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": p.ID}},
+            Fields:   fields,
+        })
+    }
+    return docs
+}
+
+// testVectorProducts returns the same 4 test products with pre-computed embedding vectors.
+// The template matches the @embedding directive: "{{name}}. {{description}}".
+func testVectorProducts(embedder searchindex.Embedder) []searchindex.EntityDocument {
+    docs := testProducts()
+    for i := range docs {
+        name, _ := docs[i].Fields["name"].(string)
+        desc, _ := docs[i].Fields["description"].(string)
+        text := fmt.Sprintf("%s. %s", name, desc)
+        vec, _ := embedder.EmbedSingle(context.Background(), text)
+        docs[i].Vectors = map[string][]float32{
+            "_embedding": vec,
+        }
+    }
+    return docs
+}
diff --git a/execution/searchtesting/typesense_test.go b/execution/searchtesting/typesense_test.go
new file mode 100644
index 0000000000..12cfcdadcd
--- /dev/null
+++ b/execution/searchtesting/typesense_test.go
@@ -0,0 +1,161 @@
+//go:build integration
+
+package searchtesting
+
+import (
+    "context"
+    "encoding/json"
+    "testing"
+    "time"
+
+    "github.com/testcontainers/testcontainers-go"
+    "github.com/testcontainers/testcontainers-go/wait"
+
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+    "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/typesense"
+)
+
+const typesenseConfigSDL = `
+extend schema @index(name: "products", backend: "typesense", config: "{}")
+
+type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") {
+  id: ID!
+  name: String @indexed(type: TEXT, filterable: true, sortable: true)
+  description: String @indexed(type: TEXT)
+  category: String @indexed(type: KEYWORD, filterable: true, sortable: true)
+  price: Float @indexed(type: NUMERIC, filterable: true, sortable: true)
+  inStock: Boolean @indexed(type: BOOL, filterable: true)
+}
+`
+
+func startTypesense(t *testing.T) (string, int) {
+    t.Helper()
+    ctx := context.Background()
+
+    req := testcontainers.ContainerRequest{
+        Image:        "typesense/typesense:27.1",
+        ExposedPorts: []string{"8108/tcp"},
+        Env: map[string]string{
+            "TYPESENSE_API_KEY":  "test-api-key",
+            "TYPESENSE_DATA_DIR": "/data",
+        },
+        Tmpfs: map[string]string{"/data": ""},
+        WaitingFor: wait.ForHTTP("/health").
+            WithPort("8108/tcp").
+            WithStartupTimeout(60 * time.Second),
+    }
+
+    container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
+        ContainerRequest: req,
+        Started:          true,
+    })
+    if err != nil {
+        t.Fatalf("failed to start typesense container: %v", err)
+    }
+    t.Cleanup(func() {
+        if err := container.Terminate(ctx); err != nil {
+            t.Logf("failed to terminate container: %v", err)
+        }
+    })
+
+    host, err := container.Host(ctx)
+    if err != nil {
+        t.Fatalf("failed to get container host: %v", err)
+    }
+    port, err := container.MappedPort(ctx, "8108/tcp")
+    if err != nil {
+        t.Fatalf("failed to get mapped port: %v", err)
+    }
+
+    return host, port.Int()
+}
+
+func TestTypesense(t *testing.T) {
+    t.Parallel()
+    host, port := startTypesense(t)
+
+    makeSetup := func(name, configSDL string) BackendSetup {
+        return BackendSetup{
+            Name:      name,
+            ConfigSDL: configSDL,
+            CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index {
+                t.Helper()
+                factory := typesense.NewFactory()
+                cfg := typesense.Config{
+                    Host:     host,
+                    Port:     port,
+                    APIKey:   "test-api-key",
+                    Protocol: "http",
+                }
+                cfgJSON, err := json.Marshal(cfg)
+                if err != nil {
+                    t.Fatalf("marshal config: %v", err)
+                }
+                idx, err := factory.CreateIndex(context.Background(), name, schema, cfgJSON)
+                if err != nil {
+                    t.Fatalf("CreateIndex: %v", err)
+                }
+                t.Cleanup(func() { idx.Close() })
+                return idx
+            },
+            Caps: BackendCaps{
+                HasTextSearch: true,
+                HasFacets:     true,
+            },
+        }
+    }
+
+    t.Run("standard", func(t *testing.T) {
+        t.Parallel()
+        setup := makeSetup("typesense", typesenseConfigSDL)
+        setup.ExpectedResponses = map[string]string{
+            "supergraph_sdl": expectedSupergraphSDL,
+            "basic_search_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}],"totalCount":2}}}`,
+            "filter_keyword_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","rating":4.7}},{"node":{"id":"1","name":"Running Shoes","rating":4.5}},{"node":{"id":"2","name":"Basketball Shoes","rating":4.2}}]}}}`,
+            "filter_boolean": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`,
+            "filter_numeric_range": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}}],"totalCount":2}}}`,
+            "filter_AND": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`,
+            "filter_OR": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":2}}}`,
+            "filter_NOT": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`,
+            "sort_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}]}}}`,
+            "pagination_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","reviews":[{"text":"Warm socks","stars":5}]}},{"node":{"id":"3","reviews":[{"text":"Nice belt","stars":3}]}}],"totalCount":4}}}`,
+            "score_and_totalCount": `{"data":{"searchProducts":{"hits":[{"score":0,"node":{"id":"4","manufacturer":"Smartwool"}},{"score":0,"node":{"id":"3","manufacturer":"Gucci"}},{"score":0,"node":{"id":"1","manufacturer":"Nike"}},{"score":0,"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`,
+            "facets_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"facets":[{"field":"category","values":[{"value":"Footwear","count":3},{"value":"Accessories","count":1}]}]}}}`,
+        }
+        RunAllScenarios(t, setup)
+    })
+
+    t.Run("boosting", func(t *testing.T) {
+        t.Parallel()
+        RunBoostingScenarios(t, makeSetup("typesense_boosting", boostConfigSDL("typesense", "{}")))
+    })
+
+    t.Run("fuzzy", func(t *testing.T) {
+        t.Parallel()
+        RunFuzzyScenarios(t, makeSetup("typesense_fuzzy", typesenseConfigSDL))
+    })
+
+    t.Run("suggest", func(t *testing.T) {
+        t.Parallel()
+        RunSuggestScenarios(t, makeSetup("typesense_suggest", suggestConfigSDL("typesense", "{}")))
+    })
+
+    t.Run("date", func(t *testing.T) {
+        t.Parallel()
+        setup := makeSetup("typesense_date", dateConfigSDL("typesense", "{}"))
+        setup.ExpectedResponses = map[string]string{
+            "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`,
+            "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`,
+            "date_gt_lt":
`{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + } + RunDateScenarios(t, setup) + }) +} diff --git a/execution/searchtesting/weaviate_test.go b/execution/searchtesting/weaviate_test.go new file mode 100644 index 0000000000..f0ef8db7d6 --- /dev/null +++ b/execution/searchtesting/weaviate_test.go @@ -0,0 +1,153 @@ +//go:build integration + +package searchtesting + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/weaviate" +) + +const weaviateConfigSDL = ` +extend schema @index(name: "products", backend: "weaviate", config: "{}") + +type Product @key(fields: "id") @searchable(index: "products", searchField: "searchProducts") { + id: ID! 
+ name: String @indexed(type: TEXT, filterable: true, sortable: true) + description: String @indexed(type: TEXT) + category: String @indexed(type: KEYWORD, filterable: true, sortable: true) + price: Float @indexed(type: NUMERIC, filterable: true, sortable: true) + inStock: Boolean @indexed(type: BOOL, filterable: true) +} +` + +func startWeaviate(t *testing.T) string { + t.Helper() + ctx := context.Background() + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: "semitechnologies/weaviate:1.27.0", + ExposedPorts: []string{"8080/tcp"}, + Env: map[string]string{ + "AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED": "true", + "PERSISTENCE_DATA_PATH": "/var/lib/weaviate", + "DEFAULT_VECTORIZER_MODULE": "none", + "CLUSTER_HOSTNAME": "node1", + }, + WaitingFor: wait.ForHTTP("/v1/.well-known/ready"). + WithPort("8080/tcp"). + WithStartupTimeout(60 * time.Second), + }, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start weaviate container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "8080/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return fmt.Sprintf("%s:%s", host, port.Port()) +} + +func TestWeaviate(t *testing.T) { + t.Parallel() + weaviateHost := startWeaviate(t) + + makeSetup := func(name, configSDL string) BackendSetup { + return BackendSetup{ + Name: name, + ConfigSDL: configSDL, + CreateIndex: func(t *testing.T, name string, schema searchindex.IndexConfig, _ []byte) searchindex.Index { + t.Helper() + factory := weaviate.NewFactory() + configJSON := []byte(fmt.Sprintf(`{"host":%q,"scheme":"http"}`, weaviateHost)) + idx, err := factory.CreateIndex(context.Background(), name, schema, configJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx + }, + Caps: BackendCaps{ + HasTextSearch: true, + HasFacets: false, + }, + Hooks: BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(1 * time.Second) + }, + }, + } + } + + t.Run("standard", func(t *testing.T) { + t.Parallel() + setup := makeSetup("weaviate", weaviateConfigSDL) + setup.ExpectedResponses = map[string]string{ + "supergraph_sdl": expectedSupergraphSDL, + "basic_search_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_keyword_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","rating":4.7}},{"node":{"id":"1","name":"Running Shoes","rating":4.5}},{"node":{"id":"2","name":"Basketball Shoes","rating":4.2}}]}}}`, + "filter_boolean": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "filter_numeric_range": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"1","manufacturer":"Nike"}}],"totalCount":2}}}`, + "filter_AND": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","manufacturer":"Smartwool"}},{"node":{"id":"1","manufacturer":"Nike"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":3}}}`, + "filter_OR": 
`{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}},{"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":2}}}`, + "filter_NOT": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "sort_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","price":12.99,"manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","price":35,"manufacturer":"Gucci"}},{"node":{"id":"1","name":"Running Shoes","price":89.99,"manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","price":129.99,"manufacturer":"Adidas"}}]}}}`, + "pagination_with_entity_join": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","reviews":[{"text":"Nice belt","stars":3}]}},{"node":{"id":"1","reviews":[{"text":"Great shoes","stars":5}]}}],"totalCount":2}}}`, + "score_and_totalCount": `{"data":{"searchProducts":{"hits":[{"score":0,"node":{"id":"4","manufacturer":"Smartwool"}},{"score":0,"node":{"id":"3","manufacturer":"Gucci"}},{"score":0,"node":{"id":"1","manufacturer":"Nike"}},{"score":0,"node":{"id":"2","manufacturer":"Adidas"}}],"totalCount":4}}}`, + } + RunAllScenarios(t, setup) + }) + + t.Run("vector", func(t *testing.T) { + t.Parallel() + RunVectorScenarios(t, VectorBackendSetup{ + BackendSetup: makeSetup("weaviate_vector", vectorConfigSDL("weaviate", "{}")), + Embedder: &MockEmbedder{}, + }) + }) + + t.Run("boosting", func(t *testing.T) { + t.Parallel() + RunBoostingScenarios(t, makeSetup("weaviate_boosting", boostConfigSDL("weaviate", "{}"))) + }) + + t.Run("date", func(t *testing.T) { + t.Parallel() + setup := makeSetup("weaviate_date", dateConfigSDL("weaviate", "{}")) + setup.ExpectedResponses = map[string]string{ + "date_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}],"totalCount":1}}}`, + "date_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "date_gt_lt": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":2}}}`, + "date_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":1}}}`, + "datetime_eq_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}}],"totalCount":1}}}`, + "datetime_range_gte_lte": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}}],"totalCount":3}}}`, + "datetime_after_before": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":1}}}`, + "date_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_sort_desc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"4","name":"Wool 
Socks","manufacturer":"Smartwool"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}}]}}}`, + "datetime_sort_asc": `{"data":{"searchProducts":{"hits":[{"node":{"id":"1","name":"Running Shoes","manufacturer":"Nike"}},{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"3","name":"Leather Belt","manufacturer":"Gucci"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}]}}}`, + "date_combined_filter": `{"data":{"searchProducts":{"hits":[{"node":{"id":"2","name":"Basketball Shoes","manufacturer":"Adidas"}},{"node":{"id":"4","name":"Wool Socks","manufacturer":"Smartwool"}}],"totalCount":2}}}`, + } + RunDateScenarios(t, setup) + }) +} diff --git a/go.work.sum b/go.work.sum index 1aecd8d220..c5b2ec8f42 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,17 +1,31 @@ cel.dev/expr v0.19.1 h1:NciYrtDRIR0lNCnH1LFJegdjspNx9fI59O7TWcua/W4= cel.dev/expr v0.19.1/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= +cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= +cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= +cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +codeberg.org/go-fonts/liberation v0.5.0 h1:SsKoMO1v1OZmzkG2DY+7ZkCL9U+rrWI09niOLfQ5Bo0= +codeberg.org/go-fonts/liberation v0.5.0/go.mod h1:zS/2e1354/mJ4pGzIIaEtm/59VFCFnYC7YV6YdGl5GU= +codeberg.org/go-latex/latex v0.1.0 h1:hoGO86rIbWVyjtlDLzCqZPjNykpWQ9YuTZqAzPcfL3c= +codeberg.org/go-latex/latex v0.1.0/go.mod h1:LA0q/AyWIYrqVd+A9Upkgsb+IqPcmSTKc9Dny04MHMw= +codeberg.org/go-pdf/fpdf v0.10.0 h1:u+w669foDDx5Ds43mpiiayp40Ov6sZalgcPMDBcZRd4= +codeberg.org/go-pdf/fpdf v0.10.0/go.mod h1:Y0DGRAdZ0OmnZPvjbMp/1bYxmIPxm0ws4tfoPOc4LjU= connectrpc.com/connect v1.16.2 h1:ybd6y+ls7GOlb7Bh5C8+ghA6SvCBajHwxssO2CGFjqE= connectrpc.com/connect v1.16.2/go.mod h1:n2kgwskMHXC+lVqb18wngEpF95ldBHXjZYJussz5FRc= git.sr.ht/~sbinet/gg v0.3.1 h1:LNhjNn8DerC8f9DHLz6lS0YYul/b602DUxDgGkd/Aik= git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc= +git.sr.ht/~sbinet/gg v0.6.0 h1:RIzgkizAk+9r7uPzf/VfbJHBMKUr0F5hRFxTUGMnt38= +git.sr.ht/~sbinet/gg v0.6.0/go.mod h1:uucygbfC9wVPQIfrmwM2et0imr8L7KQWywX0xpFMm94= github.com/99designs/gqlgen v0.17.81 h1:kCkN/xVyRb5rEQpuwOHRTYq83i0IuTQg9vdIiwEerTs= github.com/99designs/gqlgen v0.17.81/go.mod h1:vgNcZlLwemsUhYim4dC1pvFP5FX0pr2Y+uYUoHFb1ig= github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 h1:3c8yed4lgqTt+oTQ+JNMDo+F4xprBf+O/il4ZC0nRLw= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0/go.mod h1:obipzmGjfSjam60XLwGfqUkJsfiheAl+TUjG+4yzyPM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 h1:UQUsRi8WTzhZntp5313l+CHIAT95ojUI2lpP/ExlZa4= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0/go.mod h1:Cz6ft6Dkn3Et6l2v2a9/RpN7epQ1GtDlO6lj8bEcOvw= 
github.com/KimMachineGun/automemlimit v0.6.1 h1:ILa9j1onAAMadBsyyUJv5cack8Y1WT26yLj/V+ulKp8= github.com/KimMachineGun/automemlimit v0.6.1/go.mod h1:T7xYht7B8r6AG/AqFcUdc7fzd2bIdBKmepfP2S1svPY= github.com/MicahParks/jwkset v0.11.0 h1:yc0zG+jCvZpWgFDFmvs8/8jqqVBG9oyIbmBtmjOhoyQ= @@ -32,10 +46,18 @@ github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/blevesearch/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:kDy+zgJFJJoJYBvdfBSiZYBbdsUL0XcjHYWezpQBGPA= +github.com/blevesearch/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:9eJDeqxJ3E7WnLebQUlPD7ZjSce7AnDb9vjGmMCbD0A= +github.com/blevesearch/goleveldb v1.0.1 h1:iAtV2Cu5s0GD1lwUiekkFHe2gTMCCNVj2foPclDLIFI= +github.com/blevesearch/goleveldb v1.0.1/go.mod h1:WrU8ltZbIp0wAoig/MHbrPCXSOLpe79nz5lv5nqfYrQ= +github.com/blevesearch/snowball v0.6.1 h1:cDYjn/NCH+wwt2UdehaLpr2e4BwLIjN4V/TdLsL+B5A= +github.com/blevesearch/snowball v0.6.1/go.mod h1:ZF0IBg5vgpeoUhnMza2v0A/z8m1cWPlwhke08LpNusg= +github.com/blevesearch/stempel v0.2.0 h1:CYzVPaScODMvgE9o+kf6D4RJ/VRomyi9uHF+PtB+Afc= +github.com/blevesearch/stempel v0.2.0/go.mod h1:wjeTHqQv+nQdbPuJ/YcvOjTInA2EIc6Ks1FoSUzSLvc= github.com/caarlos0/env/v11 v11.3.1 h1:cArPWC15hWmEt+gWk7YBi7lEXTXCvpaSdCiZE2X5mCA= github.com/caarlos0/env/v11 v11.3.1/go.mod h1:qupehSf/Y0TUTsxKywqRt/vJjN5nz6vauiYEUUr8P4U= -github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= -github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/campoy/embedmd v1.0.0 h1:V4kI2qTJJLf4J29RzI/MAt2c3Bl4dQSYPuflzwFH2hY= +github.com/campoy/embedmd v1.0.0/go.mod h1:oxyr9RCiSXg0M3VJ3ks0UGfp98BpSSGr0kpiX3MzVl8= github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g= github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= github.com/cep21/circuit/v4 v4.0.0 h1:g1AzMmRLuwCst0eccy1nGsD/CL2XKbDnaPUHVHDvVmI= @@ -49,13 +71,23 @@ github.com/cloudflare/backoff v0.0.0-20161212185259-647f3cdfc87a h1:8d1CEOF1xlde github.com/cloudflare/backoff v0.0.0-20161212185259-647f3cdfc87a/go.mod h1:rzgs2ZOiguV6/NpiDgADjRLPNyZlApIWxKpkT+X8SdY= github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3 h1:boJj011Hh+874zpIySeApCX4GeOjPl9qhRF3QuIZq+Q= github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/containerd/cgroups/v3 v3.0.2 h1:f5WFqIVSgo5IZmtTT3qVBo6TzI1ON6sycSBKkymb9L0= github.com/containerd/cgroups/v3 v3.0.2/go.mod h1:JUgITrzdFqp42uI2ryGA+ge0ap/nxzYgkGmIcetmErE= github.com/containerd/stargz-snapshotter/estargz v0.16.3 h1:7evrXtoh1mSbGj/pfRccTampEyKpjpOnS3CyiV1Ebr8= github.com/containerd/stargz-snapshotter/estargz v0.16.3/go.mod h1:uyr4BfYfOj3G9WBVE8cOlQmXAbPN9VEQpBBeJIuOipU= +github.com/containerd/typeurl/v2 v2.2.0 h1:6NBDbQzr7I5LHgp34xAXYF5DOTQDn05X58lsPEmzLso= +github.com/containerd/typeurl/v2 v2.2.0/go.mod h1:8XOOxnyatxSWuG8OfsZXVnAF4iZfedjS/8UHSPJnX4g= github.com/coreos/go-systemd/v22 v22.5.0 
h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/couchbase/ghistogram v0.1.0 h1:b95QcQTCzjTUocDXp/uMgSNQi8oj1tGwnJ4bODWZnps= +github.com/couchbase/ghistogram v0.1.0/go.mod h1:s1Jhy76zqfEecpNWJfWUiKZookAFaiGOEoyzgHt9i7k= +github.com/couchbase/moss v0.2.0 h1:VCYrMzFwEryyhRSeI+/b3tRBSeTpi/8gn5Kf6dxqn+o= +github.com/couchbase/moss v0.2.0/go.mod h1:9MaHIaRuy9pvLPUJxB8sh8OrLfyDczECVL37grCIubs= github.com/creack/pty v1.1.9 h1:uDmaGzcdjhF4i/plgjmEsriH11Y0o7RKapEf/LDaM3w= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgraph-io/ristretto/v2 v2.1.0 h1:59LjpOJLNDULHh8MC4UaegN52lC4JnO2dITsie/Pa8I= github.com/dgraph-io/ristretto/v2 v2.1.0/go.mod h1:uejeqfYXpUomfse0+lO+13ATz4TypQYLJZzBSAemuB4= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= @@ -66,8 +98,6 @@ github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBi github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8= github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo= -github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= -github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d h1:W1n4DvpzZGOISgp7wWNtraLcHtnmnTwBlJidqtMIuwQ= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= @@ -81,20 +111,17 @@ github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfU github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= github.com/expr-lang/expr v1.17.6 h1:1h6i8ONk9cexhDmowO/A64VPxHScu7qfSl2k8OlINec= github.com/expr-lang/expr v1.17.6/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= -github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= -github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618= github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= github.com/go-fonts/liberation v0.3.0 h1:3BI2iaE7R/s6uUUtzNCjo3QijJu3aS4wmrMgfSpYQ+8= github.com/go-fonts/liberation v0.3.0/go.mod h1:jdJ+cqF+F4SUL2V+qxBth8fvBpBDS7yloUL5Fi8GTGY= github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/go-jose/go-jose/v4 v4.1.1 h1:JYhSgy4mXXzAdF3nUx3ygx347LRXJRrpgyU3adRmkAI= +github.com/go-jose/go-jose/v4 v4.1.1/go.mod h1:BdsZGqgdO3b6tTc6LSE56wcDbMMLuPsw5d4ZD5f94kA= github.com/go-latex/latex v0.0.0-20230307184459-12ec69307ad9 h1:NxXI5pTAtpEaU49bpLpQoDsu1zrteW/vxzTz8Cd2UAs= github.com/go-latex/latex v0.0.0-20230307184459-12ec69307ad9/go.mod h1:gWuR/CrFDDeVRFQwHPvsv9soJVB/iqymhuZQuJ3a9OM= -github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= 
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= -github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= -github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-pdf/fpdf v0.6.0 h1:MlgtGIfsdMEEQJr2le6b/HNr1ZlQwxyWr77r2aj2U/8= github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-redis/redis_rate/v10 v10.0.1 h1:calPxi7tVlxojKunJwQ72kwfozdy25RjA0bCj1h0MUo= @@ -108,16 +135,19 @@ github.com/goccy/go-yaml v1.17.1 h1:LI34wktB2xEE3ONG/2Ar54+/HJVBriAGJ55PHls4YuY= github.com/goccy/go-yaml v1.17.1/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/glog v1.2.5 h1:DrW6hGnjIhtvhOIiAKT6Psh/Kd/ldepEa81DKeiRJ5I= +github.com/golang/glog v1.2.5/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= -github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-containerregistry v0.20.3 h1:oNx7IdTI936V8CQRveCjaxOiegWwvM7kqkbXTpyiovI= github.com/google/go-containerregistry v0.20.3/go.mod h1:w00pIgBRDVUDFM6bq+Qx8lwNWK+cxgCuX1vd3PIBDNI= github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA= @@ -139,6 +169,8 @@ github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFO github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20220319035150-800ac71e25c2 h1:rcanfLhLDA8nozr/K289V1zcntHr3V+SHlXwzz1ZI2g= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/invopop/jsonschema v0.13.0 h1:KvpoAJWEjR3uD9Kbm2HWJmqsEaHt8lBUpd0qHcIi21E= github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= @@ -147,14 +179,10 @@ github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwA github.com/kevinmbeaulieu/eq-go v1.0.0 h1:AQgYHURDOmnVJ62jnEk0W/7yFKEn+Lv8RHN6t7mB0Zo= github.com/kevinmbeaulieu/eq-go 
v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= -github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= -github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4 h1:sIXJOMrYnQZJu7OB7ANSF4MYri2fTEGIsRLz6LwI4xE= github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= @@ -174,6 +202,12 @@ github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/sys/mount v0.3.4 h1:yn5jq4STPztkkzSKpZkLcmjue+bZJ0u2AuQY1iNI1Ww= +github.com/moby/sys/mount v0.3.4/go.mod h1:KcQJMbQdJHPlq5lcYT+/CjatWM4PuxKe+XLSVS4J6Os= +github.com/moby/sys/mountinfo v0.7.2 h1:1shs6aH5s4o5H2zQLn796ADW1wMrIwHsyJ2v9KouLrg= +github.com/moby/sys/mountinfo v0.7.2/go.mod h1:1YOa8w8Ih7uW0wALDUgT1dTTSBrZ+HiBLGws92L2RU4= +github.com/moby/sys/reexec v0.1.0 h1:RrBi8e0EBTLEgfruBOFcxtElzRGTEUkeIFaVXgU7wok= +github.com/moby/sys/reexec v0.1.0/go.mod h1:EqjBg8F3X7iZe5pU6nRZnYCMUTXoxsjiIfHup5wYIN8= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/nats-io/nats.go v1.35.0 h1:XFNqNM7v5B+MQMKqVGAyHwYhyKb48jrenXNxIU20ULk= @@ -182,10 +216,6 @@ github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI= github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDmGD0nc= github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= -github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= -github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg= github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= @@ -193,12 +223,13 @@ github.com/pbnjay/memory 
v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhM github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posthog/posthog-go v1.5.5 h1:2o3j7IrHbTIfxRtj4MPaXKeimuTYg49onNzNBZbwksM= github.com/posthog/posthog-go v1.5.5/go.mod h1:3RqUmSnPuwmeVj/GYrS75wNGqcAKdpODiwc83xZWgdE= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/pquerna/cachecontrol v0.2.0 h1:vBXSNuE5MYP9IJ5kjsdo8uq+w41jSPgvba2DEnkRx9k= github.com/pquerna/cachecontrol v0.2.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI= github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= @@ -211,8 +242,11 @@ github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0leargg github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/redis/go-redis/v9 v9.4.0 h1:Yzoz33UZw9I/mFhx4MNrB6Fk+XHO1VukNcCa1+lwyKk= github.com/redis/go-redis/v9 v9.4.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= +github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw= github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= github.com/shirou/gopsutil/v3 v3.24.3 h1:eoUGJSmdfLzJ3mxIhmOAhgKEKgQkeOwKpz1NbhVnuPE= @@ -223,20 +257,18 @@ github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= +github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod 
h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= -github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/tidwall/sjson v1.0.4 h1:UcdIRXff12Lpnu3OLtZvnc03g4vH2suXDXhBwBqmzYg= -github.com/tidwall/sjson v1.0.4/go.mod h1:bURseu1nuBkFpIES5cz6zBtjmYeOQmEESshn7VpF15Y= -github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= -github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= -github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= -github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/tonglil/opentelemetry-go-datadog-propagator v0.1.3 h1:Ozy1UnlID19jL6+vixEcA1t4NMf8hp01uDAY1nwGl8U= github.com/tonglil/opentelemetry-go-datadog-propagator v0.1.3/go.mod h1:Ijp5eaviP2mk8CJM+0EDYFKNULr+kicPSB9FOvxOhW0= github.com/twmb/franz-go v1.16.1 h1:rpWc7fB9jd7TgmCyfxzenBI+QbgS8ZfJOUQE+tzPtbE= @@ -246,25 +278,32 @@ github.com/twmb/franz-go/pkg/kmsg v1.7.0/go.mod h1:se9Mjdt0Nwzc9lnjJ0HyDtLyBnaBD github.com/vbatts/tar-split v0.12.1 h1:CqKoORW7BUWBe7UL/iqTVvkTBOF8UvOMKOIZykxnnbo= github.com/vbatts/tar-split v0.12.1/go.mod h1:eF6B6i6ftWQcDqEn3/iGFRFRo8cBIMSJVOpnNdfTMFA= github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc= +github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/wundergraph/go-arena v0.0.0-20251008210416-55cb97e6f68f h1:5snewyMaIpajTu4wj22L/DgrGimICqXtUVjkZInBH3Y= github.com/wundergraph/go-arena v0.0.0-20251008210416-55cb97e6f68f/go.mod h1:ROOysEHWJjLQ8FSfNxZCziagb7Qw2nXY3/vgKRh7eWw= -github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4= github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= -github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= -github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= +github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= +go.etcd.io/gofail v0.2.0 h1:p19drv16FKK345a09a1iubchlw/vmRuksmRzgBIGjcA= +go.etcd.io/gofail v0.2.0/go.mod h1:nL3ILMGfkXTekKI3clMBNazKnjUZjYLKmBHzsVAnC1o= go.opentelemetry.io/contrib/detectors/gcp v1.34.0 h1:JRxssobiPg23otYU5SbWtQC//snGVIM3Tx6QRzlQBao= go.opentelemetry.io/contrib/detectors/gcp v1.34.0/go.mod h1:cV4BMFcscUR/ckqLkbfQmF0PRsq8w/lMGzdbCSveBHo= 
+go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw= +go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 h1:yd02MEjBdJkG3uabWP9apV+OuWRIXGDuJEUJbOHmCFU= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:umTcuxiv1n/s/S6/c2AT/g2CQ7u5C59sHDNmfSwgz7Q= go.opentelemetry.io/contrib/propagators/b3 v1.23.0 h1:aaIGWc5JdfRGpCafLRxMJbD65MfTa206AwSKkvGS0Hg= go.opentelemetry.io/contrib/propagators/b3 v1.23.0/go.mod h1:Gyz7V7XghvwTq+mIhLFlTgcc03UDroOg8vezs4NLhwU= go.opentelemetry.io/contrib/propagators/jaeger v1.23.0 h1:KFxfTCTkH1usVFzDaWzbmNdFX7ybUTCtkLsUTww0nG4= go.opentelemetry.io/contrib/propagators/jaeger v1.23.0/go.mod h1:xU+81opGquQICJGzwscLXAQLnIPWI+q7Zu4AQSrgXf8= +go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.44.0 h1:jd0+5t/YynESZqsSyPz+7PAFdEop0dlN0+PkyHYo8oI= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.44.0/go.mod h1:U707O40ee1FpQGyhvqnzmCJm1Wh6OX6GGBVn0E6Uyyk= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.44.0 h1:bflGWrfYyuulcdxf14V6n9+CoQcu5SAAdHmDPAJnlps= @@ -277,10 +316,17 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfg go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= go.opentelemetry.io/otel/exporters/prometheus v0.50.0 h1:2Ewsda6hejmbhGFyUvWZjUThC98Cf8Zy6g0zkIimOng= go.opentelemetry.io/otel/exporters/prometheus v0.50.0/go.mod h1:pMm5PkUo5YwbLiuEf7t2xg4wbP0/eSJrMxIMxKosynY= +go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU= +go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w= +go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps= +go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= @@ -295,12 +341,13 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto 
v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 h1:yixxcjnhBmY0nkL253HFVIm0JsFHwrHdT3Yh6szTnfY= golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8/go.mod h1:jj3sYF3dwk5D+ghuXyeI3r5MFf+NT2An6/9dOA95KSI= golang.org/x/image v0.6.0 h1:bR8b5okrPI3g/gyZakLZHeWxAR8Dn5CyxXv1hLH5g/4= golang.org/x/image v0.6.0/go.mod h1:MXLdDR43H7cDJq5GEGXEVeeNhPgi+YYEQ2pC1byI1x0= +golang.org/x/image v0.25.0 h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ= +golang.org/x/image v0.25.0/go.mod h1:tCAmOEGthTtkalusGp1g3xa2gke8J6c2N565dTyl9Rs= golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -316,10 +363,13 @@ golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/net v0.45.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY= golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70= golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= @@ -328,13 +378,13 @@ golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/telemetry 
v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU= @@ -345,14 +395,13 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= @@ -361,16 +410,23 @@ golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58 golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= gonum.org/v1/plot v0.10.1 h1:dnifSs43YJuNMDzB7v8wV64O4ABBHReuAVAoBxqBqS4= gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo= +gonum.org/v1/plot v0.15.2 h1:Tlfh/jBk2tqjLZ4/P8ZIwGrLEWQSPDLRm/SNWKNXiGI= +gonum.org/v1/plot v0.15.2/go.mod h1:DX+x+DWso3LTha+AdkJEv5Txvi+Tql3KAGkehP0/Ubg= google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1:GVIKPyP/kLIyVOgOnTwFOrvQaQUzOzGMCxgFUOEmm24= google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw= +google.golang.org/genproto/googleapis/api v0.0.0-20250707201910-8d1bb00bc6a7/go.mod h1:kXqgZtrWaf6qS3jZOCnCH7WYfrvFjkC51bM8fz3RsCA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo= +google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20 h1:MLBCGN1O7GzIx+cBiwfYPwtmZ41U3Mn/cotLJciaArI= +google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20/go.mod h1:Nr5H8+MlGWr5+xX/STzdoEqJrO+YteqFbMyCsrb6mH0= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= 
+google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= gopkg.in/errgo.v2 v2.1.0 h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8= honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM= rsc.io/pdf v0.1.1 h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4= diff --git a/v2/go.mod b/v2/go.mod index b3c65250ab..53d8eb3f4d 100644 --- a/v2/go.mod +++ b/v2/go.mod @@ -4,6 +4,7 @@ go 1.25 require ( github.com/99designs/gqlgen v0.17.76 + github.com/blevesearch/bleve/v2 v2.5.7 github.com/bufbuild/protocompile v0.14.1 github.com/buger/jsonparser v1.1.1 github.com/cespare/xxhash/v2 v2.3.0 @@ -11,7 +12,7 @@ require ( github.com/davecgh/go-spew v1.1.1 github.com/gobwas/ws v1.4.0 github.com/golang/mock v1.6.0 - github.com/google/go-cmp v0.6.0 + github.com/google/go-cmp v0.7.0 github.com/google/uuid v1.6.0 github.com/gorilla/websocket v1.5.1 github.com/hashicorp/go-plugin v1.6.3 @@ -20,11 +21,13 @@ require ( github.com/jensneuse/diffview v1.0.0 github.com/kingledion/go-tools v0.6.0 github.com/kylelemons/godebug v1.1.0 + github.com/phf/go-queue v0.0.0-20170504031614-9abe38d0371d github.com/pkg/errors v0.9.1 github.com/r3labs/sse/v2 v2.8.1 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 github.com/sebdah/goldie/v2 v2.7.1 github.com/stretchr/testify v1.11.1 + github.com/testcontainers/testcontainers-go v0.40.0 github.com/tidwall/gjson v1.17.0 github.com/tidwall/sjson v1.0.4 github.com/vektah/gqlparser/v2 v2.5.30 @@ -34,47 +37,112 @@ require ( go.uber.org/goleak v1.3.0 go.uber.org/zap v1.26.0 golang.org/x/sync v0.17.0 - golang.org/x/sys v0.37.0 + golang.org/x/sys v0.40.0 golang.org/x/text v0.30.0 - gonum.org/v1/gonum v0.14.0 - google.golang.org/grpc v1.68.1 - google.golang.org/protobuf v1.36.9 + gonum.org/v1/gonum v0.16.0 + google.golang.org/grpc v1.75.1 + google.golang.org/protobuf v1.36.11 gopkg.in/yaml.v2 v2.4.0 ) require ( + dario.cat/mergo v1.0.2 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/RoaringBitmap/roaring/v2 v2.4.5 // indirect github.com/agnivade/levenshtein v1.2.1 // indirect + github.com/bits-and-blooms/bitset v1.22.0 // indirect + github.com/blevesearch/bleve_index_api v1.2.11 // indirect + github.com/blevesearch/geo v0.2.4 // indirect + github.com/blevesearch/go-faiss v1.0.26 // indirect + github.com/blevesearch/go-porterstemmer v1.0.3 // indirect + github.com/blevesearch/gtreap v0.1.1 // indirect + github.com/blevesearch/mmap-go v1.0.4 // indirect + github.com/blevesearch/scorch_segment_api/v2 v2.3.13 // indirect + github.com/blevesearch/segment v0.9.1 // indirect + github.com/blevesearch/snowballstem v0.9.0 // indirect + github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect + github.com/blevesearch/vellum v1.1.0 // indirect + github.com/blevesearch/zapx/v11 v11.4.2 // indirect + github.com/blevesearch/zapx/v12 v12.4.2 // indirect + github.com/blevesearch/zapx/v13 v13.4.2 // indirect + github.com/blevesearch/zapx/v14 v14.4.2 // indirect + github.com/blevesearch/zapx/v15 v15.4.2 // indirect + github.com/blevesearch/zapx/v16 v16.2.8 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v0.2.1 // indirect + github.com/cpuguy83/dockercfg v0.3.2 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect + 
github.com/distribution/reference v0.6.0 // indirect + github.com/docker/docker v28.5.1+incompatible // indirect + github.com/docker/go-connections v0.6.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/ebitengine/purego v0.8.4 // indirect github.com/fatih/color v1.16.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobwas/httphead v0.1.0 // indirect github.com/gobwas/pool v0.2.1 // indirect github.com/golang/protobuf v1.5.4 // indirect + github.com/golang/snappy v0.0.4 // indirect github.com/hashicorp/go-hclog v0.14.1 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/yamux v0.1.1 // indirect - github.com/kr/pretty v0.3.1 // indirect + github.com/json-iterator/go v0.0.0-20171115153421-f7279a603ede // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/lib/pq v1.11.2 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.10 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/go-archive v0.1.0 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/mschoch/smat v0.2.0 // indirect github.com/oklog/run v1.0.0 // indirect - github.com/phf/go-queue v0.0.0-20170504031614-9abe38d0371d // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rogpeppe/go-internal v1.13.1 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sergi/go-diff v1.3.1 // indirect + github.com/shirou/gopsutil/v4 v4.25.6 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sosodev/duration v1.3.1 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect github.com/urfave/cli/v2 v2.27.7 // indirect github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect + go.etcd.io/bbolt v1.4.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect + go.opentelemetry.io/otel v1.40.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.40.0 // indirect + go.opentelemetry.io/otel/metric v1.40.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.40.0 // indirect + go.opentelemetry.io/otel/trace v1.40.0 // indirect + go.opentelemetry.io/proto/otlp v1.9.0 // indirect go.uber.org/multierr v1.11.0 // indirect + golang.org/x/crypto v0.43.0 // indirect golang.org/x/mod v0.29.0 // indirect golang.org/x/net v0.46.0 // indirect golang.org/x/tools v0.38.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // 
indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/v2/go.sum b/v2/go.sum index ffc8e96b71..f696da9973 100644 --- a/v2/go.sum +++ b/v2/go.sum @@ -1,31 +1,112 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= github.com/99designs/gqlgen v0.17.76 h1:YsJBcfACWmXWU2t1yCjoGdOmqcTfOFpjbLAE443fmYI= github.com/99designs/gqlgen v0.17.76/go.mod h1:miiU+PkAnTIDKMQ1BseUOIVeQHoiwYDZGCswoxl7xec= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/RoaringBitmap/roaring/v2 v2.4.5 h1:uGrrMreGjvAtTBobc0g5IrW1D5ldxDQYe2JW2gggRdg= +github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0= github.com/agnivade/levenshtein v1.2.1 h1:EHBY3UOn1gwdy/VbFwgo4cxecRznFk7fKWN1KOX7eoM= github.com/agnivade/levenshtein v1.2.1/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= +github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.22.0 h1:Tquv9S8+SGaS3EhyA+up3FXzmkhxPGjQQCkcs2uw7w4= +github.com/bits-and-blooms/bitset v1.22.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/blevesearch/bleve/v2 v2.5.7 h1:2d9YrL5zrX5EBBW++GOaEKjE+NPWeZGaX77IM26m1Z8= +github.com/blevesearch/bleve/v2 v2.5.7/go.mod h1:yj0NlS7ocGC4VOSAedqDDMktdh2935v2CSWOCDMHdSA= +github.com/blevesearch/bleve_index_api v1.2.11 h1:bXQ54kVuwP8hdrXUSOnvTQfgK0KI1+f9A0ITJT8tX1s= +github.com/blevesearch/bleve_index_api v1.2.11/go.mod h1:rKQDl4u51uwafZxFrPD1R7xFOwKnzZW7s/LSeK4lgo0= +github.com/blevesearch/geo v0.2.4 h1:ECIGQhw+QALCZaDcogRTNSJYQXRtC8/m8IKiA706cqk= +github.com/blevesearch/geo v0.2.4/go.mod h1:K56Q33AzXt2YExVHGObtmRSFYZKYGv0JEN5mdacJJR8= +github.com/blevesearch/go-faiss v1.0.26 h1:4dRLolFgjPyjkaXwff4NfbZFdE/dfywbzDqporeQvXI= +github.com/blevesearch/go-faiss v1.0.26/go.mod h1:OMGQwOaRRYxrmeNdMrXJPvVx8gBnvE5RYrr0BahNnkk= +github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo= +github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M= +github.com/blevesearch/gtreap v0.1.1 
h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y= +github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk= +github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc= +github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs= +github.com/blevesearch/scorch_segment_api/v2 v2.3.13 h1:ZPjv/4VwWvHJZKeMSgScCapOy8+DdmsmRyLmSB88UoY= +github.com/blevesearch/scorch_segment_api/v2 v2.3.13/go.mod h1:ENk2LClTehOuMS8XzN3UxBEErYmtwkE7MAArFTXs9Vc= +github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU= +github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw= +github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s= +github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs= +github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A= +github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ= +github.com/blevesearch/vellum v1.1.0 h1:CinkGyIsgVlYf8Y2LUQHvdelgXr6PYuvoDIajq6yR9w= +github.com/blevesearch/vellum v1.1.0/go.mod h1:QgwWryE8ThtNPxtgWJof5ndPfx0/YMBh+W2weHKPw8Y= +github.com/blevesearch/zapx/v11 v11.4.2 h1:l46SV+b0gFN+Rw3wUI1YdMWdSAVhskYuvxlcgpQFljs= +github.com/blevesearch/zapx/v11 v11.4.2/go.mod h1:4gdeyy9oGa/lLa6D34R9daXNUvfMPZqUYjPwiLmekwc= +github.com/blevesearch/zapx/v12 v12.4.2 h1:fzRbhllQmEMUuAQ7zBuMvKRlcPA5ESTgWlDEoB9uQNE= +github.com/blevesearch/zapx/v12 v12.4.2/go.mod h1:TdFmr7afSz1hFh/SIBCCZvcLfzYvievIH6aEISCte58= +github.com/blevesearch/zapx/v13 v13.4.2 h1:46PIZCO/ZuKZYgxI8Y7lOJqX3Irkc3N8W82QTK3MVks= +github.com/blevesearch/zapx/v13 v13.4.2/go.mod h1:knK8z2NdQHlb5ot/uj8wuvOq5PhDGjNYQQy0QDnopZk= +github.com/blevesearch/zapx/v14 v14.4.2 h1:2SGHakVKd+TrtEqpfeq8X+So5PShQ5nW6GNxT7fWYz0= +github.com/blevesearch/zapx/v14 v14.4.2/go.mod h1:rz0XNb/OZSMjNorufDGSpFpjoFKhXmppH9Hi7a877D8= +github.com/blevesearch/zapx/v15 v15.4.2 h1:sWxpDE0QQOTjyxYbAVjt3+0ieu8NCE0fDRaFxEsp31k= +github.com/blevesearch/zapx/v15 v15.4.2/go.mod h1:1pssev/59FsuWcgSnTa0OeEpOzmhtmr/0/11H0Z8+Nw= +github.com/blevesearch/zapx/v16 v16.2.8 h1:SlnzF0YGtSlrsOE3oE7EgEX6BIepGpeqxs1IjMbHLQI= +github.com/blevesearch/zapx/v16 v16.2.8/go.mod h1:murSoCJPCk25MqURrcJaBQ1RekuqSCSfMjXH4rHyA14= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/coder/websocket v1.8.12 h1:5bUXkEPPIbewrnkU8LTCLVaxi4N4J8ahufH2vlo4NAo= github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 
h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= +github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo= github.com/cpuguy83/go-md2man/v2 v2.0.7/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54 h1:SG7nF6SRlWhcT7cNTs5R6Hk4V2lcmLz2NsG2VnInyNo= github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM= +github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= +github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= 
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= @@ -38,13 +119,20 @@ github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs= github.com/hashicorp/go-hclog v0.14.1 h1:nQcJDQwIAGnmoUWp8ubocEX40cCml/17YkF6csQLReU= github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg= @@ -61,12 +149,15 @@ github.com/jensneuse/diffview v1.0.0 h1:4b6FQJ7y3295JUHU3tRko6euyEboL825ZsXeZZM4 github.com/jensneuse/diffview v1.0.0/go.mod h1:i6IacuD8LnEaPuiyzMHA+Wfz5mAuycMOf3R/orUY9y4= github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= +github.com/json-iterator/go v0.0.0-20171115153421-f7279a603ede h1:YrgBGwxMRK0Vq0WSCWFaZUnTsrA/PZE/xs1QZh+/edg= +github.com/json-iterator/go v0.0.0-20171115153421-f7279a603ede/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/kingledion/go-tools v0.6.0 h1:y8C/4mWoHgLkO45dB+Y/j0o4Y4WUB5lDTAcMPMtFpTg= github.com/kingledion/go-tools v0.6.0/go.mod h1:qcDJQxBui/H/hterGb90GMlLs9Yi7QrwaJL8OGdbsms= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= 
-github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -75,6 +166,12 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.11.2 h1:x6gxUeu39V0BHZiugWe8LXZYZ+Utk7hSJGThs8sdzfs= +github.com/lib/pq v1.11.2/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= @@ -82,22 +179,46 @@ github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= +github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM= 
+github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw= github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/phf/go-queue v0.0.0-20170504031614-9abe38d0371d h1:U+PMnTlV2tu7RuMK5etusZG3Cf+rpow5hqQByeCzJ2g= github.com/phf/go-queue v0.0.0-20170504031614-9abe38d0371d/go.mod h1:lXfE4PvvTW5xOjO6Mba8zDPyw8M93B6AQ7frTGnMlA8= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/r3labs/sse/v2 v2.8.1 h1:lZH+W4XOLIq88U5MIHOsLec7+R62uhz3bIi2yn0Sg8o= github.com/r3labs/sse/v2 v2.8.1/go.mod h1:Igau6Whc+F17QUgML1fYe1VPZzTV6EMCnYktEmkNJ7I= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= @@ -107,6 +228,8 @@ github.com/sebdah/goldie/v2 v2.7.1/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvK github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= +github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= @@ -114,12 +237,16 @@ github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq 
github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU= +github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY= github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM= github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= @@ -129,19 +256,45 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.0.4 h1:UcdIRXff12Lpnu3OLtZvnc03g4vH2suXDXhBwBqmzYg= github.com/tidwall/sjson v1.0.4/go.mod h1:bURseu1nuBkFpIES5cz6zBtjmYeOQmEESshn7VpF15Y= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/urfave/cli/v2 v2.27.7 h1:bH59vdhbjLv3LAvIu6gd0usJHgoTTPhCFib8qqOwXYU= github.com/urfave/cli/v2 v2.27.7/go.mod h1:CyNAG/xg+iAOg0N4MPGZqVmv2rCoP267496AOXUZjA4= github.com/vektah/gqlparser/v2 v2.5.30 h1:EqLwGAFLIzt1wpx1IPpY67DwUujF1OfzgEyDsLrN6kE= github.com/vektah/gqlparser/v2 v2.5.30/go.mod h1:D1/VCZtV3LPnQrcPBeR/q5jkSQIPti0uYCP/RI0gIeo= github.com/wundergraph/astjson v1.0.0 h1:rETLJuQkMWWW03HCF6WBttEBOu8gi5vznj5KEUPVV2Q= github.com/wundergraph/astjson v1.0.0/go.mod h1:h12D/dxxnedtLzsKyBLK7/Oe4TAoGpRVC9nDpDrZSWw= -github.com/wundergraph/go-arena v1.0.0 h1:RVYWpDkJ1/6851BRHYehBeEcTLKmZygYIZsvBorcOjw= -github.com/wundergraph/go-arena v1.0.0/go.mod h1:ROOysEHWJjLQ8FSfNxZCziagb7Qw2nXY3/vgKRh7eWw= github.com/wundergraph/go-arena v1.1.0 h1:9+wSRkJAkA2vbYHp6s8tEGhPViRGQNGXqPHT0QzhdIc= github.com/wundergraph/go-arena v1.1.0/go.mod h1:ROOysEHWJjLQ8FSfNxZCziagb7Qw2nXY3/vgKRh7eWw= github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 h1:FnBeRrxr7OU4VvAzt5X7s6266i6cSVkkFPS0TuXWbIg= github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.etcd.io/bbolt v1.4.0 
h1:TU77id3TnN/zKr7CO/uk+fBCwF2jGcMuw2B/FMAzYIk= +go.etcd.io/bbolt v1.4.0/go.mod h1:AsD+OCi/qPN1giOX1aiLAha3o1U8rAz65bvN4j0sRuk= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms= +go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.40.0 h1:QKdN8ly8zEMrByybbQgv8cWBcdAarwmIPZ6FThrWXJs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.40.0/go.mod h1:bTdK1nhqF76qiPoCCdyFIV+N/sRHYXYCTQc+3VCi3MI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= +go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g= +go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc= +go.opentelemetry.io/otel/sdk v1.40.0 h1:KHW/jUzgo6wsPh9At46+h4upjtccTmuZCFAc9OJ71f8= +go.opentelemetry.io/otel/sdk v1.40.0/go.mod h1:Ph7EFdYvxq72Y8Li9q8KebuYUr2KoeyHx0DRMKrYBUE= +go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw= +go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg= +go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw= +go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA= +go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A= +go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= @@ -157,6 +310,8 @@ go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= @@ -177,19 +332,29 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 h1:vVKdlvoWBphwdxWKrFZEuM0kGgGLxUOYcY4U/2Vjg44= +golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= @@ -201,15 +366,18 @@ golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gonum.org/v1/gonum v0.14.0 h1:2NiG67LD1tEH0D7kM+ps2V+fXmsAnpUeec7n8tcr4S0= -gonum.org/v1/gonum v0.14.0/go.mod h1:AoWeoz0becf9QMWtE8iWXNXc27fK4fNeHNf/oMejGfU= 
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f h1:OxYkA3wjPsZyBylwymxSHa7ViiW1Sml4ToBrncvFehI= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f/go.mod h1:+2Yz8+CLJbIfL9z73EW45avw8Lmge3xVElCP9zEKi50= -google.golang.org/grpc v1.68.1 h1:oI5oTa11+ng8r8XMMN7jAOmWfPZWbYpCFaMUTACxkM0= -google.golang.org/grpc v1.68.1/go.mod h1:+q1XYFJjShcqn0QZHvCyeR4CXPA+llXIeUIfIe00waw= -google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= -google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY= +google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5/go.mod h1:M4/wBTSeyLxupu3W3tJtOgB14jILAS/XWPSSa3TAlJc= +google.golang.org/grpc v1.75.1 h1:/ODCNEuf9VghjgO3rqLcfg8fiOP0nSluljWFlDxELLI= +google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y= gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -222,6 +390,9 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= diff --git a/v2/pkg/engine/datasource/search_datasource/CLAUDE.md b/v2/pkg/engine/datasource/search_datasource/CLAUDE.md new file mode 100644 index 0000000000..45656216a5 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/CLAUDE.md @@ -0,0 +1,249 @@ +# Search DataSource Implementation + +GraphQL-native search integration for the WunderGraph router. Adds full-text search, vector search, filtering, sorting, pagination, and faceted search to any federated GraphQL graph via schema directives. + +**Status:** Work in progress. ~19,800 lines across 80+ files. 
+ +## Remaining TODO + +- Check vector search +- Create instructions to implement search in cosmo router + +## Package Layout + +``` +v2/pkg/engine/datasource/search_datasource/ # GraphQL datasource (planner, source, SDL generation) +v2/pkg/engine/datasource/search_datasource/searche2e/ # Unit-level e2e tests (no composition) +v2/pkg/searchindex/ # Backend-agnostic abstractions (Index, Filter, Embedder) +v2/pkg/searchindex/{bleve,elasticsearch,...}/ # Backend implementations +v2/pkg/searchindex/embedder/{openai,ollama}/ # Embedding providers +execution/searchtesting/ # Full-stack e2e tests (with cosmo composition) +execution/searchtesting/productdetails/ # Federation entity subgraph (gqlgen) +execution/searchtesting/shareddata/ # Shared test data (4 products) +``` + +## Architecture Overview + +``` +Schema Directives (@index, @searchable, @indexed, @embedding) + | + v +directives.go: ParseConfigSchema() -> ParsedConfig + | + v +generator.go: GenerateSubgraphSDL() -> GraphQL subgraph schema + | + v +lifecycle.go: Manager.Start() -> creates indices, populates data, starts subscriptions + | + v +planner.go: Planner -> walks GraphQL operation, collects search arguments + | + v +source.go: Source.Load() -> builds SearchRequest, calls Index.Search(), formats response + | + v +searchindex.Index (backend) -> SearchResult (hits, scores, facets, cursors) +``` + +## Key Files + +### `search_datasource/` (GraphQL integration) + +| File | Lines | Purpose | +|------|-------|---------| +| `directives.go` | 434 | Parses `@index`, `@searchable`, `@indexed`, `@embedding`, `@populate`, `@subscribe` from schema AST into `ParsedConfig` | +| `generator.go` | 328 | Generates subgraph SDL from `ParsedConfig` (filter inputs, sort enums, result types, query fields, entity stubs) | +| `source.go` | 354 | `Source.Load()` - builds `SearchRequest` from planner input, calls backend, formats response (inline/wrapper/connection) | +| `lifecycle.go` | 280 | `Manager` - creates indices, sets up embedding pipelines, runs populate queries, starts subscriptions, handles shutdown | +| `filter_parser.go` | 239 | Converts GraphQL filter JSON to `searchindex.Filter` tree (term, range, prefix, AND/OR/NOT) | +| `planner.go` | 119 | `Planner` - visitor-based, detects search field, collects arguments, builds fetch input JSON with `{{.arguments.X}}` templates | +| `entity_extractor.go` | 118 | Extracts `EntityDocument` slice from GraphQL response JSON (for populate/subscribe) | +| `factory.go` | 87 | `Factory` - creates planners, holds index/embedder registries | +| `configuration.go` | 53 | `Configuration` struct (JSON-serialized per-entity config for planner/source) | +| `cursor.go` | 26 | Base64+JSON cursor encoding/decoding for Relay-style pagination | + +### `searchindex/` (backend abstractions) + +| File | Lines | Purpose | +|------|-------|---------| +| `index.go` | 25 | `Index` interface: `IndexDocument`, `IndexDocuments`, `DeleteDocument`, `DeleteDocuments`, `Search`, `Close` | +| `document.go` | 77 | `EntityDocument`, `SearchRequest`, `SearchResult`, `SearchHit`, `SortField`, `FacetRequest/Result` | +| `filter.go` | 47 | `Filter` tree: `And`, `Or`, `Not`, `Term`, `Terms`, `Range`, `Prefix`, `Exists` | +| `config.go` | 62 | `FieldType` enum (Text, Keyword, Numeric, Bool, Vector), `FieldConfig`, `IndexConfig` | +| `embedder.go` | 49 | `Embedder` interface, `TextTransformer` interface, `EmbeddingPipeline` (transformer + embedder) | +| `registry.go` | 68 | Thread-safe `IndexFactoryRegistry` and `EmbedderRegistry` | +| 
`template_transformer.go` | 69 | `TemplateTransformer` - converts `{{title}}` shorthand to Go templates for embedding text generation |
+
+### Backend implementations (all implement `searchindex.Index`)
+
+| Backend | Lines | Notes |
+|---------|-------|-------|
+| `bleve/` | 688 | In-memory/file-based. Full-featured: text, facets, prefix, cursor pagination. No external deps at runtime. |
+| `elasticsearch/` | 898 | HTTP client. Uses `_search` API with query DSL. Supports `search_after` for cursor pagination. |
+| `pgvector/` | 1342 | PostgreSQL with pgvector extension. SQL-based filters. Supports both text (tsvector) and vector search. |
+| `weaviate/` | 1219 | GraphQL-based vector DB. Uses BM25 for text, nearVector for vectors. Class-based schema. |
+| `typesense/` | 911 | HTTP client. String-based filter syntax. Native text search with faceting. |
+| `qdrant/` | 838 | gRPC client. Point-based storage. Vector search with payload filtering. |
+| `algolia/` | 806 | HTTP client. Uses Algolia search API with faceting and filters. |
+| `meilisearch/` | 757 | HTTP client. Filter string syntax. Native text search with faceting. |
+
+### Embedding providers
+
+| Provider | Lines | Notes |
+|----------|-------|-------|
+| `openai/` | 270 | OpenAI embeddings API. Batch support (up to 2048). Exponential backoff retries. |
+| `ollama/` | 148 | Local Ollama server. Batch via `/api/embed`. Caller provides dimensions. |
+
+## Schema Directives
+
+```graphql
+# On schema extension: declare a search index
+extend schema @index(name: "products", backend: "bleve", config: "{}", cursorBasedPagination: true)
+
+# On object type: make it searchable
+type Product @key(fields: "id") @searchable(
+  index: "products"
+  searchField: "searchProducts"
+  resultsMetaInformation: true
+) {
+  id: ID!
+  name: String! @indexed(type: TEXT, filterable: true, sortable: true)
+  price: Float! @indexed(type: NUMERIC, filterable: true, sortable: true)
+  category: String! @indexed(type: KEYWORD, filterable: true, sortable: true)
+  inStock: Boolean! @indexed(type: BOOL, filterable: true)
+  embedding: [Float!]! @embedding(fields: "name description", template: "{{name}}. {{description}}", model: "text-embedding-3-small")
+}
+```
+
+**Index types:** `TEXT` (full-text), `KEYWORD` (exact match), `NUMERIC` (range queries), `BOOL`, `VECTOR` (pre-computed embeddings), `GEO` (geo-point)
+
+## Generated SDL Modes
+
+The generator produces different SDL based on configuration:
+
+1. **Inline** (`resultsMetaInformation: false`, no cursor): Returns `[Product!]!` directly
+2. **Wrapper** (`resultsMetaInformation: true`, no cursor): Returns `SearchProductResult!` with `hits[]{score, node}`, `totalCount`, `facets`
+3. **Connection** (cursor pagination): Returns `SearchProductConnection!` with `edges[]{cursor, node, score}`, `pageInfo`, `totalCount`
+
+Query arguments vary by mode:
+- Inline/Wrapper: `query`, `filter`, `sort`, `limit`, `offset`, `facets`
+- Connection: `query`, `filter`, `sort`, `first`, `after`, `last` (if bidirectional), `before`, `facets`
+
+Vector search replaces `query: String!` with `search: SearchProductInput!` (a `@oneOf` input accepting either `query: String` or `vector: [Float!]`)
+
+## Response Formatting
+
+Source formats responses differently per mode, all wrapped in `{"data": {"": ...}}`:
+
+- **Inline:** `[{id, name, ...}, ...]`
+- **Wrapper:** `{hits: [{score, node: {...}}, ...], totalCount, facets}`
+- **Connection:** `{edges: [{cursor, node: {...}, score}, ...], pageInfo: {hasNextPage, ...}, totalCount}`
+
+## Lifecycle Manager
+
+`Manager.Start()` orchestrates:
+1. Creates indices from `@index` directives via `IndexFactoryRegistry`
+2. Sets up `EmbeddingPipeline` for each `@embedding` field (TemplateTransformer + Embedder)
+3. Runs initial population via `@populate` queries (executes GraphQL, extracts entities, batches embeddings, indexes)
+4. Starts subscription listeners via `@subscribe` for real-time updates
+5. Schedules periodic resync if `resyncInterval` is set
+
+## Filter System
+
+GraphQL filter JSON is parsed into a composable `searchindex.Filter` tree:
+
+```json
+{"category": {"eq": "Footwear"}, "price": {"gte": 10, "lte": 100}, "AND": [...], "OR": [...], "NOT": {...}}
+```
+
+Operators by field type:
+- **String (Text/Keyword):** `eq`, `ne`, `in`, `contains`, `startsWith`
+- **Numeric:** `eq`, `gt`, `gte`, `lt`, `lte`
+- **Boolean:** direct value
+
+Each backend translates the `Filter` tree to its native query language.
+
+## Cursor Pagination
+
+Uses base64-encoded JSON sort values as opaque cursors. Over-fetches by 1 to detect `hasNextPage`/`hasPreviousPage`. Backward pagination (`last`/`before`) reverses the sort direction, then reverses the results back.
+
+Backend support varies: Bleve and pgvector support bidirectional pagination; Elasticsearch supports forward-only. A minimal code sketch of the filter parsing and cursor round-trip follows the test-layer overview below.
+
+## Test Architecture
+
+### Two test layers
+
+**`searche2e/` (unit-level e2e):** Tests `Source.Load()` directly with a real index backend. No HTTP, no composition. Backend-agnostic framework with `RunBackendTests()`, `RunCursorTests()`, `RunGeoTests()`, and `RunFederatedBackendTests()`.
+
+**`execution/searchtesting/` (full-stack e2e):** Full pipeline: parse config SDL -> generate SDL -> compose subgraphs (cosmo composition-go) -> plan -> resolve. Includes a real gqlgen federation entity subgraph for entity joins. Tests wrapper, inline, cursor, vector, hybrid, geo, highlight, and additional filter modes.
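+
+For reference, here is a minimal sketch of the filter parsing and cursor round-trip described above, using the exported helpers `ParseFilterJSON`, `EncodeCursor`, and `DecodeCursor` from this package. The field configs and sort values are illustrative, not taken from a real schema:
+
+```go
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource"
+	"github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+)
+
+func main() {
+	// Field configs as the planner would hand them to the source
+	// (illustrative values, not from a real schema).
+	fields := []search_datasource.IndexedFieldConfig{
+		{FieldName: "category", IndexType: searchindex.FieldTypeKeyword, Filterable: true},
+		{FieldName: "price", IndexType: searchindex.FieldTypeNumeric, Filterable: true},
+	}
+
+	// The filter argument exactly as it arrives from the GraphQL layer.
+	raw := json.RawMessage(`{"category": {"eq": "Footwear"}, "price": {"gte": 10, "lte": 100}}`)
+
+	filter, err := search_datasource.ParseFilterJSON(raw, fields)
+	if err != nil {
+		panic(err)
+	}
+	// Two sibling field filters become AND children of the root node.
+	fmt.Println(len(filter.And)) // 2
+
+	// Cursor round-trip: sort values -> opaque base64 cursor -> sort values.
+	cursor := search_datasource.EncodeCursor([]string{"89.99", "1"})
+	values, _ := search_datasource.DecodeCursor(cursor)
+	fmt.Println(cursor, values)
+}
+```
+
+Note the single-clause unwrap in the parser: a filter with exactly one `AND` child and no other clauses is returned as that child directly, so `{"category": {"eq": "Footwear"}}` alone yields a bare term filter rather than a one-element `AND`.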
+ +### Test data + +4 products (shared via `shareddata.Products()`): +- Running Shoes ($89.99, Footwear, Nike, in stock, New York) +- Basketball Shoes ($129.99, Footwear, Adidas, in stock, Midtown Manhattan) +- Leather Belt ($35.00, Accessories, Gucci, out of stock, Los Angeles) +- Wool Socks ($12.99, Footwear, Smartwool, in stock, London) + +### Test scenarios covered + +- Text search, keyword/boolean/numeric-range filtering +- AND/OR/NOT filter combinations, IN (terms), NE, prefix filters +- Sorting (price ASC/DESC), offset pagination, cursor pagination (forward + backward) +- Faceted search (category aggregation) +- Federation entity joins (manufacturer, rating, reviews from separate subgraph) +- Document CRUD (index, upsert, delete single/batch) +- SDL generation (text-only, vector, inline, cursor variants) +- Identity roundtrip (`__typename` + key fields) +- Geo-spatial search (distance filter, bounding box, distance sort, combined filters) +- Search highlights (field + fragments validation) +- Vector search (auto-embed, raw vector, with filter, distance populated, entity join) +- Hybrid search (text + vector combined, relevance, filter, entity join) + +### Running tests + +```bash +# Unit-level e2e (bleve only, no external services) +cd v2 && go test ./pkg/engine/datasource/search_datasource/searche2e/ -run TestBleve -v + +# Full-stack e2e (requires composition-go in execution/) +cd execution && go test ./searchtesting/ -run TestBleve -v + +# External backends require running services (ES, pgvector, etc.) +# Use -tags=integration for external backend tests +cd execution && go test -tags=integration ./searchtesting/ -run TestElasticsearch -v +``` + +### IMPORTANT: Adding integration tests for new features + +**Every new feature MUST have extensive integration tests in `execution/searchtesting/`.** This is the primary test layer that validates the full pipeline end-to-end with federation composition. + +When adding a new search capability: + +1. **Add a `RunScenarios()` function** to `execution/searchtesting/framework.go`. Follow the pattern of existing functions: + - `RunAllScenarios()` — basic text search + filters + sort + pagination + facets + - `RunInlineScenarios()` — inline result style (no wrapper types) + - `RunCursorScenarios()` — cursor-based pagination + - `RunVectorScenarios()` — vector/semantic search + - `RunHybridScenarios()` — text + vector hybrid search + - `RunGeoScenarios()` — geo-spatial search (distance, bounding box, geo sort) + - `RunHighlightScenarios()` — search result highlights + - `RunAdditionalFilterScenarios()` — NE, IN, startsWith filter operators + +2. **Create a config SDL builder** if the feature needs additional fields (e.g., `geoConfigSDL()`, `vectorConfigSDL()`). + +3. **Add a `setupTestEnv()` function** if the feature requires custom data or configuration (e.g., `setupGeoTestEnv()`, `setupVectorTestEnv()`). + +4. **Add test data functions** in `testdata.go` if the feature needs specialized data (e.g., `testGeoProducts()`, `testVectorProducts()`). + +5. **Add backend test functions** in the per-backend `*_test.go` files: + - `bleve_test.go` — always add here first (no external deps, fast CI) + - `elasticsearch_test.go` — add for features ES supports (geo, highlights, etc.) + - Other backends as applicable + +6. **Use structural assertions** for backend-dependent values (scores, distances, highlights). Use exact JSON matching only for deterministic results (filter counts, sort order, entity joins). + +7. 
**Also add unit-level tests** to `searche2e/framework.go` (`RunBackendTests()`, `RunGeoTests()`, etc.) — these test `Source.Load()` directly without composition overhead. + +The goal: every `Run*Scenarios()` function covers the feature end-to-end through the full pipeline (parse SDL -> generate -> compose -> plan -> resolve -> entity join). If Bleve can run it, test it with Bleve first for fast feedback. diff --git a/v2/pkg/engine/datasource/search_datasource/configuration.go b/v2/pkg/engine/datasource/search_datasource/configuration.go new file mode 100644 index 0000000000..748b20eca0 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/configuration.go @@ -0,0 +1,57 @@ +package search_datasource + +import "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + +// Configuration is the DataSource-specific configuration for the search datasource. +type Configuration struct { + // IndexName is the name of the search index. + IndexName string `json:"index_name"` + // SearchField is the name of the generated Query field. + SearchField string `json:"search_field"` + // EntityTypeName is the entity type this search field resolves. + EntityTypeName string `json:"entity_type_name"` + // KeyFields are the federation key fields for the entity. + KeyFields []string `json:"key_fields"` + // Fields describes the indexed fields and their types. + Fields []IndexedFieldConfig `json:"fields"` + // EmbeddingFields describes derived embedding fields. + EmbeddingFields []EmbeddingFieldConfig `json:"embedding_fields,omitempty"` + // HasVectorSearch indicates the entity supports vector search. + HasVectorSearch bool `json:"has_vector_search"` + // HasTextSearch indicates the entity supports full-text search. + HasTextSearch bool `json:"has_text_search"` + // ResultsMetaInformation controls whether the search field returns wrapper types with score/distance + // or a flat entity array. Defaults to true. + ResultsMetaInformation bool `json:"results_meta_information"` + // CursorBasedPagination enables Relay-style cursor pagination. + CursorBasedPagination bool `json:"cursor_based_pagination,omitempty"` + // CursorBidirectional enables last/before args (true for bleve, pgvector; false for elasticsearch). + CursorBidirectional bool `json:"cursor_bidirectional,omitempty"` + // IsSuggest indicates this configuration is for the suggest/autocomplete field, not the search field. + IsSuggest bool `json:"is_suggest,omitempty"` +} + +// NeedsResponseWrapper returns true if the config requires wrapper types. +func (c *Configuration) NeedsResponseWrapper() bool { + return c.ResultsMetaInformation || c.CursorBasedPagination +} + +// IndexedFieldConfig describes a field's indexing configuration. +type IndexedFieldConfig struct { + FieldName string `json:"field_name"` + GraphQLType string `json:"graphql_type"` + IndexType searchindex.FieldType `json:"index_type"` + Filterable bool `json:"filterable"` + Sortable bool `json:"sortable"` + Dimensions int `json:"dimensions,omitempty"` + Weight float64 `json:"weight,omitempty"` + Autocomplete bool `json:"autocomplete,omitempty"` +} + +// EmbeddingFieldConfig describes a derived embedding field. 
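+// SourceFields lists the entity fields the template draws from; Template uses the
+// {{field}} shorthand expanded by TemplateTransformer, and Model selects the
+// embedder from the EmbedderRegistry.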
+type EmbeddingFieldConfig struct { + FieldName string `json:"field_name"` + SourceFields []string `json:"source_fields"` + Template string `json:"template"` + Model string `json:"model"` +} diff --git a/v2/pkg/engine/datasource/search_datasource/cursor.go b/v2/pkg/engine/datasource/search_datasource/cursor.go new file mode 100644 index 0000000000..c19b6a36f0 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/cursor.go @@ -0,0 +1,26 @@ +package search_datasource + +import ( + "encoding/base64" + "encoding/json" + "fmt" +) + +// EncodeCursor encodes sort values into an opaque cursor string. +func EncodeCursor(sortValues []string) string { + data, _ := json.Marshal(sortValues) + return base64.RawURLEncoding.EncodeToString(data) +} + +// DecodeCursor decodes an opaque cursor string into sort values. +func DecodeCursor(cursor string) ([]string, error) { + data, err := base64.RawURLEncoding.DecodeString(cursor) + if err != nil { + return nil, fmt.Errorf("invalid cursor encoding: %w", err) + } + var sortValues []string + if err := json.Unmarshal(data, &sortValues); err != nil { + return nil, fmt.Errorf("invalid cursor data: %w", err) + } + return sortValues, nil +} diff --git a/v2/pkg/engine/datasource/search_datasource/directives.go b/v2/pkg/engine/datasource/search_datasource/directives.go new file mode 100644 index 0000000000..f017af55ac --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/directives.go @@ -0,0 +1,532 @@ +package search_datasource + +import ( + "fmt" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// ParsedConfig holds the complete parsed configuration from the config schema. +type ParsedConfig struct { + Indices []IndexDirective + Entities []SearchableEntity + Populations []PopulateDirective + Subscriptions []SubscribeDirective +} + +// IndexDirective represents @index on the schema extension. +type IndexDirective struct { + Name string + Backend string + ConfigJSON string + CursorBasedPagination bool // from @index(cursorBasedPagination: true) +} + +// SearchableEntity represents @searchable on an object type. +type SearchableEntity struct { + TypeName string + IndexName string + SearchField string + SuggestField string // e.g. "suggestProducts" — omit to disable autocomplete + KeyFields []string + Fields []IndexedField + EmbeddingFields []EmbeddingField + ResultsMetaInformation bool // renamed from UseResultWrapper + CursorBasedPagination bool // propagated from IndexDirective + CursorBidirectional bool // true if backend supports last/before +} + +// NeedsResponseWrapper returns true if the entity needs wrapper types in the SDL. +func (e *SearchableEntity) NeedsResponseWrapper() bool { + return e.ResultsMetaInformation || e.CursorBasedPagination +} + +// IndexedField represents @indexed on a field definition. +type IndexedField struct { + FieldName string + GraphQLType string + IndexType searchindex.FieldType + Filterable bool + Sortable bool + Dimensions int + Weight float64 // search boost for TEXT fields; 0 = default (1.0) + Autocomplete bool // opt-in for term autocomplete +} + +// EmbeddingField represents @embedding on a virtual field. +type EmbeddingField struct { + FieldName string + SourceFields []string + Template string + Model string +} + +// PopulateDirective represents @populate on a schema extension or query operation. 
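+// The lifecycle Manager executes Query at startup and, if ResyncInterval is set,
+// periodically thereafter, indexing the entities found at Path (e.g. "data.products").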
+type PopulateDirective struct { + IndexName string + EntityTypeName string + Path string + Query string // GraphQL query to execute for population + ResyncInterval string +} + +// SubscribeDirective represents @subscribe on a schema extension or subscription operation. +type SubscribeDirective struct { + IndexName string + EntityTypeName string + Path string + DeletionPath string + Subscription string // GraphQL subscription operation to execute +} + +// HasVectorSearch returns true if the entity has any VECTOR or embedding fields. +func (e *SearchableEntity) HasVectorSearch() bool { + for _, f := range e.Fields { + if f.IndexType == searchindex.FieldTypeVector { + return true + } + } + return len(e.EmbeddingFields) > 0 +} + +// HasGeoSearch returns true if the entity has any GEO fields. +func (e *SearchableEntity) HasGeoSearch() bool { + for _, f := range e.Fields { + if f.IndexType == searchindex.FieldTypeGeo { + return true + } + } + return false +} + +// HasDateField returns true if the entity has any DATE or DATETIME fields. +func (e *SearchableEntity) HasDateField() bool { + for _, f := range e.Fields { + if f.IndexType == searchindex.FieldTypeDate || f.IndexType == searchindex.FieldTypeDateTime { + return true + } + } + return false +} + +// HasTextSearch returns true if the entity has any TEXT fields. +func (e *SearchableEntity) HasTextSearch() bool { + for _, f := range e.Fields { + if f.IndexType == searchindex.FieldTypeText { + return true + } + } + return false +} + +// HasAutocomplete returns true if the entity has any fields with autocomplete enabled. +func (e *SearchableEntity) HasAutocomplete() bool { + for _, f := range e.Fields { + if f.Autocomplete { + return true + } + } + return false +} + +// ParseConfigSchema parses a config schema document and extracts all directives. +func ParseConfigSchema(doc *ast.Document) (*ParsedConfig, error) { + config := &ParsedConfig{} + + // Parse @index, @populate, @subscribe directives from schema extensions + if err := parseSchemaExtensionDirectives(doc, config); err != nil { + return nil, fmt.Errorf("parsing schema extension directives: %w", err) + } + + // Parse @searchable, @indexed, @embedding directives from object types + if err := parseEntityDirectives(doc, config); err != nil { + return nil, fmt.Errorf("parsing entity directives: %w", err) + } + + // Propagate cursor flags from IndexDirective to matching SearchableEntity. + indexByName := make(map[string]*IndexDirective, len(config.Indices)) + for i := range config.Indices { + indexByName[config.Indices[i].Name] = &config.Indices[i] + } + for i := range config.Entities { + if idx, ok := indexByName[config.Entities[i].IndexName]; ok && idx.CursorBasedPagination { + config.Entities[i].CursorBasedPagination = true + caps, hasCaps := cursorBackendCaps[idx.Backend] + if hasCaps { + config.Entities[i].CursorBidirectional = caps.Bidirectional + } + } + } + + return config, nil +} + +// cursorBackendCap describes cursor support for a backend. +type cursorBackendCap struct { + Supported bool + Bidirectional bool +} + +// cursorBackendCaps maps backend names to their cursor capabilities. 
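+// Backends absent from this map keep CursorBidirectional false, so the generated
+// SDL exposes forward-only pagination (no last/before) for them.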
+var cursorBackendCaps = map[string]cursorBackendCap{ + "bleve": {Supported: true, Bidirectional: true}, + "elasticsearch": {Supported: true, Bidirectional: false}, + "pgvector": {Supported: true, Bidirectional: true}, +} + +func parseSchemaExtensionDirectives(doc *ast.Document, config *ParsedConfig) error { + for i := range doc.SchemaExtensions { + for _, dirRef := range doc.SchemaExtensions[i].Directives.Refs { + dirName := doc.DirectiveNameString(dirRef) + switch dirName { + case "index": + idx := IndexDirective{} + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("name")); ok { + idx.Name = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("backend")); ok { + idx.Backend = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("config")); ok { + idx.ConfigJSON = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("cursorBasedPagination")); ok { + if val.Kind == ast.ValueKindBoolean { + idx.CursorBasedPagination = bool(doc.BooleanValues[val.Ref]) + } + } + if idx.Name == "" || idx.Backend == "" { + return fmt.Errorf("@index requires 'name' and 'backend' arguments") + } + config.Indices = append(config.Indices, idx) + case "populate": + p := PopulateDirective{} + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("index")); ok { + p.IndexName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("entity")); ok { + p.EntityTypeName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("path")); ok { + p.Path = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("query")); ok { + p.Query = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("resyncInterval")); ok { + p.ResyncInterval = doc.StringValueContentString(val.Ref) + } + config.Populations = append(config.Populations, p) + case "subscribe": + s := SubscribeDirective{} + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("index")); ok { + s.IndexName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("entity")); ok { + s.EntityTypeName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("path")); ok { + s.Path = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("deletionPath")); ok { + s.DeletionPath = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("subscription")); ok { + s.Subscription = doc.StringValueContentString(val.Ref) + } + config.Subscriptions = append(config.Subscriptions, s) + } + } + } + return nil +} + +func parseEntityDirectives(doc *ast.Document, config *ParsedConfig) error { + for i := range doc.ObjectTypeDefinitions { + def := &doc.ObjectTypeDefinitions[i] + entity, err := parseSearchableType(doc, def, i) + if err != nil { + return err + } + if entity != nil { + config.Entities = append(config.Entities, *entity) + } + } + // Also check type extensions + for i := range doc.ObjectTypeExtensions { + ext := &doc.ObjectTypeExtensions[i] + entity, err := parseSearchableTypeExtension(doc, ext, i) + if err != nil { + return err + } + if entity != nil { + config.Entities = append(config.Entities, 
*entity) + } + } + return nil +} + +func parseSearchableType(doc *ast.Document, def *ast.ObjectTypeDefinition, defIdx int) (*SearchableEntity, error) { + // Look for @searchable directive + searchableDir := -1 + for _, dirRef := range def.Directives.Refs { + if doc.DirectiveNameString(dirRef) == "searchable" { + searchableDir = dirRef + break + } + } + if searchableDir == -1 { + return nil, nil + } + + entity := &SearchableEntity{ + TypeName: doc.ObjectTypeDefinitionNameString(defIdx), + ResultsMetaInformation: true, // default + } + + // Parse @searchable arguments + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("index")); ok { + entity.IndexName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("searchField")); ok { + entity.SearchField = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("suggestField")); ok { + entity.SuggestField = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("resultsMetaInformation")); ok { + if val.Kind == ast.ValueKindBoolean { + entity.ResultsMetaInformation = bool(doc.BooleanValues[val.Ref]) + } + } + + // Parse @key directive for key fields + for _, dirRef := range def.Directives.Refs { + if doc.DirectiveNameString(dirRef) == "key" { + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("fields")); ok { + fieldsStr := doc.StringValueContentString(val.Ref) + entity.KeyFields = strings.Fields(fieldsStr) + } + } + } + + // Parse fields with @indexed and @embedding directives + for _, fieldRef := range def.FieldsDefinition.Refs { + fieldName := doc.FieldDefinitionNameString(fieldRef) + fieldType := doc.FieldDefinitionTypeNameString(fieldRef) + + // Check for @indexed + for _, dirRef := range doc.FieldDefinitions[fieldRef].Directives.Refs { + switch doc.DirectiveNameString(dirRef) { + case "indexed": + field, err := parseIndexedDirective(doc, dirRef, fieldName, fieldType) + if err != nil { + return nil, err + } + entity.Fields = append(entity.Fields, *field) + case "embedding": + emb, err := parseEmbeddingDirective(doc, dirRef, fieldName) + if err != nil { + return nil, err + } + entity.EmbeddingFields = append(entity.EmbeddingFields, *emb) + } + } + } + + return entity, nil +} + +func parseSearchableTypeExtension(doc *ast.Document, ext *ast.ObjectTypeExtension, extIdx int) (*SearchableEntity, error) { + searchableDir := -1 + for _, dirRef := range ext.Directives.Refs { + if doc.DirectiveNameString(dirRef) == "searchable" { + searchableDir = dirRef + break + } + } + if searchableDir == -1 { + return nil, nil + } + + entity := &SearchableEntity{ + TypeName: doc.ObjectTypeExtensionNameString(extIdx), + ResultsMetaInformation: true, // default + } + + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("index")); ok { + entity.IndexName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("searchField")); ok { + entity.SearchField = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("suggestField")); ok { + entity.SuggestField = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(searchableDir, []byte("resultsMetaInformation")); ok { + if val.Kind == ast.ValueKindBoolean { + entity.ResultsMetaInformation = bool(doc.BooleanValues[val.Ref]) + } + } + + for _, dirRef := 
range ext.Directives.Refs { + if doc.DirectiveNameString(dirRef) == "key" { + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("fields")); ok { + fieldsStr := doc.StringValueContentString(val.Ref) + entity.KeyFields = strings.Fields(fieldsStr) + } + } + } + + for _, fieldRef := range ext.FieldsDefinition.Refs { + fieldName := doc.FieldDefinitionNameString(fieldRef) + fieldType := doc.FieldDefinitionTypeNameString(fieldRef) + + for _, dirRef := range doc.FieldDefinitions[fieldRef].Directives.Refs { + switch doc.DirectiveNameString(dirRef) { + case "indexed": + field, err := parseIndexedDirective(doc, dirRef, fieldName, fieldType) + if err != nil { + return nil, err + } + entity.Fields = append(entity.Fields, *field) + case "embedding": + emb, err := parseEmbeddingDirective(doc, dirRef, fieldName) + if err != nil { + return nil, err + } + entity.EmbeddingFields = append(entity.EmbeddingFields, *emb) + } + } + } + + return entity, nil +} + +func parseIndexedDirective(doc *ast.Document, dirRef int, fieldName, fieldType string) (*IndexedField, error) { + field := &IndexedField{ + FieldName: fieldName, + GraphQLType: fieldType, + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("type")); ok { + enumStr := doc.EnumValueNameString(val.Ref) + ft, ok := searchindex.ParseFieldType(enumStr) + if !ok { + return nil, fmt.Errorf("unknown indexed field type %q on field %s", enumStr, fieldName) + } + field.IndexType = ft + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("filterable")); ok { + field.Filterable = val.Kind == ast.ValueKindBoolean && bool(doc.BooleanValues[val.Ref]) + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("sortable")); ok { + field.Sortable = val.Kind == ast.ValueKindBoolean && bool(doc.BooleanValues[val.Ref]) + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("dimensions")); ok { + if val.Kind == ast.ValueKindInteger { + field.Dimensions = int(doc.IntValueAsInt32(val.Ref)) + } + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("weight")); ok { + switch val.Kind { + case ast.ValueKindFloat: + field.Weight = float64(doc.FloatValueAsFloat32(val.Ref)) + case ast.ValueKindInteger: + field.Weight = float64(doc.IntValueAsInt32(val.Ref)) + } + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("autocomplete")); ok { + field.Autocomplete = val.Kind == ast.ValueKindBoolean && bool(doc.BooleanValues[val.Ref]) + } + + return field, nil +} + +func parseEmbeddingDirective(doc *ast.Document, dirRef int, fieldName string) (*EmbeddingField, error) { + emb := &EmbeddingField{ + FieldName: fieldName, + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("fields")); ok { + fieldsStr := doc.StringValueContentString(val.Ref) + emb.SourceFields = strings.Fields(fieldsStr) + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("template")); ok { + emb.Template = doc.StringValueContentString(val.Ref) + } + + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("model")); ok { + emb.Model = doc.StringValueContentString(val.Ref) + } + + if len(emb.SourceFields) == 0 || emb.Template == "" || emb.Model == "" { + return nil, fmt.Errorf("@embedding on field %s requires 'fields', 'template', and 'model' arguments", fieldName) + } + + return emb, nil +} + +// ParsePopulateDirective parses @populate from a query operation document. +// Deprecated: prefer placing @populate on schema extensions and using ParseConfigSchema. 
+func ParsePopulateDirective(doc *ast.Document, operationRef int) (*PopulateDirective, error) { + for _, dirRef := range doc.OperationDefinitions[operationRef].Directives.Refs { + if doc.DirectiveNameString(dirRef) != "populate" { + continue + } + p := &PopulateDirective{} + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("index")); ok { + p.IndexName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("entity")); ok { + p.EntityTypeName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("path")); ok { + p.Path = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("query")); ok { + p.Query = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("resyncInterval")); ok { + p.ResyncInterval = doc.StringValueContentString(val.Ref) + } + return p, nil + } + return nil, nil +} + +// ParseSubscribeDirective parses @subscribe from a subscription operation document. +// Deprecated: prefer placing @subscribe on schema extensions and using ParseConfigSchema. +func ParseSubscribeDirective(doc *ast.Document, operationRef int) (*SubscribeDirective, error) { + for _, dirRef := range doc.OperationDefinitions[operationRef].Directives.Refs { + if doc.DirectiveNameString(dirRef) != "subscribe" { + continue + } + s := &SubscribeDirective{} + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("index")); ok { + s.IndexName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("entity")); ok { + s.EntityTypeName = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("path")); ok { + s.Path = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("deletionPath")); ok { + s.DeletionPath = doc.StringValueContentString(val.Ref) + } + if val, ok := doc.DirectiveArgumentValueByName(dirRef, []byte("subscription")); ok { + s.Subscription = doc.StringValueContentString(val.Ref) + } + return s, nil + } + return nil, nil +} diff --git a/v2/pkg/engine/datasource/search_datasource/entity_extractor.go b/v2/pkg/engine/datasource/search_datasource/entity_extractor.go new file mode 100644 index 0000000000..b7d9a323d6 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/entity_extractor.go @@ -0,0 +1,118 @@ +package search_datasource + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// ExtractEntities extracts entity documents from a GraphQL response JSON. +// The path parameter (e.g. "data.products") specifies where to find the entity array. 
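+// Both an array (typical for populate queries) and a single object (typical for
+// subscription updates) are accepted at the path.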
+func ExtractEntities(responseBody []byte, path string, entityTypeName string, keyFields []string) ([]searchindex.EntityDocument, error) { + var raw any + if err := json.Unmarshal(responseBody, &raw); err != nil { + return nil, fmt.Errorf("invalid JSON response: %w", err) + } + + // Navigate to the path + current := raw + for _, segment := range strings.Split(path, ".") { + obj, ok := current.(map[string]any) + if !ok { + return nil, fmt.Errorf("expected object at path segment %q, got %T", segment, current) + } + current, ok = obj[segment] + if !ok { + return nil, fmt.Errorf("path segment %q not found", segment) + } + } + + // Handle both single object and array + switch v := current.(type) { + case []any: + return extractFromArray(v, entityTypeName, keyFields) + case map[string]any: + doc, err := extractSingleEntity(v, entityTypeName, keyFields) + if err != nil { + return nil, err + } + return []searchindex.EntityDocument{doc}, nil + default: + return nil, fmt.Errorf("expected array or object at path, got %T", current) + } +} + +func extractFromArray(items []any, entityTypeName string, keyFields []string) ([]searchindex.EntityDocument, error) { + docs := make([]searchindex.EntityDocument, 0, len(items)) + for i, item := range items { + obj, ok := item.(map[string]any) + if !ok { + return nil, fmt.Errorf("expected object at index %d, got %T", i, item) + } + doc, err := extractSingleEntity(obj, entityTypeName, keyFields) + if err != nil { + return nil, fmt.Errorf("entity at index %d: %w", i, err) + } + docs = append(docs, doc) + } + return docs, nil +} + +func extractSingleEntity(obj map[string]any, entityTypeName string, keyFields []string) (searchindex.EntityDocument, error) { + identity := searchindex.DocumentIdentity{ + TypeName: entityTypeName, + KeyFields: make(map[string]any, len(keyFields)), + } + + for _, kf := range keyFields { + val, ok := obj[kf] + if !ok { + return searchindex.EntityDocument{}, fmt.Errorf("key field %q not found in entity", kf) + } + identity.KeyFields[kf] = val + } + + // Copy all fields (except vectors, which are handled by the embedding pipeline) + fields := make(map[string]any, len(obj)) + vectors := make(map[string][]float32) + + for k, v := range obj { + // Check if this is a vector field (array of numbers) + if arr, ok := v.([]any); ok && len(arr) > 0 { + if _, isNum := arr[0].(float64); isNum { + vec := make([]float32, len(arr)) + allNum := true + for i, elem := range arr { + num, ok := elem.(float64) + if !ok { + allNum = false + break + } + vec[i] = float32(num) + } + if allNum { + vectors[k] = vec + continue + } + } + } + fields[k] = v + } + + return searchindex.EntityDocument{ + Identity: identity, + Fields: fields, + Vectors: vectors, + }, nil +} + +// EntityFieldMaps extracts fields as maps for batch embedding processing. 
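+// The result is index-aligned with docs: result[i] holds the Fields of docs[i].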
+func EntityFieldMaps(docs []searchindex.EntityDocument) []map[string]any { + result := make([]map[string]any, len(docs)) + for i, doc := range docs { + result[i] = doc.Fields + } + return result +} diff --git a/v2/pkg/engine/datasource/search_datasource/entity_extractor_test.go b/v2/pkg/engine/datasource/search_datasource/entity_extractor_test.go new file mode 100644 index 0000000000..ca798c65b2 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/entity_extractor_test.go @@ -0,0 +1,124 @@ +package search_datasource + +import ( + "testing" +) + +func TestExtractEntities(t *testing.T) { + t.Run("extract array from path", func(t *testing.T) { + response := []byte(`{ + "data": { + "products": [ + {"id": "1", "name": "Widget", "price": 9.99}, + {"id": "2", "name": "Gadget", "price": 19.99} + ] + } + }`) + + docs, err := ExtractEntities(response, "data.products", "Product", []string{"id"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(docs) != 2 { + t.Fatalf("expected 2 documents, got %d", len(docs)) + } + + if docs[0].Identity.TypeName != "Product" { + t.Errorf("TypeName = %q, want %q", docs[0].Identity.TypeName, "Product") + } + if docs[0].Identity.KeyFields["id"] != "1" { + t.Errorf("KeyFields[id] = %v, want %q", docs[0].Identity.KeyFields["id"], "1") + } + if docs[0].Fields["name"] != "Widget" { + t.Errorf("Fields[name] = %v, want %q", docs[0].Fields["name"], "Widget") + } + }) + + t.Run("extract single object", func(t *testing.T) { + response := []byte(`{ + "data": { + "productUpdated": {"id": "1", "name": "Widget", "price": 9.99} + } + }`) + + docs, err := ExtractEntities(response, "data.productUpdated", "Product", []string{"id"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(docs) != 1 { + t.Fatalf("expected 1 document, got %d", len(docs)) + } + }) + + t.Run("missing key field", func(t *testing.T) { + response := []byte(`{"data": {"items": [{"name": "Widget"}]}}`) + + _, err := ExtractEntities(response, "data.items", "Product", []string{"id"}) + if err == nil { + t.Fatal("expected error for missing key field") + } + }) + + t.Run("invalid path", func(t *testing.T) { + response := []byte(`{"data": {}}`) + + _, err := ExtractEntities(response, "data.missing", "Product", []string{"id"}) + if err == nil { + t.Fatal("expected error for invalid path") + } + }) + + t.Run("vector fields extracted separately", func(t *testing.T) { + response := []byte(`{ + "data": { + "images": [ + {"id": "1", "caption": "A cat", "embedding": [0.1, 0.2, 0.3]} + ] + } + }`) + + docs, err := ExtractEntities(response, "data.images", "Image", []string{"id"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(docs) != 1 { + t.Fatalf("expected 1 document, got %d", len(docs)) + } + + // Embedding should be in Vectors, not Fields + if _, ok := docs[0].Fields["embedding"]; ok { + t.Error("embedding should not be in Fields") + } + vec, ok := docs[0].Vectors["embedding"] + if !ok { + t.Fatal("embedding should be in Vectors") + } + if len(vec) != 3 { + t.Errorf("vector length = %d, want 3", len(vec)) + } + }) +} + +func TestEntityFieldMaps(t *testing.T) { + response := []byte(`{ + "data": { + "articles": [ + {"id": "1", "title": "Hello", "body": "World"}, + {"id": "2", "title": "Foo", "body": "Bar"} + ] + } + }`) + + docs, err := ExtractEntities(response, "data.articles", "Article", []string{"id"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + fieldMaps := EntityFieldMaps(docs) + if len(fieldMaps) != 2 { + t.Fatalf("expected 2 
field maps, got %d", len(fieldMaps))
+	}
+	if fieldMaps[0]["title"] != "Hello" {
+		t.Errorf("fieldMaps[0][title] = %v, want %q", fieldMaps[0]["title"], "Hello")
+	}
+}
diff --git a/v2/pkg/engine/datasource/search_datasource/factory.go b/v2/pkg/engine/datasource/search_datasource/factory.go
new file mode 100644
index 0000000000..b3f039d116
--- /dev/null
+++ b/v2/pkg/engine/datasource/search_datasource/factory.go
@@ -0,0 +1,92 @@
+package search_datasource
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/jensneuse/abstractlogger"
+
+	"github.com/wundergraph/graphql-go-tools/v2/pkg/ast"
+	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/plan"
+	"github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+)
+
+// Factory creates Planner instances for the search datasource.
+type Factory struct {
+	executionContext context.Context
+	indexRegistry    *searchindex.IndexFactoryRegistry
+	embedderRegistry *searchindex.EmbedderRegistry
+	indices          map[string]searchindex.Index // index name → Index instance
+}
+
+// NewFactory creates a new search datasource factory.
+func NewFactory(
+	ctx context.Context,
+	indexRegistry *searchindex.IndexFactoryRegistry,
+	embedderRegistry *searchindex.EmbedderRegistry,
+) *Factory {
+	return &Factory{
+		executionContext: ctx,
+		indexRegistry:    indexRegistry,
+		embedderRegistry: embedderRegistry,
+		indices:          make(map[string]searchindex.Index),
+	}
+}
+
+// RegisterIndex registers a pre-created index for use by planners.
+func (f *Factory) RegisterIndex(name string, index searchindex.Index) {
+	f.indices[name] = index
+}
+
+// Planner creates a new DataSourcePlanner for the search datasource.
+func (f *Factory) Planner(_ abstractlogger.Logger) plan.DataSourcePlanner[Configuration] {
+	return &Planner{
+		factory: f,
+	}
+}
+
+// Context returns the execution context.
+func (f *Factory) Context() context.Context {
+	return f.executionContext
+}
+
+// UpstreamSchema returns the upstream schema for the search datasource.
+func (f *Factory) UpstreamSchema(_ plan.DataSourceConfiguration[Configuration]) (*ast.Document, bool) {
+	return nil, false
+}
+
+// PlanningBehavior returns the planning behavior for the search datasource.
+func (f *Factory) PlanningBehavior() plan.DataSourcePlanningBehavior {
+	return plan.DataSourcePlanningBehavior{
+		MergeAliasedRootNodes:      true,
+		OverrideFieldPathFromAlias: true,
+	}
+}
+
+// CreateSourceForConfig creates a Source for the given configuration.
+// Returns an error if the index is not registered. Callers must ensure
+// Manager.Start() completes before queries are served.
+func (f *Factory) CreateSourceForConfig(config Configuration) (*Source, error) {
+	idx, ok := f.indices[config.IndexName]
+	if !ok {
+		return nil, fmt.Errorf("search_datasource: index %q not registered", config.IndexName)
+	}
+
+	source := &Source{
+		index:  idx,
+		config: config,
+	}
+
+	// If the entity has embedding fields, find the appropriate embedder.
+	// Embedding lookup failures are non-fatal (and currently ignored, not logged) --
+	// the source will function without auto-embedding (text-only search instead of hybrid).
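+	// Note: only the first embedding field's model is consulted, so a single
+	// embedder serves the whole entity.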
+ if len(config.EmbeddingFields) > 0 && f.embedderRegistry != nil { + model := config.EmbeddingFields[0].Model + embedder, err := f.embedderRegistry.Get(model) + if err == nil { + source.embedder = embedder + } + } + + return source, nil +} diff --git a/v2/pkg/engine/datasource/search_datasource/filter_parser.go b/v2/pkg/engine/datasource/search_datasource/filter_parser.go new file mode 100644 index 0000000000..9c0937882b --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/filter_parser.go @@ -0,0 +1,388 @@ +package search_datasource + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// ParseFilterJSON converts a GraphQL filter argument (as JSON) into a searchindex.Filter tree. +// The JSON structure matches the generated filter input types: +// +// { +// "name": {"eq": "Widget"}, +// "price": {"gte": 10.0, "lte": 100.0}, +// "AND": [{"category": {"eq": "Electronics"}}], +// "OR": [...], +// "NOT": {...} +// } +func ParseFilterJSON(data json.RawMessage, fields []IndexedFieldConfig) (*searchindex.Filter, error) { + if len(data) == 0 { + return nil, nil + } + + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return nil, fmt.Errorf("invalid filter JSON: %w", err) + } + + return parseFilterMap(raw, fields) +} + +func parseFilterMap(raw map[string]json.RawMessage, fields []IndexedFieldConfig) (*searchindex.Filter, error) { + filter := &searchindex.Filter{} + + for key, val := range raw { + switch key { + case "AND": + var items []json.RawMessage + if err := json.Unmarshal(val, &items); err != nil { + return nil, fmt.Errorf("invalid AND value: %w", err) + } + for _, item := range items { + child, err := ParseFilterJSON(item, fields) + if err != nil { + return nil, err + } + if child != nil { + filter.And = append(filter.And, child) + } + } + case "OR": + var items []json.RawMessage + if err := json.Unmarshal(val, &items); err != nil { + return nil, fmt.Errorf("invalid OR value: %w", err) + } + for _, item := range items { + child, err := ParseFilterJSON(item, fields) + if err != nil { + return nil, err + } + if child != nil { + filter.Or = append(filter.Or, child) + } + } + case "NOT": + child, err := ParseFilterJSON(val, fields) + if err != nil { + return nil, err + } + filter.Not = child + default: + // Geo filter suffixes: _distance, _boundingBox + if strings.HasSuffix(key, "_distance") { + fieldName := strings.TrimSuffix(key, "_distance") + geoFilter, err := parseGeoDistanceFilter(fieldName, val) + if err != nil { + return nil, err + } + if geoFilter != nil { + filter.And = append(filter.And, geoFilter) + } + continue + } + if strings.HasSuffix(key, "_boundingBox") { + fieldName := strings.TrimSuffix(key, "_boundingBox") + geoFilter, err := parseGeoBoundingBoxFilter(fieldName, val) + if err != nil { + return nil, err + } + if geoFilter != nil { + filter.And = append(filter.And, geoFilter) + } + continue + } + + // Field filter + fieldFilter, err := parseFieldFilter(key, val, fields) + if err != nil { + return nil, err + } + if fieldFilter != nil { + filter.And = append(filter.And, fieldFilter) + } + } + } + + // Simplify: if only one AND clause and nothing else, unwrap it + if len(filter.And) == 1 && len(filter.Or) == 0 && filter.Not == nil { + return filter.And[0], nil + } + + return filter, nil +} + +func parseFieldFilter(fieldName string, data json.RawMessage, fields []IndexedFieldConfig) (*searchindex.Filter, error) { + cfg := findFieldConfig(fieldName, fields) + if 
cfg == nil { + return nil, fmt.Errorf("unknown filter field %q", fieldName) + } + + switch cfg.IndexType { + case searchindex.FieldTypeBool: + // Boolean fields are just a direct value + var boolVal bool + if err := json.Unmarshal(data, &boolVal); err != nil { + return nil, fmt.Errorf("invalid boolean filter for field %q: %w", fieldName, err) + } + return &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: fieldName, Value: boolVal}, + }, nil + + case searchindex.FieldTypeText, searchindex.FieldTypeKeyword: + return parseStringFilter(fieldName, data) + + case searchindex.FieldTypeNumeric: + return parseNumericFilter(fieldName, data) + + case searchindex.FieldTypeDate, searchindex.FieldTypeDateTime: + return parseDateFilter(fieldName, data) + + default: + return nil, fmt.Errorf("unsupported filter type for field %q", fieldName) + } +} + +func parseStringFilter(fieldName string, data json.RawMessage) (*searchindex.Filter, error) { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return nil, fmt.Errorf("invalid string filter for field %q: %w", fieldName, err) + } + + for op, val := range raw { + switch op { + case "eq": + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + return &searchindex.Filter{Term: &searchindex.TermFilter{Field: fieldName, Value: s}}, nil + case "ne": + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + return &searchindex.Filter{ + Not: &searchindex.Filter{Term: &searchindex.TermFilter{Field: fieldName, Value: s}}, + }, nil + case "in": + var values []string + if err := json.Unmarshal(val, &values); err != nil { + return nil, err + } + anyValues := make([]any, len(values)) + for i, v := range values { + anyValues[i] = v + } + return &searchindex.Filter{Terms: &searchindex.TermsFilter{Field: fieldName, Values: anyValues}}, nil + case "contains": + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + // Contains is modeled as a term match in full-text context + return &searchindex.Filter{Term: &searchindex.TermFilter{Field: fieldName, Value: s}}, nil + case "startsWith": + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + return &searchindex.Filter{Prefix: &searchindex.PrefixFilter{Field: fieldName, Value: s}}, nil + } + } + + return nil, nil +} + +func parseNumericFilter(fieldName string, data json.RawMessage) (*searchindex.Filter, error) { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return nil, fmt.Errorf("invalid numeric filter for field %q: %w", fieldName, err) + } + + // Check for equality first + if eqVal, ok := raw["eq"]; ok { + var num float64 + if err := json.Unmarshal(eqVal, &num); err != nil { + return nil, err + } + return &searchindex.Filter{Term: &searchindex.TermFilter{Field: fieldName, Value: num}}, nil + } + + // Range filter + rf := &searchindex.RangeFilter{Field: fieldName} + hasRange := false + + if val, ok := raw["gt"]; ok { + var num float64 + if err := json.Unmarshal(val, &num); err != nil { + return nil, err + } + rf.GT = num + rf.HasGT = true + hasRange = true + } + if val, ok := raw["gte"]; ok { + var num float64 + if err := json.Unmarshal(val, &num); err != nil { + return nil, err + } + rf.GTE = num + hasRange = true + } + if val, ok := raw["lt"]; ok { + var num float64 + if err := json.Unmarshal(val, &num); err != nil { + return nil, err + } + rf.LT = num + rf.HasLT = true + hasRange = true + } + if val, 
ok := raw["lte"]; ok { + var num float64 + if err := json.Unmarshal(val, &num); err != nil { + return nil, err + } + rf.LTE = num + hasRange = true + } + + if hasRange { + return &searchindex.Filter{Range: rf}, nil + } + + return nil, nil +} + +func parseDateFilter(fieldName string, data json.RawMessage) (*searchindex.Filter, error) { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return nil, fmt.Errorf("invalid date filter for field %q: %w", fieldName, err) + } + + // Date equality is implemented as a range with GTE == LTE because backends + // store dates as numeric timestamps, not strings (a TermFilter would fail). + if eqVal, ok := raw["eq"]; ok { + var s string + if err := json.Unmarshal(eqVal, &s); err != nil { + return nil, err + } + return &searchindex.Filter{Range: &searchindex.RangeFilter{Field: fieldName, GTE: s, LTE: s}}, nil + } + + // Range filter — after is alias for gt, before is alias for lt + rf := &searchindex.RangeFilter{Field: fieldName} + hasRange := false + + if val, ok := raw["gt"]; ok { + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + rf.GT = s + rf.HasGT = true + hasRange = true + } else if val, ok := raw["after"]; ok { + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + rf.GT = s + rf.HasGT = true + hasRange = true + } + if val, ok := raw["gte"]; ok { + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + rf.GTE = s + hasRange = true + } + if val, ok := raw["lt"]; ok { + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + rf.LT = s + rf.HasLT = true + hasRange = true + } else if val, ok := raw["before"]; ok { + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + rf.LT = s + rf.HasLT = true + hasRange = true + } + if val, ok := raw["lte"]; ok { + var s string + if err := json.Unmarshal(val, &s); err != nil { + return nil, err + } + rf.LTE = s + hasRange = true + } + + if hasRange { + return &searchindex.Filter{Range: rf}, nil + } + + return nil, nil +} + +func findFieldConfig(name string, fields []IndexedFieldConfig) *IndexedFieldConfig { + for i := range fields { + if fields[i].FieldName == name { + return &fields[i] + } + } + return nil +} + +func parseGeoDistanceFilter(fieldName string, data json.RawMessage) (*searchindex.Filter, error) { + var input struct { + Center struct { + Lat float64 `json:"lat"` + Lon float64 `json:"lon"` + } `json:"center"` + Distance string `json:"distance"` + } + if err := json.Unmarshal(data, &input); err != nil { + return nil, fmt.Errorf("invalid geo distance filter for field %q: %w", fieldName, err) + } + return &searchindex.Filter{ + GeoDistance: &searchindex.GeoDistanceFilter{ + Field: fieldName, + Center: searchindex.GeoPoint{Lat: input.Center.Lat, Lon: input.Center.Lon}, + Distance: input.Distance, + }, + }, nil +} + +func parseGeoBoundingBoxFilter(fieldName string, data json.RawMessage) (*searchindex.Filter, error) { + var input struct { + TopLeft struct { + Lat float64 `json:"lat"` + Lon float64 `json:"lon"` + } `json:"topLeft"` + BottomRight struct { + Lat float64 `json:"lat"` + Lon float64 `json:"lon"` + } `json:"bottomRight"` + } + if err := json.Unmarshal(data, &input); err != nil { + return nil, fmt.Errorf("invalid geo bounding box filter for field %q: %w", fieldName, err) + } + return &searchindex.Filter{ + GeoBoundingBox: &searchindex.GeoBoundingBoxFilter{ + Field: fieldName, + TopLeft: 
searchindex.GeoPoint{Lat: input.TopLeft.Lat, Lon: input.TopLeft.Lon}, + BottomRight: searchindex.GeoPoint{Lat: input.BottomRight.Lat, Lon: input.BottomRight.Lon}, + }, + }, nil +} diff --git a/v2/pkg/engine/datasource/search_datasource/filter_parser_test.go b/v2/pkg/engine/datasource/search_datasource/filter_parser_test.go new file mode 100644 index 0000000000..11b7291a11 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/filter_parser_test.go @@ -0,0 +1,331 @@ +package search_datasource + +import ( + "encoding/json" + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestParseFilterJSON(t *testing.T) { + fields := []IndexedFieldConfig{ + {FieldName: "name", IndexType: searchindex.FieldTypeText, Filterable: true}, + {FieldName: "category", IndexType: searchindex.FieldTypeKeyword, Filterable: true}, + {FieldName: "price", IndexType: searchindex.FieldTypeNumeric, GraphQLType: "Float!", Filterable: true}, + {FieldName: "inStock", IndexType: searchindex.FieldTypeBool, Filterable: true}, + } + + t.Run("nil input", func(t *testing.T) { + f, err := ParseFilterJSON(nil, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f != nil { + t.Fatal("expected nil filter") + } + }) + + t.Run("term filter string", func(t *testing.T) { + input := json.RawMessage(`{"category": {"eq": "Electronics"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Term == nil { + t.Fatal("expected term filter") + } + if f.Term.Field != "category" { + t.Errorf("field = %q, want %q", f.Term.Field, "category") + } + if f.Term.Value != "Electronics" { + t.Errorf("value = %v, want %q", f.Term.Value, "Electronics") + } + }) + + t.Run("boolean filter", func(t *testing.T) { + input := json.RawMessage(`{"inStock": true}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Term == nil { + t.Fatal("expected term filter for boolean") + } + if f.Term.Value != true { + t.Errorf("value = %v, want true", f.Term.Value) + } + }) + + t.Run("numeric range filter", func(t *testing.T) { + input := json.RawMessage(`{"price": {"gte": 10.0, "lte": 100.0}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Range == nil { + t.Fatal("expected range filter") + } + if f.Range.GTE != 10.0 { + t.Errorf("GTE = %v, want 10.0", f.Range.GTE) + } + if f.Range.LTE != 100.0 { + t.Errorf("LTE = %v, want 100.0", f.Range.LTE) + } + }) + + t.Run("prefix filter", func(t *testing.T) { + input := json.RawMessage(`{"name": {"startsWith": "Widget"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Prefix == nil { + t.Fatal("expected prefix filter") + } + if f.Prefix.Value != "Widget" { + t.Errorf("value = %q, want %q", f.Prefix.Value, "Widget") + } + }) + + t.Run("terms filter (IN)", func(t *testing.T) { + input := json.RawMessage(`{"category": {"in": ["A", "B", "C"]}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Terms == nil { + t.Fatal("expected terms filter") + } + if len(f.Terms.Values) != 3 { + t.Errorf("len(values) = %d, want 3", len(f.Terms.Values)) + } + }) + + t.Run("NOT filter", func(t *testing.T) { + input := json.RawMessage(`{"NOT": {"category": {"eq": "Obsolete"}}}`) + f, err := ParseFilterJSON(input, fields) + if 
err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Not == nil { + t.Fatal("expected NOT filter") + } + if f.Not.Term == nil { + t.Fatal("expected term inside NOT") + } + }) + + t.Run("AND filter", func(t *testing.T) { + input := json.RawMessage(`{"AND": [{"category": {"eq": "A"}}, {"inStock": true}]}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || len(f.And) != 2 { + t.Fatalf("expected 2 AND clauses, got %d", len(f.And)) + } + }) + + t.Run("OR filter", func(t *testing.T) { + input := json.RawMessage(`{"OR": [{"category": {"eq": "A"}}, {"category": {"eq": "B"}}]}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || len(f.Or) != 2 { + t.Fatalf("expected 2 OR clauses, got %d", len(f.Or)) + } + }) + + t.Run("numeric equality", func(t *testing.T) { + input := json.RawMessage(`{"price": {"eq": 42.5}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Term == nil { + t.Fatal("expected term filter for numeric equality") + } + if f.Term.Value != 42.5 { + t.Errorf("value = %v, want 42.5", f.Term.Value) + } + }) + + t.Run("ne filter (NOT eq)", func(t *testing.T) { + input := json.RawMessage(`{"category": {"ne": "Obsolete"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Not == nil || f.Not.Term == nil { + t.Fatal("expected NOT(term) filter") + } + if f.Not.Term.Value != "Obsolete" { + t.Errorf("value = %v, want %q", f.Not.Term.Value, "Obsolete") + } + }) + + t.Run("unknown field", func(t *testing.T) { + input := json.RawMessage(`{"unknown": {"eq": "val"}}`) + _, err := ParseFilterJSON(input, fields) + if err == nil { + t.Fatal("expected error for unknown field") + } + }) + + t.Run("geo distance filter", func(t *testing.T) { + input := json.RawMessage(`{"location_distance": {"center": {"lat": 40.7128, "lon": -74.006}, "distance": "10km"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.GeoDistance == nil { + t.Fatal("expected geo distance filter") + } + if f.GeoDistance.Field != "location" { + t.Errorf("field = %q, want %q", f.GeoDistance.Field, "location") + } + if f.GeoDistance.Distance != "10km" { + t.Errorf("distance = %q, want %q", f.GeoDistance.Distance, "10km") + } + if f.GeoDistance.Center.Lat != 40.7128 { + t.Errorf("lat = %v, want 40.7128", f.GeoDistance.Center.Lat) + } + if f.GeoDistance.Center.Lon != -74.006 { + t.Errorf("lon = %v, want -74.006", f.GeoDistance.Center.Lon) + } + }) + + t.Run("geo bounding box filter", func(t *testing.T) { + input := json.RawMessage(`{"location_boundingBox": {"topLeft": {"lat": 41.0, "lon": -74.5}, "bottomRight": {"lat": 40.5, "lon": -73.5}}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.GeoBoundingBox == nil { + t.Fatal("expected geo bounding box filter") + } + if f.GeoBoundingBox.Field != "location" { + t.Errorf("field = %q, want %q", f.GeoBoundingBox.Field, "location") + } + if f.GeoBoundingBox.TopLeft.Lat != 41.0 { + t.Errorf("topLeft.lat = %v, want 41.0", f.GeoBoundingBox.TopLeft.Lat) + } + if f.GeoBoundingBox.BottomRight.Lon != -73.5 { + t.Errorf("bottomRight.lon = %v, want -73.5", f.GeoBoundingBox.BottomRight.Lon) + } + }) +} + +func TestParseFilterJSON_DateFields(t 
*testing.T) { + fields := []IndexedFieldConfig{ + {FieldName: "eventDate", IndexType: searchindex.FieldTypeDate, Filterable: true}, + {FieldName: "createdAt", IndexType: searchindex.FieldTypeDateTime, Filterable: true}, + } + + t.Run("date eq", func(t *testing.T) { + input := json.RawMessage(`{"eventDate": {"eq": "2024-01-15"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Range == nil { + t.Fatal("expected range filter (date eq is translated to GTE+LTE)") + } + if f.Range.Field != "eventDate" { + t.Errorf("field = %q, want %q", f.Range.Field, "eventDate") + } + if f.Range.GTE != "2024-01-15" { + t.Errorf("GTE = %v, want %q", f.Range.GTE, "2024-01-15") + } + if f.Range.LTE != "2024-01-15" { + t.Errorf("LTE = %v, want %q", f.Range.LTE, "2024-01-15") + } + }) + + t.Run("datetime range gte/lte", func(t *testing.T) { + input := json.RawMessage(`{"createdAt": {"gte": "2024-01-01T00:00:00Z", "lte": "2024-12-31T23:59:59Z"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Range == nil { + t.Fatal("expected range filter") + } + if f.Range.Field != "createdAt" { + t.Errorf("field = %q, want %q", f.Range.Field, "createdAt") + } + if f.Range.GTE != "2024-01-01T00:00:00Z" { + t.Errorf("GTE = %v, want %q", f.Range.GTE, "2024-01-01T00:00:00Z") + } + if f.Range.LTE != "2024-12-31T23:59:59Z" { + t.Errorf("LTE = %v, want %q", f.Range.LTE, "2024-12-31T23:59:59Z") + } + }) + + t.Run("date after alias", func(t *testing.T) { + input := json.RawMessage(`{"eventDate": {"after": "2024-06-01"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Range == nil { + t.Fatal("expected range filter") + } + if f.Range.GT != "2024-06-01" { + t.Errorf("GT = %v, want %q", f.Range.GT, "2024-06-01") + } + if !f.Range.HasGT { + t.Error("HasGT should be true") + } + }) + + t.Run("date before alias", func(t *testing.T) { + input := json.RawMessage(`{"eventDate": {"before": "2025-01-01"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Range == nil { + t.Fatal("expected range filter") + } + if f.Range.LT != "2025-01-01" { + t.Errorf("LT = %v, want %q", f.Range.LT, "2025-01-01") + } + if !f.Range.HasLT { + t.Error("HasLT should be true") + } + }) + + t.Run("gt takes precedence over after", func(t *testing.T) { + input := json.RawMessage(`{"eventDate": {"gt": "2024-03-01", "after": "2024-06-01"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Range == nil { + t.Fatal("expected range filter") + } + // gt should win over after + if f.Range.GT != "2024-03-01" { + t.Errorf("GT = %v, want %q (gt should take precedence)", f.Range.GT, "2024-03-01") + } + }) + + t.Run("combined after and before", func(t *testing.T) { + input := json.RawMessage(`{"createdAt": {"after": "2024-01-01T00:00:00Z", "before": "2025-01-01T00:00:00Z"}}`) + f, err := ParseFilterJSON(input, fields) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if f == nil || f.Range == nil { + t.Fatal("expected range filter") + } + if f.Range.GT != "2024-01-01T00:00:00Z" { + t.Errorf("GT = %v, want %q", f.Range.GT, "2024-01-01T00:00:00Z") + } + if f.Range.LT != "2025-01-01T00:00:00Z" { + t.Errorf("LT = %v, want %q", f.Range.LT, "2025-01-01T00:00:00Z") + } + }) +} diff --git 
a/v2/pkg/engine/datasource/search_datasource/generator.go b/v2/pkg/engine/datasource/search_datasource/generator.go new file mode 100644 index 0000000000..8dedd8702a --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/generator.go @@ -0,0 +1,483 @@ +package search_datasource + +import ( + "fmt" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// GenerateSubgraphSDL takes the parsed config and produces the actual subgraph SDL. +func GenerateSubgraphSDL(config *ParsedConfig) (string, error) { + // Validate cursor pagination backend support. + for _, entity := range config.Entities { + if !entity.CursorBasedPagination { + continue + } + // Find matching index directive. + for _, idx := range config.Indices { + if idx.Name == entity.IndexName { + caps, ok := cursorBackendCaps[idx.Backend] + if !ok || !caps.Supported { + return "", fmt.Errorf("backend %q does not support cursor-based pagination", idx.Backend) + } + break + } + } + } + + var b strings.Builder + + // Shared scalar/input/enum types + writeSharedTypes(&b, config.Entities) + + // Per-entity types + for _, entity := range config.Entities { + writeEntityTypes(&b, &entity) + } + + // Query type with all search fields + writeQueryType(&b, config.Entities) + + // Entity stubs + writeEntityStubs(&b, config.Entities) + + return b.String(), nil +} + +func writeSharedTypes(b *strings.Builder, entities []SearchableEntity) { + // Declare @oneOf if any entity uses vector search (SearchInput uses @oneOf). + for _, e := range entities { + if e.HasVectorSearch() { + b.WriteString("directive @oneOf on INPUT_OBJECT\n\n") + break + } + } + + b.WriteString(`input StringFilter { + eq: String + ne: String + in: [String!] + contains: String + startsWith: String +} + +input FloatFilter { + eq: Float + gt: Float + gte: Float + lt: Float + lte: Float +} + +input IntFilter { + eq: Int + gt: Int + gte: Int + lt: Int + lte: Int +} + +enum SortDirection { + ASC + DESC +} + +enum Fuzziness { + EXACT + LOW + HIGH +} + +`) + + // Only emit facet types if at least one entity uses wrapper types and is not vector-only. + needsFacets := false + for _, e := range entities { + if e.NeedsResponseWrapper() && !e.HasVectorSearch() { + needsFacets = true + break + } + } + if needsFacets { + b.WriteString(`type SearchFacet { + field: String! + values: [SearchFacetValue!]! +} + +type SearchFacetValue { + value: String! + count: Int! +} + +`) + } + + // Emit SearchHighlight type if any entity uses wrapper or connection types. + needsHighlights := false + for _, e := range entities { + if e.NeedsResponseWrapper() { + needsHighlights = true + break + } + } + if needsHighlights { + b.WriteString(`type SearchHighlight { + field: String! + fragments: [String!]! +} + +`) + } + + // Emit SearchPageInfo if any entity uses cursor pagination. + needsPageInfo := false + for _, e := range entities { + if e.CursorBasedPagination { + needsPageInfo = true + break + } + } + if needsPageInfo { + b.WriteString(`type SearchPageInfo { + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String + endCursor: String +} + +`) + } + + // Emit geo types if any entity has GEO fields. + needsGeo := false + for _, e := range entities { + if e.HasGeoSearch() { + needsGeo = true + break + } + } + if needsGeo { + b.WriteString(`input GeoPointInput { + lat: Float! + lon: Float! +} + +input GeoDistanceFilterInput { + center: GeoPointInput! + distance: String! +} + +input GeoBoundingBoxFilterInput { + topLeft: GeoPointInput! 
+ bottomRight: GeoPointInput! +} + +input GeoDistanceSortInput { + field: String! + center: GeoPointInput! + direction: SortDirection! + unit: String +} + +`) + } + + // Emit date scalars and filter types if any entity has DATE or DATETIME fields. + needsDate := false + for _, e := range entities { + if e.HasDateField() { + needsDate = true + break + } + } + if needsDate { + b.WriteString(`scalar Date +scalar DateTime + +input DateFilter { + eq: Date + gt: Date + gte: Date + lt: Date + lte: Date + after: Date + before: Date +} + +input DateTimeFilter { + eq: DateTime + gt: DateTime + gte: DateTime + lt: DateTime + lte: DateTime + after: DateTime + before: DateTime +} + +`) + } + + // Emit SuggestTerm type if any entity has autocomplete. + needsSuggest := false + for _, e := range entities { + if e.SuggestField != "" && e.HasAutocomplete() { + needsSuggest = true + break + } + } + if needsSuggest { + b.WriteString(`type SuggestTerm { + term: String! + count: Int! +} + +`) + } +} + +func writeEntityTypes(b *strings.Builder, entity *SearchableEntity) { + typeName := entity.TypeName + hasVector := entity.HasVectorSearch() + + // Search input for vector entities + if hasVector { + fmt.Fprintf(b, "input Search%sInput @oneOf {\n", typeName) + b.WriteString(" query: String\n") + b.WriteString(" vector: [Float!]\n") + b.WriteString("}\n\n") + } + + // Filter input type + filterFields := filterableFields(entity) + if len(filterFields) > 0 { + fmt.Fprintf(b, "input %sFilter {\n", typeName) + for _, f := range filterFields { + if f.IndexType == searchindex.FieldTypeGeo { + fmt.Fprintf(b, " %s_distance: GeoDistanceFilterInput\n", f.FieldName) + fmt.Fprintf(b, " %s_boundingBox: GeoBoundingBoxFilterInput\n", f.FieldName) + } else { + filterType := graphqlFilterType(f) + fmt.Fprintf(b, " %s: %s\n", f.FieldName, filterType) + } + } + fmt.Fprintf(b, " AND: [%sFilter!]\n", typeName) + fmt.Fprintf(b, " OR: [%sFilter!]\n", typeName) + fmt.Fprintf(b, " NOT: %sFilter\n", typeName) + b.WriteString("}\n\n") + } + + // Sort enum and input + sortFields := sortableFields(entity) + if len(sortFields) > 0 { + fmt.Fprintf(b, "enum %sSortField {\n", typeName) + b.WriteString(" RELEVANCE\n") + for _, f := range sortFields { + fmt.Fprintf(b, " %s\n", strings.ToUpper(f.FieldName)) + } + b.WriteString("}\n\n") + + fmt.Fprintf(b, "input %sSort {\n", typeName) + fmt.Fprintf(b, " field: %sSortField!\n", typeName) + b.WriteString(" direction: SortDirection!\n") + b.WriteString("}\n\n") + } + + // Cursor-based pagination types + if entity.CursorBasedPagination { + writeConnectionTypes(b, entity) + return + } + + // Legacy result wrapper types + if entity.ResultsMetaInformation { + fmt.Fprintf(b, "type Search%sResult {\n", typeName) + fmt.Fprintf(b, " hits: [Search%sHit!]!\n", typeName) + b.WriteString(" totalCount: Int!\n") + if !hasVector { + b.WriteString(" facets: [SearchFacet!]\n") + } + b.WriteString("}\n\n") + + fmt.Fprintf(b, "type Search%sHit {\n", typeName) + b.WriteString(" score: Float!\n") + if hasVector { + b.WriteString(" distance: Float\n") + } + if entity.HasGeoSearch() { + b.WriteString(" geoDistance: Float\n") + } + b.WriteString(" highlights: [SearchHighlight!]\n") + fmt.Fprintf(b, " node: %s!\n", typeName) + b.WriteString("}\n\n") + } +} + +// writeConnectionTypes emits Relay-style Connection/Edge types for cursor pagination. 
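+// As a rough sketch, for an entity named Product with meta information enabled
+// this emits approximately (facets drop out for vector entities; distance and
+// geoDistance appear only when the entity has vector/geo fields):
+//
+//	type SearchProductConnection { edges: [SearchProductEdge!]! pageInfo: SearchPageInfo! totalCount: Int! facets: [SearchFacet!] }
+//	type SearchProductEdge { cursor: String! node: Product! score: Float! highlights: [SearchHighlight!] }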
+func writeConnectionTypes(b *strings.Builder, entity *SearchableEntity) { + typeName := entity.TypeName + hasVector := entity.HasVectorSearch() + + // Connection type + fmt.Fprintf(b, "type Search%sConnection {\n", typeName) + fmt.Fprintf(b, " edges: [Search%sEdge!]!\n", typeName) + b.WriteString(" pageInfo: SearchPageInfo!\n") + b.WriteString(" totalCount: Int!\n") + if !hasVector { + b.WriteString(" facets: [SearchFacet!]\n") + } + b.WriteString("}\n\n") + + // Edge type + fmt.Fprintf(b, "type Search%sEdge {\n", typeName) + b.WriteString(" cursor: String!\n") + fmt.Fprintf(b, " node: %s!\n", typeName) + if entity.ResultsMetaInformation { + b.WriteString(" score: Float!\n") + if hasVector { + b.WriteString(" distance: Float\n") + } + if entity.HasGeoSearch() { + b.WriteString(" geoDistance: Float\n") + } + b.WriteString(" highlights: [SearchHighlight!]\n") + } + b.WriteString("}\n\n") +} + +func writeQueryType(b *strings.Builder, entities []SearchableEntity) { + b.WriteString("type Query {\n") + for _, entity := range entities { + writeSearchField(b, &entity) + if entity.SuggestField != "" && entity.HasAutocomplete() { + writeSuggestField(b, &entity) + } + } + b.WriteString("}\n\n") +} + +func writeSuggestField(b *strings.Builder, entity *SearchableEntity) { + fmt.Fprintf(b, " %s(\n", entity.SuggestField) + b.WriteString(" prefix: String!\n") + b.WriteString(" limit: Int\n") + fmt.Fprintf(b, " ): [SuggestTerm!]!\n") +} + +func writeSearchField(b *strings.Builder, entity *SearchableEntity) { + hasVector := entity.HasVectorSearch() + hasFilter := len(filterableFields(entity)) > 0 + hasSort := len(sortableFields(entity)) > 0 + hasGeoSort := hasSortableGeoField(entity) + + fmt.Fprintf(b, " %s(\n", entity.SearchField) + + if hasVector { + fmt.Fprintf(b, " search: Search%sInput!\n", entity.TypeName) + } else { + b.WriteString(" query: String!\n") + } + + b.WriteString(" fuzziness: Fuzziness\n") + + if hasFilter { + fmt.Fprintf(b, " filter: %sFilter\n", entity.TypeName) + } + + if hasSort { + fmt.Fprintf(b, " sort: [%sSort!]\n", entity.TypeName) + } + + if hasGeoSort { + b.WriteString(" geoSort: GeoDistanceSortInput\n") + } + + if entity.CursorBasedPagination { + // Cursor pagination args + b.WriteString(" first: Int\n") + b.WriteString(" after: String\n") + if entity.CursorBidirectional { + b.WriteString(" last: Int\n") + b.WriteString(" before: String\n") + } + if !hasVector { + b.WriteString(" facets: [String!]\n") + } + fmt.Fprintf(b, " ): Search%sConnection!\n", entity.TypeName) + } else { + // Offset pagination args + b.WriteString(" limit: Int\n") + b.WriteString(" offset: Int\n") + if !hasVector && entity.ResultsMetaInformation { + b.WriteString(" facets: [String!]\n") + } + if entity.ResultsMetaInformation { + fmt.Fprintf(b, " ): Search%sResult!\n", entity.TypeName) + } else { + fmt.Fprintf(b, " ): [%s!]!\n", entity.TypeName) + } + } +} + +func writeEntityStubs(b *strings.Builder, entities []SearchableEntity) { + for _, entity := range entities { + keyFields := strings.Join(entity.KeyFields, " ") + fmt.Fprintf(b, "type %s @key(fields: \"%s\") {\n", entity.TypeName, keyFields) + for _, kf := range entity.KeyFields { + fmt.Fprintf(b, " %s: ID! 
@external\n", kf) + } + b.WriteString("}\n\n") + } +} + +func filterableFields(entity *SearchableEntity) []IndexedField { + var result []IndexedField + for _, f := range entity.Fields { + if f.Filterable { + result = append(result, f) + } + } + return result +} + +func sortableFields(entity *SearchableEntity) []IndexedField { + var result []IndexedField + for _, f := range entity.Fields { + if f.Sortable && f.IndexType != searchindex.FieldTypeGeo { + result = append(result, f) + } + } + return result +} + +func hasSortableGeoField(entity *SearchableEntity) bool { + for _, f := range entity.Fields { + if f.IndexType == searchindex.FieldTypeGeo && f.Sortable { + return true + } + } + return false +} + +func graphqlFilterType(f IndexedField) string { + switch f.IndexType { + case searchindex.FieldTypeText, searchindex.FieldTypeKeyword: + return "StringFilter" + case searchindex.FieldTypeNumeric: + if isFloatType(f.GraphQLType) { + return "FloatFilter" + } + return "IntFilter" + case searchindex.FieldTypeBool: + return "Boolean" + case searchindex.FieldTypeDate: + return "DateFilter" + case searchindex.FieldTypeDateTime: + return "DateTimeFilter" + default: + return "StringFilter" + } +} + +func isFloatType(graphqlType string) bool { + return strings.Contains(graphqlType, "Float") +} diff --git a/v2/pkg/engine/datasource/search_datasource/generator_test.go b/v2/pkg/engine/datasource/search_datasource/generator_test.go new file mode 100644 index 0000000000..7dd53db7eb --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/generator_test.go @@ -0,0 +1,529 @@ +package search_datasource + +import ( + "strings" + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestGenerateSubgraphSDL(t *testing.T) { + t.Run("text-only entity", func(t *testing.T) { + config := &ParsedConfig{ + Entities: []SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []IndexedField{ + {FieldName: "name", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "description", GraphQLType: "String!", IndexType: searchindex.FieldTypeText}, + {FieldName: "category", GraphQLType: "String!", IndexType: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {FieldName: "price", GraphQLType: "Float!", IndexType: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {FieldName: "inStock", GraphQLType: "Boolean!", IndexType: searchindex.FieldTypeBool, Filterable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should have query field + assertContains(t, sdl, "searchProducts(") + assertContains(t, sdl, "query: String!") + + // Should have filter type with filterable fields + assertContains(t, sdl, "input ProductFilter {") + assertContains(t, sdl, "name: StringFilter") + assertContains(t, sdl, "category: StringFilter") + assertContains(t, sdl, "price: FloatFilter") + assertContains(t, sdl, "inStock: Boolean") + assertContains(t, sdl, "AND: [ProductFilter!]") + assertContains(t, sdl, "OR: [ProductFilter!]") + assertContains(t, sdl, "NOT: ProductFilter") + + // Should have sort enum + assertContains(t, sdl, "enum ProductSortField {") + assertContains(t, sdl, "RELEVANCE") + assertContains(t, sdl, "NAME") + assertContains(t, sdl, "CATEGORY") + assertContains(t, sdl, "PRICE") + + // Should have result types + 
assertContains(t, sdl, "type SearchProductResult {") + assertContains(t, sdl, "type SearchProductHit {") + assertContains(t, sdl, "node: Product!") + + // Should have entity stub + assertContains(t, sdl, `type Product @key(fields: "id") {`) + assertContains(t, sdl, "id: ID!") + + // Should NOT have vector search input + assertNotContains(t, sdl, "SearchProductInput") + + // Should have facets argument + assertContains(t, sdl, "facets: [String!]") + + // Should have shared types + assertContains(t, sdl, "input StringFilter {") + assertContains(t, sdl, "input FloatFilter {") + }) + + t.Run("vector entity with embedding", func(t *testing.T) { + config := &ParsedConfig{ + Entities: []SearchableEntity{ + { + TypeName: "Article", + IndexName: "articles", + SearchField: "searchArticles", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []IndexedField{ + {FieldName: "title", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true}, + {FieldName: "body", GraphQLType: "String!", IndexType: searchindex.FieldTypeText}, + }, + EmbeddingFields: []EmbeddingField{ + {FieldName: "_embedding", SourceFields: []string{"title", "body"}, Template: "{{title}}. {{body}}", Model: "text-embedding-3-small"}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should have @oneOf search input + assertContains(t, sdl, "input SearchArticleInput @oneOf {") + assertContains(t, sdl, "query: String") + assertContains(t, sdl, "vector: [Float!]") + + // Search field should use search: input + assertContains(t, sdl, "search: SearchArticleInput!") + + // Should have distance field in hit type + assertContains(t, sdl, "distance: Float") + }) + + t.Run("inline style text-only entity", func(t *testing.T) { + config := &ParsedConfig{ + Entities: []SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: false, + Fields: []IndexedField{ + {FieldName: "name", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "description", GraphQLType: "String!", IndexType: searchindex.FieldTypeText}, + {FieldName: "category", GraphQLType: "String!", IndexType: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {FieldName: "price", GraphQLType: "Float!", IndexType: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {FieldName: "inStock", GraphQLType: "Boolean!", IndexType: searchindex.FieldTypeBool, Filterable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should return [Product!]! 
instead of wrapper + assertContains(t, sdl, "): [Product!]!") + + // Should NOT have wrapper types + assertNotContains(t, sdl, "SearchProductResult") + assertNotContains(t, sdl, "SearchProductHit") + assertNotContains(t, sdl, "SearchFacet") + assertNotContains(t, sdl, "SearchFacetValue") + + // Should NOT have facets argument + assertNotContains(t, sdl, "facets:") + + // Should still have filter/sort/limit/offset types + assertContains(t, sdl, "input ProductFilter {") + assertContains(t, sdl, "enum ProductSortField {") + assertContains(t, sdl, "input ProductSort {") + assertContains(t, sdl, "limit: Int") + assertContains(t, sdl, "offset: Int") + + // Should still have entity stub + assertContains(t, sdl, `type Product @key(fields: "id") {`) + assertContains(t, sdl, "id: ID!") + }) + + t.Run("pre-computed vector entity", func(t *testing.T) { + config := &ParsedConfig{ + Entities: []SearchableEntity{ + { + TypeName: "Image", + IndexName: "images", + SearchField: "searchImages", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []IndexedField{ + {FieldName: "caption", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true}, + {FieldName: "embedding", GraphQLType: "[Float!]!", IndexType: searchindex.FieldTypeVector, Dimensions: 512}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + assertContains(t, sdl, "input SearchImageInput @oneOf {") + assertContains(t, sdl, "search: SearchImageInput!") + }) + + t.Run("cursor pagination bidirectional", func(t *testing.T) { + config := &ParsedConfig{ + Indices: []IndexDirective{ + {Name: "products", Backend: "bleve", CursorBasedPagination: true}, + }, + Entities: []SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + CursorBasedPagination: true, + CursorBidirectional: true, + Fields: []IndexedField{ + {FieldName: "name", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "price", GraphQLType: "Float!", IndexType: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should have Connection/Edge/PageInfo types + assertContains(t, sdl, "type SearchProductConnection {") + assertContains(t, sdl, "edges: [SearchProductEdge!]!") + assertContains(t, sdl, "pageInfo: SearchPageInfo!") + assertContains(t, sdl, "totalCount: Int!") + + assertContains(t, sdl, "type SearchProductEdge {") + assertContains(t, sdl, "cursor: String!") + assertContains(t, sdl, "node: Product!") + assertContains(t, sdl, "score: Float!") // meta=true + + assertContains(t, sdl, "type SearchPageInfo {") + assertContains(t, sdl, "hasNextPage: Boolean!") + assertContains(t, sdl, "hasPreviousPage: Boolean!") + assertContains(t, sdl, "startCursor: String") + assertContains(t, sdl, "endCursor: String") + + // Should have bidirectional cursor args + assertContains(t, sdl, "first: Int") + assertContains(t, sdl, "after: String") + assertContains(t, sdl, "last: Int") + assertContains(t, sdl, "before: String") + + // Should NOT have offset/limit + assertNotContains(t, sdl, "limit: Int") + assertNotContains(t, sdl, "offset: Int") + + // Return type should be Connection + assertContains(t, sdl, "): SearchProductConnection!") + + // Should NOT have old-style wrapper types + 
assertNotContains(t, sdl, "SearchProductResult") + assertNotContains(t, sdl, "SearchProductHit") + }) + + t.Run("cursor pagination forward-only", func(t *testing.T) { + config := &ParsedConfig{ + Indices: []IndexDirective{ + {Name: "articles", Backend: "elasticsearch", CursorBasedPagination: true}, + }, + Entities: []SearchableEntity{ + { + TypeName: "Article", + IndexName: "articles", + SearchField: "searchArticles", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + CursorBasedPagination: true, + CursorBidirectional: false, + Fields: []IndexedField{ + {FieldName: "title", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should have first/after but NOT last/before + assertContains(t, sdl, "first: Int") + assertContains(t, sdl, "after: String") + assertNotContains(t, sdl, "last: Int") + assertNotContains(t, sdl, "before: String") + }) + + t.Run("cursor pagination meta disabled", func(t *testing.T) { + config := &ParsedConfig{ + Indices: []IndexDirective{ + {Name: "products", Backend: "bleve", CursorBasedPagination: true}, + }, + Entities: []SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: false, + CursorBasedPagination: true, + CursorBidirectional: true, + Fields: []IndexedField{ + {FieldName: "name", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Edge should NOT have score/distance when meta is disabled + assertContains(t, sdl, "type SearchProductEdge {") + assertContains(t, sdl, "cursor: String!") + assertContains(t, sdl, "node: Product!") + assertNotContains(t, sdl, "score: Float!") + }) + + t.Run("cursor pagination vector entity", func(t *testing.T) { + config := &ParsedConfig{ + Indices: []IndexDirective{ + {Name: "images", Backend: "bleve", CursorBasedPagination: true}, + }, + Entities: []SearchableEntity{ + { + TypeName: "Image", + IndexName: "images", + SearchField: "searchImages", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + CursorBasedPagination: true, + CursorBidirectional: true, + Fields: []IndexedField{ + {FieldName: "caption", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true}, + {FieldName: "embedding", GraphQLType: "[Float!]!", IndexType: searchindex.FieldTypeVector, Dimensions: 512}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Edge should have distance for vector + assertContains(t, sdl, "type SearchImageEdge {") + assertContains(t, sdl, "distance: Float") + }) + + t.Run("unsupported backend error", func(t *testing.T) { + config := &ParsedConfig{ + Indices: []IndexDirective{ + {Name: "products", Backend: "typesense", CursorBasedPagination: true}, + }, + Entities: []SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + CursorBasedPagination: true, + Fields: []IndexedField{ + {FieldName: "name", GraphQLType: "String!", IndexType: searchindex.FieldTypeText}, + }, + }, + }, + } + + _, err := GenerateSubgraphSDL(config) + if err == nil { + t.Fatal("expected error for unsupported backend with 
cursor pagination") + } + if !strings.Contains(err.Error(), "typesense") { + t.Errorf("error should mention the backend name, got: %v", err) + } + }) +} + +func TestGenerateSubgraphSDL_GeoEntity(t *testing.T) { + config := &ParsedConfig{ + Entities: []SearchableEntity{ + { + TypeName: "Store", + IndexName: "stores", + SearchField: "searchStores", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []IndexedField{ + {FieldName: "name", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "category", GraphQLType: "String!", IndexType: searchindex.FieldTypeKeyword, Filterable: true}, + {FieldName: "location", GraphQLType: "GeoPoint", IndexType: searchindex.FieldTypeGeo, Filterable: true, Sortable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should have shared geo types + assertContains(t, sdl, "input GeoPointInput {") + assertContains(t, sdl, "input GeoDistanceFilterInput {") + assertContains(t, sdl, "input GeoBoundingBoxFilterInput {") + assertContains(t, sdl, "input GeoDistanceSortInput {") + + // Filter should have geo-specific fields + assertContains(t, sdl, "input StoreFilter {") + assertContains(t, sdl, "location_distance: GeoDistanceFilterInput") + assertContains(t, sdl, "location_boundingBox: GeoBoundingBoxFilterInput") + + // Should NOT have a plain "location:" filter (GEO fields get _distance/_boundingBox instead) + assertNotContains(t, sdl, " location: ") + + // Sort enum should NOT include location (GEO fields use geoSort instead) + assertNotContains(t, sdl, "LOCATION") + + // Search field should have geoSort argument + assertContains(t, sdl, "geoSort: GeoDistanceSortInput") + + // Hit type should have geoDistance field + assertContains(t, sdl, "type SearchStoreHit {") + assertContains(t, sdl, "geoDistance: Float") + + // Should have query field + assertContains(t, sdl, "searchStores(") +} + +func TestGenerateSubgraphSDL_DateEntity(t *testing.T) { + config := &ParsedConfig{ + Entities: []SearchableEntity{ + { + TypeName: "Event", + IndexName: "events", + SearchField: "searchEvents", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []IndexedField{ + {FieldName: "title", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "eventDate", GraphQLType: "Date", IndexType: searchindex.FieldTypeDate, Filterable: true, Sortable: true}, + {FieldName: "createdAt", GraphQLType: "DateTime", IndexType: searchindex.FieldTypeDateTime, Filterable: true, Sortable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should have Date and DateTime scalars + assertContains(t, sdl, "scalar Date") + assertContains(t, sdl, "scalar DateTime") + + // Should have DateFilter input type + assertContains(t, sdl, "input DateFilter {") + assertContains(t, sdl, "eq: Date") + assertContains(t, sdl, "gt: Date") + assertContains(t, sdl, "gte: Date") + assertContains(t, sdl, "lt: Date") + assertContains(t, sdl, "lte: Date") + assertContains(t, sdl, "after: Date") + assertContains(t, sdl, "before: Date") + + // Should have DateTimeFilter input type + assertContains(t, sdl, "input DateTimeFilter {") + assertContains(t, sdl, "eq: DateTime") + assertContains(t, sdl, "gt: DateTime") + assertContains(t, sdl, "after: DateTime") + assertContains(t, sdl, "before: DateTime") + + // Filter should 
use DateFilter for Date fields and DateTimeFilter for DateTime fields + assertContains(t, sdl, "input EventFilter {") + assertContains(t, sdl, "eventDate: DateFilter") + assertContains(t, sdl, "createdAt: DateTimeFilter") + + // Sort enum should include date fields + assertContains(t, sdl, "enum EventSortField {") + assertContains(t, sdl, "EVENTDATE") + assertContains(t, sdl, "CREATEDAT") + + // Should NOT have geo types (no geo fields) + assertNotContains(t, sdl, "GeoPointInput") + assertNotContains(t, sdl, "GeoDistanceFilterInput") +} + +func TestGenerateSubgraphSDL_NoDateScalarsWithoutDateFields(t *testing.T) { + config := &ParsedConfig{ + Entities: []SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []IndexedField{ + {FieldName: "name", GraphQLType: "String!", IndexType: searchindex.FieldTypeText, Filterable: true}, + {FieldName: "price", GraphQLType: "Float!", IndexType: searchindex.FieldTypeNumeric, Filterable: true}, + }, + }, + }, + } + + sdl, err := GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should NOT have date scalars or filters when no date fields exist + assertNotContains(t, sdl, "scalar Date") + assertNotContains(t, sdl, "scalar DateTime") + assertNotContains(t, sdl, "DateFilter") + assertNotContains(t, sdl, "DateTimeFilter") +} + +func assertContains(t *testing.T, sdl, substr string) { + t.Helper() + if !strings.Contains(sdl, substr) { + t.Errorf("SDL should contain %q\n\nSDL:\n%s", substr, sdl) + } +} + +func assertNotContains(t *testing.T, sdl, substr string) { + t.Helper() + if strings.Contains(sdl, substr) { + t.Errorf("SDL should NOT contain %q", substr) + } +} diff --git a/v2/pkg/engine/datasource/search_datasource/lifecycle.go b/v2/pkg/engine/datasource/search_datasource/lifecycle.go new file mode 100644 index 0000000000..dd65619748 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/lifecycle.go @@ -0,0 +1,411 @@ +package search_datasource + +import ( + "context" + "encoding/json" + "fmt" + "log" + "strings" + "sync" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// GraphQLExecutor executes GraphQL operations against the federated graph. +type GraphQLExecutor interface { + Execute(ctx context.Context, operation string) ([]byte, error) +} + +// GraphQLSubscriber subscribes to GraphQL operations and streams events. +// Each received []byte is a complete JSON response for one event. +type GraphQLSubscriber interface { + Subscribe(ctx context.Context, operation string) (<-chan []byte, error) +} + +// Manager handles the lifecycle of search indices: creation, population, subscriptions, and shutdown. +type Manager struct { + factory *Factory + indexRegistry *searchindex.IndexFactoryRegistry + embedderRegistry *searchindex.EmbedderRegistry + executor GraphQLExecutor + subscriber GraphQLSubscriber + config *ParsedConfig + + indices map[string]searchindex.Index + pipelines map[string]map[string]*searchindex.EmbeddingPipeline // entity type → field name → pipeline + + cancelFuncs []context.CancelFunc + mu sync.Mutex +} + +// NewManager creates a new lifecycle manager. 
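+// A minimal wiring sketch; the registry, executor, and config values are
+// placeholders supplied by the integrating router, not names defined here:
+//
+//	manager := NewManager(factory, indexRegistry, embedderRegistry, executor, parsedConfig)
+//	manager.SetSubscriber(subscriber) // optional: enables live index updates
+//	if err := manager.Start(ctx); err != nil { /* handle startup failure */ }
+//	defer manager.Stop()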
+func NewManager( + factory *Factory, + indexRegistry *searchindex.IndexFactoryRegistry, + embedderRegistry *searchindex.EmbedderRegistry, + executor GraphQLExecutor, + config *ParsedConfig, +) *Manager { + return &Manager{ + factory: factory, + indexRegistry: indexRegistry, + embedderRegistry: embedderRegistry, + executor: executor, + config: config, + indices: make(map[string]searchindex.Index), + pipelines: make(map[string]map[string]*searchindex.EmbeddingPipeline), + } +} + +// SetSubscriber sets the optional subscriber for live index updates. +func (m *Manager) SetSubscriber(subscriber GraphQLSubscriber) { + m.subscriber = subscriber +} + +// Start creates indices, runs initial population, and starts subscriptions. +func (m *Manager) Start(ctx context.Context) error { + // Create indices + if err := m.createIndices(ctx); err != nil { + return fmt.Errorf("creating indices: %w", err) + } + + // Setup embedding pipelines + if err := m.setupEmbeddingPipelines(); err != nil { + return fmt.Errorf("setting up embedding pipelines: %w", err) + } + + // Run initial population queries + if err := m.runPopulations(ctx); err != nil { + return fmt.Errorf("running populations: %w", err) + } + + // Start subscriptions for live updates + m.startSubscriptions(ctx) + + return nil +} + +// Stop cancels all subscriptions and closes all indices. +func (m *Manager) Stop() error { + m.mu.Lock() + defer m.mu.Unlock() + + for _, cancel := range m.cancelFuncs { + cancel() + } + m.cancelFuncs = nil + + var firstErr error + for name, idx := range m.indices { + if err := idx.Close(); err != nil && firstErr == nil { + firstErr = fmt.Errorf("closing index %s: %w", name, err) + } + } + return firstErr +} + +// GetIndex returns the index for the given name. +func (m *Manager) GetIndex(name string) (searchindex.Index, bool) { + m.mu.Lock() + defer m.mu.Unlock() + idx, ok := m.indices[name] + return idx, ok +} + +func (m *Manager) createIndices(ctx context.Context) error { + for _, idxDir := range m.config.Indices { + factory, err := m.indexRegistry.Get(idxDir.Backend) + if err != nil { + return fmt.Errorf("backend %q: %w", idxDir.Backend, err) + } + + // Build schema from entity fields + schema := m.buildIndexSchema(idxDir.Name) + + idx, err := factory.CreateIndex(ctx, idxDir.Name, schema, []byte(idxDir.ConfigJSON)) + if err != nil { + return fmt.Errorf("creating index %q: %w", idxDir.Name, err) + } + + m.indices[idxDir.Name] = idx + m.factory.RegisterIndex(idxDir.Name, idx) + } + return nil +} + +func (m *Manager) buildIndexSchema(indexName string) searchindex.IndexConfig { + schema := searchindex.IndexConfig{Name: indexName} + for _, entity := range m.config.Entities { + if entity.IndexName != indexName { + continue + } + for _, f := range entity.Fields { + schema.Fields = append(schema.Fields, searchindex.FieldConfig{ + Name: f.FieldName, + Type: f.IndexType, + Filterable: f.Filterable, + Sortable: f.Sortable, + Dimensions: f.Dimensions, + Weight: f.Weight, + }) + } + // Add embedding fields as vector fields, resolving dimensions from the embedder. 
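+		// Note: if no embedder is registered under ef.Model, dims stays 0 below;
+		// whether a zero-dimension vector field is inferred or rejected is left
+		// to the backend's CreateIndex implementation.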
+ for _, ef := range entity.EmbeddingFields { + dims := 0 + if m.embedderRegistry != nil { + if embedder, err := m.embedderRegistry.Get(ef.Model); err == nil { + dims = embedder.Dimensions() + } + } + schema.Fields = append(schema.Fields, searchindex.FieldConfig{ + Name: ef.FieldName, + Type: searchindex.FieldTypeVector, + Dimensions: dims, + }) + } + } + return schema +} + +func (m *Manager) setupEmbeddingPipelines() error { + for _, entity := range m.config.Entities { + for _, ef := range entity.EmbeddingFields { + transformer, err := searchindex.NewTemplateTransformer(ef.Template) + if err != nil { + return fmt.Errorf("creating template transformer for %s.%s: %w", entity.TypeName, ef.FieldName, err) + } + + embedder, err := m.embedderRegistry.Get(ef.Model) + if err != nil { + return fmt.Errorf("embedder model %q for %s.%s: %w", ef.Model, entity.TypeName, ef.FieldName, err) + } + + if m.pipelines[entity.TypeName] == nil { + m.pipelines[entity.TypeName] = make(map[string]*searchindex.EmbeddingPipeline) + } + m.pipelines[entity.TypeName][ef.FieldName] = &searchindex.EmbeddingPipeline{ + Transformer: transformer, + Embedder: embedder, + } + } + } + return nil +} + +func (m *Manager) runPopulations(ctx context.Context) error { + for _, pop := range m.config.Populations { + idx, ok := m.indices[pop.IndexName] + if !ok { + return fmt.Errorf("index %q not found for population", pop.IndexName) + } + + entity := m.findEntity(pop.EntityTypeName) + if entity == nil { + return fmt.Errorf("entity %q not found for population", pop.EntityTypeName) + } + + if err := m.populate(ctx, idx, entity, &pop); err != nil { + return fmt.Errorf("populating index %q: %w", pop.IndexName, err) + } + + // Schedule resync if configured + if pop.ResyncInterval != "" { + interval, err := time.ParseDuration(pop.ResyncInterval) + if err != nil { + return fmt.Errorf("invalid resync interval %q: %w", pop.ResyncInterval, err) + } + m.scheduleResync(ctx, idx, entity, &pop, interval) + } + } + return nil +} + +func (m *Manager) populate(ctx context.Context, idx searchindex.Index, entity *SearchableEntity, pop *PopulateDirective) error { + responseBody, err := m.executor.Execute(ctx, pop.Query) + if err != nil { + return fmt.Errorf("executing population query: %w", err) + } + + docs, err := ExtractEntities(responseBody, pop.Path, entity.TypeName, entity.KeyFields) + if err != nil { + return fmt.Errorf("extracting entities: %w", err) + } + + if err := m.processEmbeddings(ctx, docs, entity); err != nil { + return fmt.Errorf("processing embeddings: %w", err) + } + + return idx.IndexDocuments(ctx, docs) +} + +// processEmbeddings runs all embedding pipelines for the entity, populating vectors on each document. 
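+// Each configured embedding field triggers one ProcessBatch call over the
+// per-document field maps; the returned vectors are assumed to be index-aligned
+// with docs, so vectors[i] is attached to docs[i].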
+func (m *Manager) processEmbeddings(ctx context.Context, docs []searchindex.EntityDocument, entity *SearchableEntity) error { + entityPipelines, ok := m.pipelines[entity.TypeName] + if !ok { + return nil + } + + fieldMaps := EntityFieldMaps(docs) + for fieldName, pipeline := range entityPipelines { + vectors, err := pipeline.ProcessBatch(ctx, fieldMaps) + if err != nil { + return fmt.Errorf("embedding field %s: %w", fieldName, err) + } + for i, vec := range vectors { + if docs[i].Vectors == nil { + docs[i].Vectors = make(map[string][]float32) + } + docs[i].Vectors[fieldName] = vec + } + } + return nil +} + +func (m *Manager) scheduleResync(ctx context.Context, idx searchindex.Index, entity *SearchableEntity, pop *PopulateDirective, interval time.Duration) { + resyncCtx, cancel := context.WithCancel(ctx) + m.mu.Lock() + m.cancelFuncs = append(m.cancelFuncs, cancel) + m.mu.Unlock() + + go func() { + ticker := time.NewTicker(interval) + defer ticker.Stop() + for { + select { + case <-resyncCtx.Done(): + return + case <-ticker.C: + if err := m.populate(resyncCtx, idx, entity, pop); err != nil { + log.Printf("search_datasource: resync error for %s: %v", pop.IndexName, err) + } + } + } + }() +} + +func (m *Manager) startSubscriptions(ctx context.Context) { + if m.subscriber == nil { + return + } + + for _, sub := range m.config.Subscriptions { + idx, ok := m.indices[sub.IndexName] + if !ok { + log.Printf("search_datasource: index %q not found for subscription, skipping", sub.IndexName) + continue + } + + entity := m.findEntity(sub.EntityTypeName) + if entity == nil { + log.Printf("search_datasource: entity %q not found for subscription, skipping", sub.EntityTypeName) + continue + } + + subCtx, cancel := context.WithCancel(ctx) + m.mu.Lock() + m.cancelFuncs = append(m.cancelFuncs, cancel) + m.mu.Unlock() + + go m.runSubscription(subCtx, idx, entity, &sub) + } +} + +func (m *Manager) runSubscription(ctx context.Context, idx searchindex.Index, entity *SearchableEntity, sub *SubscribeDirective) { + events, err := m.subscriber.Subscribe(ctx, sub.Subscription) + if err != nil { + log.Printf("search_datasource: subscribe to %q failed: %v", sub.IndexName, err) + return + } + + for { + select { + case <-ctx.Done(): + return + case eventData, ok := <-events: + if !ok { + return + } + if err := m.handleSubscriptionEvent(ctx, idx, entity, sub, eventData); err != nil { + log.Printf("search_datasource: subscription event error for %s: %v", sub.IndexName, err) + } + } + } +} + +func (m *Manager) handleSubscriptionEvent(ctx context.Context, idx searchindex.Index, entity *SearchableEntity, sub *SubscribeDirective, eventData []byte) error { + // Try deletion path first if configured. + if sub.DeletionPath != "" { + if err := m.handleDeletion(ctx, idx, entity, sub.DeletionPath, eventData); err == nil { + return nil + } + } + + // Handle upsert via the regular path. 
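+	// Reaching this point means either no deletion path is configured or the
+	// event did not parse as a deletion, so the payload is treated as an upsert:
+	// extract entities, re-run embeddings, and (re)index the documents.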
+ docs, err := ExtractEntities(eventData, sub.Path, entity.TypeName, entity.KeyFields) + if err != nil { + return fmt.Errorf("extracting entities from event: %w", err) + } + + if err := m.processEmbeddings(ctx, docs, entity); err != nil { + return fmt.Errorf("processing embeddings: %w", err) + } + + return idx.IndexDocuments(ctx, docs) +} + +func (m *Manager) handleDeletion(ctx context.Context, idx searchindex.Index, entity *SearchableEntity, deletionPath string, eventData []byte) error { + var raw any + if err := json.Unmarshal(eventData, &raw); err != nil { + return err + } + + current := raw + for _, segment := range strings.Split(deletionPath, ".") { + obj, ok := current.(map[string]any) + if !ok { + return fmt.Errorf("expected object at path segment %q", segment) + } + val, ok := obj[segment] + if !ok { + return fmt.Errorf("path segment %q not found", segment) + } + current = val + } + + switch v := current.(type) { + case map[string]any: + id := buildIdentity(v, entity) + return idx.DeleteDocument(ctx, id) + case []any: + ids := make([]searchindex.DocumentIdentity, 0, len(v)) + for _, item := range v { + if obj, ok := item.(map[string]any); ok { + ids = append(ids, buildIdentity(obj, entity)) + } + } + return idx.DeleteDocuments(ctx, ids) + default: + return fmt.Errorf("unexpected type %T at deletion path", current) + } +} + +func buildIdentity(obj map[string]any, entity *SearchableEntity) searchindex.DocumentIdentity { + keyFields := make(map[string]any, len(entity.KeyFields)) + for _, kf := range entity.KeyFields { + keyFields[kf] = obj[kf] + } + return searchindex.DocumentIdentity{ + TypeName: entity.TypeName, + KeyFields: keyFields, + } +} + +func (m *Manager) findEntity(typeName string) *SearchableEntity { + for i := range m.config.Entities { + if m.config.Entities[i].TypeName == typeName { + return &m.config.Entities[i] + } + } + return nil +} diff --git a/v2/pkg/engine/datasource/search_datasource/planner.go b/v2/pkg/engine/datasource/search_datasource/planner.go new file mode 100644 index 0000000000..67d907f234 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/planner.go @@ -0,0 +1,138 @@ +package search_datasource + +import ( + "log" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/plan" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" +) + +// Planner implements plan.DataSourcePlanner for the search datasource. +type Planner struct { + id int + config Configuration + visitor *plan.Visitor + dataSourceConfig plan.DataSourceConfiguration[Configuration] + factory *Factory + + // State collected during visitor walk + searchFieldName string + fieldRef int + presentArgs map[string]bool // tracks which arguments are present on the field +} + +func (p *Planner) SetID(id int) { p.id = id } +func (p *Planner) ID() int { return p.id } + +func (p *Planner) Register(visitor *plan.Visitor, configuration plan.DataSourceConfiguration[Configuration], _ plan.DataSourcePlannerConfiguration) error { + p.visitor = visitor + p.dataSourceConfig = configuration + p.config = Configuration(configuration.CustomConfiguration()) + + visitor.Walker.RegisterEnterFieldVisitor(p) + return nil +} + +func (p *Planner) EnterField(ref int) { + fieldName := p.visitor.Operation.FieldNameString(ref) + if fieldName != p.config.SearchField { + return + } + p.searchFieldName = fieldName + p.fieldRef = ref + + // Collect which arguments are present on this field in the operation. 
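+	// Only arguments literally present in the operation are recorded here;
+	// buildFetchInput later emits a {{.arguments.X}} slot per recorded name, so
+	// omitted optional arguments never show up in the fetch input JSON.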
+ p.presentArgs = make(map[string]bool) + for _, argName := range []string{"query", "search", "filter", "sort", "geoSort", "fuzziness", "limit", "offset", "facets", "first", "after", "last", "before", "prefix"} { + if _, ok := p.visitor.Operation.FieldArgument(ref, []byte(argName)); ok { + p.presentArgs[argName] = true + } + } +} + +func (p *Planner) DownstreamResponseFieldAlias(_ int) (alias string, exists bool) { + return +} + +func (p *Planner) ConfigureFetch() resolve.FetchConfiguration { + source, err := p.factory.CreateSourceForConfig(p.config) + if err != nil { + log.Printf("search_datasource: failed to create source: %v", err) + } + + input := p.buildFetchInput() + + return resolve.FetchConfiguration{ + Input: input, + DataSource: source, + PostProcessing: resolve.PostProcessingConfiguration{ + SelectResponseDataPath: []string{"data"}, + }, + } +} + +func (p *Planner) ConfigureSubscription() plan.SubscriptionConfiguration { + return plan.SubscriptionConfiguration{} +} + +func (p *Planner) hasArg(name string) bool { + return p.presentArgs != nil && p.presentArgs[name] +} + +func (p *Planner) buildFetchInput() string { + // Use {{.arguments.X}} template syntax. The plan.Visitor's resolveInputTemplates + // method resolves these to proper ContextVariables at plan time. + input := `{"search_field":"` + p.config.SearchField + `"` + + if p.config.IsSuggest { + input += `,"is_suggest":true` + } + + if p.hasArg("prefix") { + input += `,"prefix":{{.arguments.prefix}}` + } + + if p.config.HasVectorSearch && p.hasArg("search") { + input += `,"search":{{.arguments.search}}` + } else if p.hasArg("query") { + input += `,"query":{{.arguments.query}}` + } + + // Only include optional arguments that are actually present in the operation. + if p.hasArg("filter") { + input += `,"filter":{{.arguments.filter}}` + } + if p.hasArg("sort") { + input += `,"sort":{{.arguments.sort}}` + } + if p.hasArg("limit") { + input += `,"limit":{{.arguments.limit}}` + } + if p.hasArg("offset") { + input += `,"offset":{{.arguments.offset}}` + } + if p.hasArg("geoSort") { + input += `,"geoSort":{{.arguments.geoSort}}` + } + if p.hasArg("fuzziness") { + input += `,"fuzziness":{{.arguments.fuzziness}}` + } + if p.hasArg("facets") { + input += `,"facets":{{.arguments.facets}}` + } + if p.hasArg("first") { + input += `,"first":{{.arguments.first}}` + } + if p.hasArg("after") { + input += `,"after":{{.arguments.after}}` + } + if p.hasArg("last") { + input += `,"last":{{.arguments.last}}` + } + if p.hasArg("before") { + input += `,"before":{{.arguments.before}}` + } + + input += `}` + return input +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/algolia_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/algolia_test.go new file mode 100644 index 0000000000..de83d2b6d8 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/algolia_test.go @@ -0,0 +1,65 @@ +//go:build integration + +package searche2e + +import ( + "context" + "encoding/json" + "fmt" + "os" + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/algolia" +) + +func skipIfNoAlgolia(t *testing.T) (string, string) { + t.Helper() + appID := os.Getenv("ALGOLIA_APP_ID") + apiKey := os.Getenv("ALGOLIA_API_KEY") + if appID == "" || apiKey == "" { + t.Skip("ALGOLIA_APP_ID and ALGOLIA_API_KEY environment variables are required for integration tests") + } + return appID, apiKey +} + +func newAlgoliaIndex(t *testing.T, appID, apiKey 
string) searchindex.Index { + t.Helper() + + factory := &algolia.Factory{} + cfg := algolia.Config{ + AppID: appID, + APIKey: apiKey, + } + cfgJSON, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("marshal config: %v", err) + } + + indexName := fmt.Sprintf("test_e2e_%d", time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, ProductIndexSchema(), cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func TestAlgolia(t *testing.T) { + appID, apiKey := skipIfNoAlgolia(t) + + idx := newAlgoliaIndex(t, appID, apiKey) + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: false, + HasExists: false, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(2 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + return newAlgoliaIndex(t, appID, apiKey) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/bleve_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/bleve_test.go new file mode 100644 index 0000000000..c318eb6826 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/bleve_test.go @@ -0,0 +1,74 @@ +package searche2e + +import ( + "context" + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/bleve" +) + +func newBleveIndex(t *testing.T) searchindex.Index { + t.Helper() + factory := bleve.NewFactory() + idx, err := factory.CreateIndex(context.Background(), "test", ProductIndexSchema(), nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func TestBleve(t *testing.T) { + idx := newBleveIndex(t) + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: true, + HasExists: true, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newBleveIndex(t) + }) +} + +func TestBleveCursor(t *testing.T) { + idx := newBleveIndex(t) + RunCursorTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasCursorPagination: true, + }, BackendHooks{}) +} + +func TestEntityJoinCompatibility(t *testing.T) { + idx := newBleveIndex(t) + if err := idx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate: %v", err) + } + + config := ProductDatasourceConfig() + source := CreateSource(t, idx, config) + + // Verify Source.Load response has the correct format for federation entity resolution. + resp := LoadAndParse(t, source, BuildSearchInput(WithLimit(10))) + if len(resp.Hits) == 0 { + t.Fatal("expected hits") + } + + for i, hit := range resp.Hits { + // Each hit.node must contain __typename + key fields. + typename, ok := hit.Node["__typename"].(string) + if !ok || typename != "Product" { + t.Errorf("hit[%d]: __typename = %v, want Product", i, hit.Node["__typename"]) + } + + id, ok := hit.Node["id"] + if !ok { + t.Errorf("hit[%d]: missing key field 'id'", i) + } + if _, ok := id.(string); !ok { + t.Errorf("hit[%d]: id should be string, got %T", i, id) + } + + // Score or distance should be set for scored queries; for match-all, 0 is acceptable. 
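+		// A stricter variant could also assert on the hit's score, but match-all
+		// scoring is backend-specific, so this test only validates the
+		// entity-join shape (__typename plus string-typed key fields).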
+ } +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/elasticsearch_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/elasticsearch_test.go new file mode 100644 index 0000000000..6ca8a2b1be --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/elasticsearch_test.go @@ -0,0 +1,98 @@ +//go:build integration + +package searche2e + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/elasticsearch" +) + +func startElasticsearch(t *testing.T) string { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "docker.elastic.co/elasticsearch/elasticsearch:8.13.4", + ExposedPorts: []string{"9200/tcp"}, + Env: map[string]string{ + "discovery.type": "single-node", + "xpack.security.enabled": "false", + "ES_JAVA_OPTS": "-Xms512m -Xmx512m", + }, + WaitingFor: wait.ForHTTP("/"). + WithPort("9200/tcp"). + WithStartupTimeout(120 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start elasticsearch container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "9200/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return fmt.Sprintf("http://%s:%s", host, port.Port()) +} + +func newElasticsearchIndex(t *testing.T, baseURL string) searchindex.Index { + t.Helper() + + factory := elasticsearch.NewFactory() + cfg := elasticsearch.Config{ + Addresses: []string{baseURL}, + } + cfgJSON, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("marshal config: %v", err) + } + + indexName := fmt.Sprintf("test-products-%d", time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, ProductIndexSchema(), cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func TestElasticsearch(t *testing.T) { + baseURL := startElasticsearch(t) + + idx := newElasticsearchIndex(t, baseURL) + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: true, + HasExists: true, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(2 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + return newElasticsearchIndex(t, baseURL) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_algolia_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_algolia_test.go new file mode 100644 index 0000000000..d19c7d9d99 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_algolia_test.go @@ -0,0 +1,28 @@ +//go:build integration + +package searche2e + +import ( + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Algolia(t *testing.T) { + appID, apiKey := skipIfNoAlgolia(t) + + idx := newAlgoliaIndex(t, appID, apiKey) + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + 
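+		// Assumption, mirroring the caps in TestAlgolia above: this adapter
+		// exposes neither prefix nor exists filters.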
HasPrefix: false, + HasExists: false, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(2 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + return newAlgoliaIndex(t, appID, apiKey) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_bleve_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_bleve_test.go new file mode 100644 index 0000000000..a9be1f5fab --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_bleve_test.go @@ -0,0 +1,20 @@ +package searche2e + +import ( + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Bleve(t *testing.T) { + t.Parallel() + idx := newBleveIndex(t) + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: true, + HasExists: true, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newBleveIndex(t) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_elasticsearch_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_elasticsearch_test.go new file mode 100644 index 0000000000..f2532fe42d --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_elasticsearch_test.go @@ -0,0 +1,28 @@ +//go:build integration + +package searche2e + +import ( + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Elasticsearch(t *testing.T) { + baseURL := startElasticsearch(t) + + idx := newElasticsearchIndex(t, baseURL) + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: true, + HasExists: true, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(2 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + return newElasticsearchIndex(t, baseURL) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_framework.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_framework.go new file mode 100644 index 0000000000..252473831e --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_framework.go @@ -0,0 +1,629 @@ +package searche2e + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/wundergraph/astjson" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astnormalization" + "github.com/wundergraph/graphql-go-tools/v2/pkg/asttransform" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astvalidation" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/graphql_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/plan" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/postprocess" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" + "github.com/wundergraph/graphql-go-tools/v2/pkg/internal/unsafeparser" + "github.com/wundergraph/graphql-go-tools/v2/pkg/operationreport" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// --- Entity subgraph data --- + +type Review struct { + Text string `json:"text"` + Stars int `json:"stars"` +} + +type ProductDetail struct { + Reviews []Review `json:"reviews"` + Rating float64 `json:"rating"` + Manufacturer string `json:"manufacturer"` +} + +var productDetails = 
map[string]ProductDetail{ + "1": {Reviews: []Review{{Text: "Great shoes", Stars: 5}}, Rating: 4.5, Manufacturer: "Nike"}, + "2": {Reviews: []Review{{Text: "Good grip", Stars: 4}}, Rating: 4.2, Manufacturer: "Adidas"}, + "3": {Reviews: []Review{{Text: "Nice belt", Stars: 3}}, Rating: 3.8, Manufacturer: "Gucci"}, + "4": {Reviews: []Review{{Text: "Warm socks", Stars: 5}}, Rating: 4.7, Manufacturer: "Smartwool"}, +} + +// entitySubgraphSDL is the federation SDL for the entity subgraph. +const entitySubgraphSDL = ` +type Product @key(fields: "id") { + id: ID! @external + reviews: [Review!]! + rating: Float + manufacturer: String +} + +type Review { + text: String! + stars: Int! +} + +type Query { + _entities(representations: [_Any!]!): [_Entity]! +} + +scalar _Any +union _Entity = Product +` + +// startEntitySubgraph starts an HTTP test server that handles _entities queries. +func startEntitySubgraph(t *testing.T) *httptest.Server { + t.Helper() + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, err := io.ReadAll(r.Body) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + var req struct { + Query string `json:"query"` + Variables json.RawMessage `json:"variables"` + } + if err := json.Unmarshal(body, &req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + var vars struct { + Representations []map[string]any `json:"representations"` + } + if err := json.Unmarshal(req.Variables, &vars); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + entities := make([]any, 0, len(vars.Representations)) + for _, rep := range vars.Representations { + id, _ := rep["id"].(string) + detail, ok := productDetails[id] + if !ok { + entities = append(entities, nil) + continue + } + entity := map[string]any{ + "id": id, + "__typename": "Product", + "reviews": detail.Reviews, + "rating": detail.Rating, + "manufacturer": detail.Manufacturer, + } + entities = append(entities, entity) + } + + resp := map[string]any{ + "data": map[string]any{ + "_entities": entities, + }, + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(resp) + })) + t.Cleanup(server.Close) + return server +} + +// --- Supergraph definition --- + +// The merged supergraph schema combining search subgraph + entity subgraph. +// This is a hand-written merge for the Product test case. +const supergraphDefinition = ` +input StringFilter { + eq: String + ne: String + in: [String!] + contains: String + startsWith: String +} + +input FloatFilter { + eq: Float + gt: Float + gte: Float + lt: Float + lte: Float +} + +input IntFilter { + eq: Int + gt: Int + gte: Int + lt: Int + lte: Int +} + +enum SortDirection { + ASC + DESC +} + +type SearchFacet { + field: String! + values: [SearchFacetValue!]! +} + +type SearchFacetValue { + value: String! + count: Int! +} + +input ProductFilter { + name: StringFilter + category: StringFilter + price: FloatFilter + inStock: Boolean + AND: [ProductFilter!] + OR: [ProductFilter!] + NOT: ProductFilter +} + +enum ProductSortField { + RELEVANCE + NAME + CATEGORY + PRICE +} + +input ProductSort { + field: ProductSortField! + direction: SortDirection! +} + +type SearchProductResult { + hits: [SearchProductHit!]! + totalCount: Int! + facets: [SearchFacet!] +} + +type SearchProductHit { + score: Float! + node: Product! +} + +type Product { + id: ID! + reviews: [Review!]! + rating: Float + manufacturer: String +} + +type Review { + text: String! 
+ stars: Int! +} + +type Query { + searchProducts( + query: String! + filter: ProductFilter + sort: [ProductSort!] + limit: Int + offset: Int + facets: [String!] + ): SearchProductResult! +} +` + +// --- Config builder --- + +// FederatedTestSetup holds the configuration and cleanup for a federated test. +type FederatedTestSetup struct { + PlanConfig plan.Configuration + Definition string + Cleanup func() +} + +// BuildFederatedConfig creates a federated plan.Configuration with a search datasource +// and an entity subgraph datasource. +func BuildFederatedConfig(t *testing.T, idx searchindex.Index) *FederatedTestSetup { + t.Helper() + + entityServer := startEntitySubgraph(t) + + // --- Search datasource (DS1) --- + searchConfig := ProductDatasourceConfig() + searchFactory := search_datasource.NewFactory(context.Background(), nil, nil) + searchFactory.RegisterIndex(searchConfig.IndexName, idx) + + searchDS, err := plan.NewDataSourceConfiguration[search_datasource.Configuration]( + "search-ds", + searchFactory, + &plan.DataSourceMetadata{ + RootNodes: plan.TypeFields{ + {TypeName: "Query", FieldNames: []string{"searchProducts"}}, + {TypeName: "Product", FieldNames: []string{"id"}}, + }, + ChildNodes: plan.TypeFields{ + {TypeName: "SearchProductResult", FieldNames: []string{"hits", "totalCount", "facets"}}, + {TypeName: "SearchProductHit", FieldNames: []string{"score", "node"}}, + {TypeName: "SearchFacet", FieldNames: []string{"field", "values"}}, + {TypeName: "SearchFacetValue", FieldNames: []string{"value", "count"}}, + }, + FederationMetaData: plan.FederationMetaData{ + Keys: plan.FederationFieldConfigurations{ + {TypeName: "Product", SelectionSet: "id"}, + }, + }, + }, + searchConfig, + ) + if err != nil { + t.Fatalf("NewDataSourceConfiguration (search): %v", err) + } + + // --- Entity datasource (DS2) --- + entitySchemaConfig, err := graphql_datasource.NewSchemaConfiguration( + entitySubgraphSDL, + &graphql_datasource.FederationConfiguration{ + Enabled: true, + ServiceSDL: entitySubgraphSDL, + }, + ) + if err != nil { + t.Fatalf("NewSchemaConfiguration (entity): %v", err) + } + + entityConfig, err := graphql_datasource.NewConfiguration(graphql_datasource.ConfigurationInput{ + Fetch: &graphql_datasource.FetchConfiguration{ + URL: entityServer.URL, + }, + SchemaConfiguration: entitySchemaConfig, + }) + if err != nil { + t.Fatalf("NewConfiguration (entity): %v", err) + } + + entityFactory, err := graphql_datasource.NewFactory(context.Background(), http.DefaultClient, &noopSubscriptionClient{}) + if err != nil { + t.Fatalf("NewFactory (entity): %v", err) + } + + entityDS, err := plan.NewDataSourceConfiguration[graphql_datasource.Configuration]( + "entity-ds", + entityFactory, + &plan.DataSourceMetadata{ + RootNodes: plan.TypeFields{ + {TypeName: "Product", FieldNames: []string{"id", "reviews", "rating", "manufacturer"}}, + }, + ChildNodes: plan.TypeFields{ + {TypeName: "Review", FieldNames: []string{"text", "stars"}}, + }, + FederationMetaData: plan.FederationMetaData{ + Keys: plan.FederationFieldConfigurations{ + {TypeName: "Product", SelectionSet: "id"}, + }, + }, + }, + entityConfig, + ) + if err != nil { + t.Fatalf("NewDataSourceConfiguration (entity): %v", err) + } + + // --- Plan configuration --- + planConfig := plan.Configuration{ + DataSources: []plan.DataSource{ + searchDS, + entityDS, + }, + Fields: plan.FieldConfigurations{ + { + TypeName: "Query", + FieldName: "searchProducts", + Arguments: plan.ArgumentsConfigurations{ + {Name: "query", SourceType: plan.FieldArgumentSource, 
RenderConfig: plan.RenderArgumentAsJSONValue}, + {Name: "filter", SourceType: plan.FieldArgumentSource, RenderConfig: plan.RenderArgumentAsJSONValue}, + {Name: "sort", SourceType: plan.FieldArgumentSource, RenderConfig: plan.RenderArgumentAsJSONValue}, + {Name: "limit", SourceType: plan.FieldArgumentSource, RenderConfig: plan.RenderArgumentAsJSONValue}, + {Name: "offset", SourceType: plan.FieldArgumentSource, RenderConfig: plan.RenderArgumentAsJSONValue}, + {Name: "facets", SourceType: plan.FieldArgumentSource, RenderConfig: plan.RenderArgumentAsJSONValue}, + }, + }, + }, + DisableResolveFieldPositions: true, + } + + return &FederatedTestSetup{ + PlanConfig: planConfig, + Definition: supergraphDefinition, + Cleanup: func() { + entityServer.Close() + }, + } +} + +// --- Execution helper --- + +// ExecuteFederatedQuery plans and resolves a GraphQL query through the full pipeline. +func ExecuteFederatedQuery(t *testing.T, setup *FederatedTestSetup, query string, variables string) string { + t.Helper() + + def := unsafeparser.ParseGraphqlDocumentString(setup.Definition) + op := unsafeparser.ParseGraphqlDocumentString(query) + + if err := asttransform.MergeDefinitionWithBaseSchema(&def); err != nil { + t.Fatalf("MergeDefinitionWithBaseSchema: %v", err) + } + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + if report.HasErrors() { + t.Fatalf("normalize: %s", report.Error()) + } + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + if report.HasErrors() { + t.Fatalf("validate: %s", report.Error()) + } + + p, err := plan.NewPlanner(setup.PlanConfig) + if err != nil { + t.Fatalf("NewPlanner: %v", err) + } + + executionPlan := p.Plan(&op, &def, "", report) + if report.HasErrors() { + t.Fatalf("plan: %s", report.Error()) + } + + // Post-process the plan to build the fetch tree from raw fetches. + proc := postprocess.NewProcessor() + proc.Process(executionPlan) + + syncPlan, ok := executionPlan.(*plan.SynchronousResponsePlan) + if !ok { + t.Fatalf("expected SynchronousResponsePlan, got %T", executionPlan) + } + + if syncPlan.Response.Info == nil { + syncPlan.Response.Info = &resolve.GraphQLResponseInfo{ + OperationType: ast.OperationTypeQuery, + } + } + + resolver := resolve.New(context.Background(), resolve.ResolverOptions{ + MaxConcurrency: 32, + PropagateSubgraphErrors: true, + }) + + ctx := resolve.NewContext(context.Background()) + if variables != "" { + ctx.Variables = astjson.MustParseBytes([]byte(variables)) + } + + buf := &bytes.Buffer{} + _, err = resolver.ResolveGraphQLResponse(ctx, syncPlan.Response, nil, buf) + if err != nil { + t.Fatalf("ResolveGraphQLResponse: %v", err) + } + + return buf.String() +} + +// --- Test runner --- + +// FederatedSearchResponse is the parsed response from a federated search query. 
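+// Its shape mirrors supergraphDefinition: search-owned fields (score,
+// totalCount, facets) plus entity-owned fields (manufacturer, rating,
+// reviews) joined onto each hit's node.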
+type FederatedSearchResponse struct { + Data struct { + SearchProducts struct { + Hits []struct { + Score float64 `json:"score"` + Node struct { + ID string `json:"id"` + Manufacturer string `json:"manufacturer"` + Rating float64 `json:"rating"` + Reviews []Review `json:"reviews"` + } `json:"node"` + } `json:"hits"` + TotalCount int `json:"totalCount"` + Facets []struct { + Field string `json:"field"` + Values []struct { + Value string `json:"value"` + Count int `json:"count"` + } `json:"values"` + } `json:"facets"` + } `json:"searchProducts"` + } `json:"data"` +} + +func parseFederatedResponse(t *testing.T, raw string) FederatedSearchResponse { + t.Helper() + var resp FederatedSearchResponse + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("parse federated response: %v\nraw: %s", err, raw) + } + return resp +} + +// RunFederatedBackendTests runs federation e2e tests against the given index. +func RunFederatedBackendTests(t *testing.T, idx searchindex.Index, caps BackendCaps, hooks BackendHooks, indexFactory func(t *testing.T) searchindex.Index) { + // Populate shared index with test data. + if err := idx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate test data: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + setup := BuildFederatedConfig(t, idx) + + t.Run("basic_search_with_join", func(t *testing.T) { + t.Parallel() + query := `{ searchProducts(query: "shoes") { hits { node { id manufacturer } } totalCount } }` + raw := ExecuteFederatedQuery(t, setup, query, "") + resp := parseFederatedResponse(t, raw) + + if resp.Data.SearchProducts.TotalCount < 2 { + t.Errorf("expected totalCount >= 2, got %d", resp.Data.SearchProducts.TotalCount) + } + if len(resp.Data.SearchProducts.Hits) < 2 { + t.Errorf("expected >= 2 hits, got %d", len(resp.Data.SearchProducts.Hits)) + } + for _, hit := range resp.Data.SearchProducts.Hits { + if hit.Node.ID == "" { + t.Error("expected non-empty id") + } + if hit.Node.Manufacturer == "" { + t.Errorf("expected manufacturer for product %s, got empty", hit.Node.ID) + } + expected, ok := productDetails[hit.Node.ID] + if ok && hit.Node.Manufacturer != expected.Manufacturer { + t.Errorf("product %s: manufacturer = %q, want %q", hit.Node.ID, hit.Node.Manufacturer, expected.Manufacturer) + } + } + }) + + t.Run("filter_with_join", func(t *testing.T) { + t.Parallel() + query := `query($f: ProductFilter) { searchProducts(query: "*", filter: $f) { hits { node { id rating } } } }` + vars := `{"f": {"category": {"eq": "Footwear"}}}` + raw := ExecuteFederatedQuery(t, setup, query, vars) + resp := parseFederatedResponse(t, raw) + + if len(resp.Data.SearchProducts.Hits) < 2 { + t.Errorf("expected >= 2 hits for Footwear filter, got %d", len(resp.Data.SearchProducts.Hits)) + } + for _, hit := range resp.Data.SearchProducts.Hits { + if hit.Node.Rating == 0 { + t.Errorf("expected rating for product %s, got 0", hit.Node.ID) + } + } + }) + + t.Run("sort_with_join", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!]) { searchProducts(query: "*", sort: $s) { hits { node { id manufacturer } } } }` + vars := `{"s": [{"field": "price", "direction": "ASC"}]}` + raw := ExecuteFederatedQuery(t, setup, query, vars) + resp := parseFederatedResponse(t, raw) + + if len(resp.Data.SearchProducts.Hits) < 2 { + t.Errorf("expected >= 2 hits, got %d", len(resp.Data.SearchProducts.Hits)) + } + // First hit should be cheapest (Wool Socks, id=4) + if len(resp.Data.SearchProducts.Hits) > 0 { + 
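+			// Price order in TestProducts: Wool Socks 12.99 < Leather Belt
+			// 35.00 < Running Shoes 89.99 < Basketball Shoes 129.99.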
first := resp.Data.SearchProducts.Hits[0] + if first.Node.ID != "4" { + t.Errorf("expected first hit id=4 (cheapest), got %s", first.Node.ID) + } + if first.Node.Manufacturer == "" { + t.Error("expected manufacturer on sorted hit") + } + } + }) + + t.Run("pagination_with_join", func(t *testing.T) { + t.Parallel() + query := `query($s: [ProductSort!], $lim: Int, $off: Int) { + searchProducts(query: "*", sort: $s, limit: $lim, offset: $off) { + hits { node { id reviews { text stars } } } + totalCount + } + }` + vars := `{"s": [{"field": "price", "direction": "ASC"}], "lim": 2, "off": 1}` + raw := ExecuteFederatedQuery(t, setup, query, vars) + resp := parseFederatedResponse(t, raw) + + if len(resp.Data.SearchProducts.Hits) != 2 { + t.Errorf("expected 2 hits with limit=2 offset=1, got %d", len(resp.Data.SearchProducts.Hits)) + } + for _, hit := range resp.Data.SearchProducts.Hits { + if len(hit.Node.Reviews) == 0 { + t.Errorf("expected reviews for product %s", hit.Node.ID) + } + for _, r := range hit.Node.Reviews { + if r.Text == "" { + t.Error("expected non-empty review text") + } + if r.Stars == 0 { + t.Error("expected non-zero review stars") + } + } + } + }) + + t.Run("full_hit_fields", func(t *testing.T) { + t.Parallel() + query := `{ searchProducts(query: "shoes") { hits { score node { id } } totalCount } }` + raw := ExecuteFederatedQuery(t, setup, query, "") + resp := parseFederatedResponse(t, raw) + + if resp.Data.SearchProducts.TotalCount < 2 { + t.Errorf("expected totalCount >= 2, got %d", resp.Data.SearchProducts.TotalCount) + } + for _, hit := range resp.Data.SearchProducts.Hits { + if hit.Node.ID == "" { + t.Error("expected non-empty id in full_hit_fields") + } + } + }) + + if caps.HasFacets { + t.Run("facets_with_join", func(t *testing.T) { + t.Parallel() + query := `query($fac: [String!]) { searchProducts(query: "*", facets: $fac) { hits { node { id manufacturer } } facets { field values { value count } } } }` + vars := `{"fac": ["category"]}` + raw := ExecuteFederatedQuery(t, setup, query, vars) + resp := parseFederatedResponse(t, raw) + + if len(resp.Data.SearchProducts.Facets) == 0 { + t.Fatal("expected at least 1 facet") + } + found := false + for _, f := range resp.Data.SearchProducts.Facets { + if f.Field == "category" { + found = true + if len(f.Values) < 2 { + t.Errorf("expected >= 2 facet values for category, got %d", len(f.Values)) + } + } + } + if !found { + t.Error("expected category facet in response") + } + // Also verify entity join worked + for _, hit := range resp.Data.SearchProducts.Hits { + if hit.Node.Manufacturer == "" { + t.Errorf("expected manufacturer for product %s in facets test", hit.Node.ID) + } + } + }) + } +} + +// --- Helpers --- + +// noopSubscriptionClient satisfies graphql_datasource.GraphQLSubscriptionClient for tests. 
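+// None of these federation tests plan a subscription, so every method is
+// a no-op; the client exists only so graphql_datasource.NewFactory has a
+// non-nil dependency to hold.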
+type noopSubscriptionClient struct{} + +func (n *noopSubscriptionClient) Subscribe(_ *resolve.Context, _ graphql_datasource.GraphQLSubscriptionOptions, _ resolve.SubscriptionUpdater) error { + return nil +} +func (n *noopSubscriptionClient) SubscribeAsync(_ *resolve.Context, _ uint64, _ graphql_datasource.GraphQLSubscriptionOptions, _ resolve.SubscriptionUpdater) error { + return nil +} +func (n *noopSubscriptionClient) Unsubscribe(_ uint64) {} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_meilisearch_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_meilisearch_test.go new file mode 100644 index 0000000000..09e8ca8b44 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_meilisearch_test.go @@ -0,0 +1,23 @@ +//go:build integration + +package searche2e + +import ( + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Meilisearch(t *testing.T) { + meiliHost := startMeilisearchContainer(t) + + idx := newMeilisearchIndex(t, meiliHost) + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: false, + HasExists: false, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newMeilisearchIndex(t, meiliHost) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_pgvector_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_pgvector_test.go new file mode 100644 index 0000000000..e40bc9ba5e --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_pgvector_test.go @@ -0,0 +1,24 @@ +//go:build integration + +package searche2e + +import ( + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Pgvector(t *testing.T) { + db := startPgvectorContainer(t) + + idx := newPgvectorIndex(t, db) + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: true, + HasExists: true, + HasVectorSearch: true, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newPgvectorIndex(t, db) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_qdrant_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_qdrant_test.go new file mode 100644 index 0000000000..3a28e7469d --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_qdrant_test.go @@ -0,0 +1,28 @@ +//go:build integration + +package searche2e + +import ( + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Qdrant(t *testing.T) { + host, port := startQdrant(t) + + idx := newQdrantIndex(t, host, port) + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: false, + HasFacets: false, + HasPrefix: false, + HasExists: false, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(1 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + return newQdrantIndex(t, host, port) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_typesense_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_typesense_test.go new file mode 100644 index 0000000000..91f4fd05be --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_typesense_test.go @@ -0,0 +1,23 @@ +//go:build integration + +package searche2e + +import ( + "testing" + + 
"github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Typesense(t *testing.T) { + host, port := startTypesense(t) + + idx := newTypesenseIndex(t, host, port) + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: false, + HasExists: false, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newTypesenseIndex(t, host, port) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/federation_weaviate_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_weaviate_test.go new file mode 100644 index 0000000000..910630bfb3 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/federation_weaviate_test.go @@ -0,0 +1,28 @@ +//go:build integration + +package searche2e + +import ( + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestFederation_Weaviate(t *testing.T) { + host := startWeaviate(t) + + idx := newWeaviateIndex(t, host, "FederationProducts") + RunFederatedBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: false, + HasPrefix: true, + HasExists: false, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(1 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + return newWeaviateIndex(t, host, "FederationProducts") + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/framework.go b/v2/pkg/engine/datasource/search_datasource/searche2e/framework.go new file mode 100644 index 0000000000..07055d26cb --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/framework.go @@ -0,0 +1,1048 @@ +package searche2e + +import ( + "context" + "encoding/json" + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// BackendCaps describes what capabilities a backend supports. +type BackendCaps struct { + HasTextSearch bool + HasFacets bool + HasPrefix bool + HasExists bool + HasVectorSearch bool + HasCursorPagination bool +} + +// BackendHooks provides hooks for backend-specific behavior. +type BackendHooks struct { + WaitForIndex func(t *testing.T) // called after populating, before querying +} + +// SearchResponse represents the parsed JSON response from Source.Load(). +type SearchResponse struct { + Hits []SearchHit `json:"hits"` + TotalCount int `json:"totalCount"` + Facets []SearchFacet `json:"facets"` +} + +// SearchHighlight represents a highlighted field in a search hit. +type SearchHighlight struct { + Field string `json:"field"` + Fragments []string `json:"fragments"` +} + +// SearchHit represents a single hit in the response. +type SearchHit struct { + Score float64 `json:"score"` + Distance float64 `json:"distance"` + GeoDistance *float64 `json:"geoDistance"` + Highlights []SearchHighlight `json:"highlights"` + Node map[string]any `json:"node"` +} + +// SearchFacet represents a facet in the response. +type SearchFacet struct { + Field string `json:"field"` + Values []SearchFacetVal `json:"values"` +} + +// SearchFacetVal represents a single facet value. +type SearchFacetVal struct { + Value string `json:"value"` + Count int `json:"count"` +} + +// ConnectionResponse represents a cursor-based pagination response. 
+type ConnectionResponse struct { + Edges []ConnectionEdge `json:"edges"` + PageInfo PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +// ConnectionEdge represents a single edge in a connection response. +type ConnectionEdge struct { + Cursor string `json:"cursor"` + Node map[string]any `json:"node"` + Score float64 `json:"score"` + Highlights []SearchHighlight `json:"highlights"` +} + +// PageInfo represents page info in a connection response. +type PageInfo struct { + HasNextPage bool `json:"hasNextPage"` + HasPreviousPage bool `json:"hasPreviousPage"` + StartCursor *string `json:"startCursor"` + EndCursor *string `json:"endCursor"` +} + +// TestProducts returns the standard 4 test products. +func TestProducts() []searchindex.EntityDocument { + return []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + }, + } +} + +// GeoTestProducts returns the standard 4 test products with geo locations. +func GeoTestProducts() []searchindex.EntityDocument { + return []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true, "location": map[string]any{"lat": 40.7128, "lon": -74.0060}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true, "location": map[string]any{"lat": 40.7580, "lon": -73.9855}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false, "location": map[string]any{"lat": 34.0522, "lon": -118.2437}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true, "location": map[string]any{"lat": 51.5074, "lon": -0.1278}}, + }, + } +} + +// GeoProductIndexSchema returns the index schema with a geo location field. 
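+// It is ProductIndexSchema plus a filterable, sortable GEO field named
+// "location".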
+func GeoProductIndexSchema() searchindex.IndexConfig { + return searchindex.IndexConfig{ + Name: "products", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + {Name: "location", Type: searchindex.FieldTypeGeo, Filterable: true, Sortable: true}, + }, + } +} + +// GeoProductDatasourceConfig returns the search datasource configuration with geo field. +func GeoProductDatasourceConfig() search_datasource.Configuration { + return search_datasource.Configuration{ + IndexName: "products", + SearchField: "searchProducts", + EntityTypeName: "Product", + KeyFields: []string{"id"}, + Fields: []search_datasource.IndexedFieldConfig{ + {FieldName: "name", GraphQLType: "String", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "description", GraphQLType: "String", IndexType: searchindex.FieldTypeText}, + {FieldName: "category", GraphQLType: "String", IndexType: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {FieldName: "price", GraphQLType: "Float", IndexType: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {FieldName: "inStock", GraphQLType: "Boolean", IndexType: searchindex.FieldTypeBool, Filterable: true}, + {FieldName: "location", GraphQLType: "GeoPoint", IndexType: searchindex.FieldTypeGeo, Filterable: true, Sortable: true}, + }, + HasTextSearch: true, + ResultsMetaInformation: true, + } +} + +// ProductIndexSchema returns the standard index schema for product tests. +func ProductIndexSchema() searchindex.IndexConfig { + return searchindex.IndexConfig{ + Name: "products", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + }, + } +} + +// ProductDatasourceConfig returns the search datasource configuration for products. +func ProductDatasourceConfig() search_datasource.Configuration { + return search_datasource.Configuration{ + IndexName: "products", + SearchField: "searchProducts", + EntityTypeName: "Product", + KeyFields: []string{"id"}, + Fields: []search_datasource.IndexedFieldConfig{ + {FieldName: "name", GraphQLType: "String", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "description", GraphQLType: "String", IndexType: searchindex.FieldTypeText}, + {FieldName: "category", GraphQLType: "String", IndexType: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {FieldName: "price", GraphQLType: "Float", IndexType: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {FieldName: "inStock", GraphQLType: "Boolean", IndexType: searchindex.FieldTypeBool, Filterable: true}, + }, + HasTextSearch: true, + ResultsMetaInformation: true, + } +} + +// CreateSource creates a Source for the given index and config. 
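+// Registering the index on a fresh Factory first mirrors the router's
+// startup order: indices are registered before any query is planned.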
+func CreateSource(t *testing.T, idx searchindex.Index, config search_datasource.Configuration) *search_datasource.Source { + t.Helper() + factory := search_datasource.NewFactory(context.Background(), nil, nil) + factory.RegisterIndex(config.IndexName, idx) + source, err := factory.CreateSourceForConfig(config) + if err != nil { + t.Fatalf("CreateSourceForConfig: %v", err) + } + if source == nil { + t.Fatal("CreateSourceForConfig returned nil") + } + return source +} + +// searchInputBuilder is the struct used to build Source.Load() JSON input. +type searchInputBuilder struct { + SearchField string `json:"search_field"` + Query string `json:"query,omitempty"` + Filter json.RawMessage `json:"filter,omitempty"` + Sort json.RawMessage `json:"sort,omitempty"` + GeoSort json.RawMessage `json:"geoSort,omitempty"` + Limit *int `json:"limit,omitempty"` + Offset *int `json:"offset,omitempty"` + Facets []string `json:"facets,omitempty"` + Fuzziness *string `json:"fuzziness,omitempty"` + First *int `json:"first,omitempty"` + After *string `json:"after,omitempty"` + Last *int `json:"last,omitempty"` + Before *string `json:"before,omitempty"` +} + +// InputOption configures a search input. +type InputOption func(*searchInputBuilder) + +// BuildSearchInput builds JSON input for Source.Load(). +func BuildSearchInput(opts ...InputOption) []byte { + b := &searchInputBuilder{ + SearchField: "searchProducts", + } + for _, opt := range opts { + opt(b) + } + data, _ := json.Marshal(b) + return data +} + +// WithQuery sets the text query. +func WithQuery(q string) InputOption { + return func(b *searchInputBuilder) { + b.Query = q + } +} + +// WithFilter sets the filter. f is marshaled to JSON. +func WithFilter(f any) InputOption { + return func(b *searchInputBuilder) { + data, _ := json.Marshal(f) + b.Filter = data + } +} + +// WithSort sets the sort order. +func WithSort(sorts []map[string]string) InputOption { + return func(b *searchInputBuilder) { + data, _ := json.Marshal(sorts) + b.Sort = data + } +} + +// WithGeoSort sets the geo distance sort. +func WithGeoSort(field string, lat, lon float64, direction, unit string) InputOption { + return func(b *searchInputBuilder) { + data, _ := json.Marshal(map[string]any{ + "field": field, + "center": map[string]any{"lat": lat, "lon": lon}, + "direction": direction, + "unit": unit, + }) + b.GeoSort = data + } +} + +// WithFuzziness sets the fuzziness level ("EXACT", "LOW", "HIGH"). +func WithFuzziness(level string) InputOption { + return func(b *searchInputBuilder) { + b.Fuzziness = &level + } +} + +// WithLimit sets the result limit. +func WithLimit(n int) InputOption { + return func(b *searchInputBuilder) { + b.Limit = &n + } +} + +// WithOffset sets the result offset. +func WithOffset(n int) InputOption { + return func(b *searchInputBuilder) { + b.Offset = &n + } +} + +// WithFacets sets the facet fields. +func WithFacets(fields []string) InputOption { + return func(b *searchInputBuilder) { + b.Facets = fields + } +} + +// WithFirst sets the first (cursor pagination) limit. +func WithFirst(n int) InputOption { + return func(b *searchInputBuilder) { + b.First = &n + } +} + +// WithAfter sets the after cursor. +func WithAfter(cursor string) InputOption { + return func(b *searchInputBuilder) { + b.After = &cursor + } +} + +// WithLast sets the last (backward cursor pagination) limit. +func WithLast(n int) InputOption { + return func(b *searchInputBuilder) { + b.Last = &n + } +} + +// WithBefore sets the before cursor. 
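+// Combine with WithLast for backward pagination, as in backward_page2.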
+func WithBefore(cursor string) InputOption { + return func(b *searchInputBuilder) { + b.Before = &cursor + } +} + +// LoadAndParseConnection calls Source.Load and parses a cursor-based connection response. +func LoadAndParseConnection(t *testing.T, source *search_datasource.Source, input []byte) ConnectionResponse { + t.Helper() + data, err := source.Load(context.Background(), nil, input) + if err != nil { + t.Fatalf("Source.Load: %v", err) + } + + var wrapped map[string]map[string]json.RawMessage + if err := json.Unmarshal(data, &wrapped); err != nil { + t.Fatalf("unmarshal wrapped response: %v (raw: %s)", err, string(data)) + } + dataMap, ok := wrapped["data"] + if !ok { + t.Fatalf("missing 'data' key in response (raw: %s)", string(data)) + } + var inner json.RawMessage + for _, v := range dataMap { + inner = v + break + } + if inner == nil { + t.Fatalf("empty 'data' map in response (raw: %s)", string(data)) + } + + var resp ConnectionResponse + if err := json.Unmarshal(inner, &resp); err != nil { + t.Fatalf("unmarshal connection response: %v (raw: %s)", err, string(inner)) + } + return resp +} + +// CursorProductDatasourceConfig returns a search datasource configuration with cursor pagination enabled. +func CursorProductDatasourceConfig() search_datasource.Configuration { + cfg := ProductDatasourceConfig() + cfg.CursorBasedPagination = true + cfg.CursorBidirectional = true + return cfg +} + +// LoadAndParse calls Source.Load and parses the response. +// The source returns {"data": {"": {...}}} so we extract the inner result. +func LoadAndParse(t *testing.T, source *search_datasource.Source, input []byte) SearchResponse { + t.Helper() + data, err := source.Load(context.Background(), nil, input) + if err != nil { + t.Fatalf("Source.Load: %v", err) + } + + // Extract the search result from the wrapped response. + var wrapped map[string]map[string]json.RawMessage + if err := json.Unmarshal(data, &wrapped); err != nil { + t.Fatalf("unmarshal wrapped response: %v (raw: %s)", err, string(data)) + } + dataMap, ok := wrapped["data"] + if !ok { + t.Fatalf("missing 'data' key in response (raw: %s)", string(data)) + } + // Find the first (and only) key inside "data" — the search field result. + var inner json.RawMessage + for _, v := range dataMap { + inner = v + break + } + if inner == nil { + t.Fatalf("empty 'data' map in response (raw: %s)", string(data)) + } + + var resp SearchResponse + if err := json.Unmarshal(inner, &resp); err != nil { + t.Fatalf("unmarshal search result: %v (raw: %s)", err, string(inner)) + } + return resp +} + +// hitCount returns the effective hit count, preferring TotalCount but falling +// back to len(Hits) for backends that don't set TotalCount on scroll queries. +func hitCount(resp SearchResponse) int { + if resp.TotalCount > 0 { + return resp.TotalCount + } + return len(resp.Hits) +} + +// RunBackendTests runs all e2e test scenarios against the given index. +// Read-only tests use t.Parallel() on the shared index. +// Mutation tests each get a fresh index via indexFactory. +func RunBackendTests(t *testing.T, idx searchindex.Index, caps BackendCaps, hooks BackendHooks, indexFactory func(t *testing.T) searchindex.Index) { + // Populate shared index with test data. 
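+	// The four standard products are indexed once and shared by all parallel
+	// read subtests; mutation subtests build fresh indices via indexFactory.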
+ if err := idx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate test data: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + config := ProductDatasourceConfig() + source := CreateSource(t, idx, config) + + // Read-only tests (parallel, shared index). + t.Run("read", func(t *testing.T) { + if caps.HasTextSearch { + t.Run("text_search", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput(WithQuery("shoes"), WithLimit(10))) + if hitCount(resp) < 2 { + t.Errorf("expected ≥2 hits for 'shoes', got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + } + + t.Run("filter_term_keyword", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{"category": map[string]string{"eq": "Footwear"}}), + WithLimit(10), + )) + if hitCount(resp) != 3 { + t.Errorf("expected 3 hits for category=Footwear, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("filter_boolean", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{"inStock": false}), + WithLimit(10), + )) + if hitCount(resp) != 1 { + t.Errorf("expected 1 hit for inStock=false, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("filter_numeric_range", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{"price": map[string]any{"gte": 30, "lte": 100}}), + WithLimit(10), + )) + if hitCount(resp) != 2 { + t.Errorf("expected 2 hits for price 30-100, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("filter_AND", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "AND": []map[string]any{ + {"category": map[string]string{"eq": "Footwear"}}, + {"inStock": true}, + }, + }), + WithLimit(10), + )) + if hitCount(resp) != 3 { + t.Errorf("expected 3 hits for Footwear AND inStock, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("filter_OR", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "OR": []map[string]any{ + {"category": map[string]string{"eq": "Accessories"}}, + {"price": map[string]any{"gte": 100}}, + }, + }), + WithLimit(10), + )) + if hitCount(resp) != 2 { + t.Errorf("expected 2 hits for Accessories OR price>=100, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("filter_NOT", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "NOT": map[string]any{ + "category": map[string]string{"eq": "Footwear"}, + }, + }), + WithLimit(10), + )) + if hitCount(resp) != 1 { + t.Errorf("expected 1 hit for NOT Footwear, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("sorting", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithSort([]map[string]string{{"field": "price", "direction": "ASC"}}), + WithLimit(10), + )) + if len(resp.Hits) < 4 { + t.Fatalf("expected ≥4 hits, got %d", len(resp.Hits)) + } + name, _ := resp.Hits[0].Node["name"].(string) + if name != "Wool Socks" { + t.Errorf("expected first hit to be Wool Socks (cheapest), got %q", name) + } + }) + + t.Run("pagination", func(t *testing.T) { + t.Parallel() + 
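+			// With price-ASC sorting, limit=2 offset=2 lands on the two most
+			// expensive products (Running Shoes, Basketball Shoes).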
resp := LoadAndParse(t, source, BuildSearchInput( + WithSort([]map[string]string{{"field": "price", "direction": "ASC"}}), + WithLimit(2), + WithOffset(2), + )) + if len(resp.Hits) != 2 { + t.Errorf("expected 2 hits with limit=2 offset=2, got %d", len(resp.Hits)) + } + }) + + if caps.HasFacets { + t.Run("facets", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFacets([]string{"category"}), + WithLimit(10), + )) + if len(resp.Facets) == 0 { + t.Fatal("expected at least 1 facet") + } + found := false + for _, f := range resp.Facets { + if f.Field == "category" { + found = true + if len(f.Values) < 2 { + t.Errorf("expected ≥2 facet values for category, got %d", len(f.Values)) + } + } + } + if !found { + t.Error("expected category facet in response") + } + }) + } + + t.Run("identity_roundtrip", func(t *testing.T) { + t.Parallel() + var input []byte + if caps.HasTextSearch { + input = BuildSearchInput(WithQuery("running shoes"), WithLimit(1)) + } else { + input = BuildSearchInput(WithLimit(1)) + } + resp := LoadAndParse(t, source, input) + if len(resp.Hits) == 0 { + t.Fatal("expected at least 1 hit") + } + hit := resp.Hits[0] + typename, _ := hit.Node["__typename"].(string) + if typename != "Product" { + t.Errorf("__typename = %q, want Product", typename) + } + if _, ok := hit.Node["id"]; !ok { + t.Error("expected 'id' in hit node") + } + }) + + t.Run("terms_IN", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "category": map[string]any{"in": []string{"Footwear", "Accessories"}}, + }), + WithLimit(10), + )) + if hitCount(resp) != 4 { + t.Errorf("expected 4 hits for category IN [Footwear, Accessories], got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + if caps.HasPrefix { + t.Run("prefix_filter", func(t *testing.T) { + t.Parallel() + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "category": map[string]string{"startsWith": "Foot"}, + }), + WithLimit(10), + )) + if hitCount(resp) != 3 { + t.Errorf("expected 3 hits for category startsWith 'Foot', got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + } + }) + + // Mutation tests (sequential, fresh indices). + t.Run("mutations", func(t *testing.T) { + t.Run("index_single_document", func(t *testing.T) { + freshIdx := indexFactory(t) + doc := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "99"}}, + Fields: map[string]any{"name": "Sandals", "description": "Comfortable summer sandals", "category": "Footwear", "price": 49.99, "inStock": true}, + } + if err := freshIdx.IndexDocument(context.Background(), doc); err != nil { + t.Fatalf("IndexDocument: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + mutSource := CreateSource(t, freshIdx, config) + resp := LoadAndParse(t, mutSource, BuildSearchInput(WithLimit(10))) + if hitCount(resp) != 1 { + t.Errorf("expected 1 hit after indexing single doc, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("upsert", func(t *testing.T) { + freshIdx := indexFactory(t) + if err := freshIdx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + // Re-index id=1 with updated name. 
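+			// Same DocumentIdentity, new fields: the backend must replace the
+			// existing document rather than create a duplicate.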
+ updated := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Trail Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + } + if err := freshIdx.IndexDocument(context.Background(), updated); err != nil { + t.Fatalf("IndexDocument (upsert): %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + mutSource := CreateSource(t, freshIdx, config) + + // Total count should still be 4. + allResp := LoadAndParse(t, mutSource, BuildSearchInput(WithLimit(10))) + if hitCount(allResp) != 4 { + t.Errorf("expected 4 hits after upsert, got totalCount=%d len(hits)=%d", allResp.TotalCount, len(allResp.Hits)) + } + + // Verify updated name is findable. + if caps.HasTextSearch { + trailResp := LoadAndParse(t, mutSource, BuildSearchInput(WithQuery("trail"), WithLimit(10))) + if hitCount(trailResp) < 1 { + t.Errorf("expected ≥1 hit for 'trail' after upsert, got totalCount=%d", trailResp.TotalCount) + } + if len(trailResp.Hits) > 0 { + name, _ := trailResp.Hits[0].Node["name"].(string) + if name != "Trail Running Shoes" { + t.Errorf("expected name %q, got %q", "Trail Running Shoes", name) + } + } + } + }) + + t.Run("delete_single", func(t *testing.T) { + freshIdx := indexFactory(t) + if err := freshIdx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + if err := freshIdx.DeleteDocument(context.Background(), searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }); err != nil { + t.Fatalf("DeleteDocument: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + mutSource := CreateSource(t, freshIdx, config) + resp := LoadAndParse(t, mutSource, BuildSearchInput(WithLimit(10))) + if hitCount(resp) != 3 { + t.Errorf("expected 3 hits after delete, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("delete_batch", func(t *testing.T) { + freshIdx := indexFactory(t) + if err := freshIdx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + if err := freshIdx.DeleteDocuments(context.Background(), []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + }); err != nil { + t.Fatalf("DeleteDocuments: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + mutSource := CreateSource(t, freshIdx, config) + resp := LoadAndParse(t, mutSource, BuildSearchInput(WithLimit(10))) + if hitCount(resp) != 2 { + t.Errorf("expected 2 hits after batch delete, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + }) +} + +// RunCursorTests runs cursor-based pagination tests against the given index. +// Products sorted by price ASC: Wool Socks ($12.99), Leather Belt ($35), Running Shoes ($89.99), Basketball Shoes ($129.99). 
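+// Backends that cannot page backwards should leave caps.HasCursorPagination
+// unset so the backward_* subtests are skipped.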
+func RunCursorTests(t *testing.T, idx searchindex.Index, caps BackendCaps, hooks BackendHooks) { + if err := idx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate test data: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + config := CursorProductDatasourceConfig() + source := CreateSource(t, idx, config) + + sortByPrice := WithSort([]map[string]string{{"field": "price", "direction": "ASC"}}) + + t.Run("forward_page1", func(t *testing.T) { + resp := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithFirst(1))) + if len(resp.Edges) != 1 { + t.Fatalf("expected 1 edge, got %d", len(resp.Edges)) + } + name, _ := resp.Edges[0].Node["name"].(string) + if name != "Wool Socks" { + t.Errorf("expected Wool Socks, got %q", name) + } + if !resp.PageInfo.HasNextPage { + t.Error("expected hasNextPage=true") + } + if resp.PageInfo.HasPreviousPage { + t.Error("expected hasPreviousPage=false") + } + if resp.PageInfo.EndCursor == nil || *resp.PageInfo.EndCursor == "" { + t.Error("expected non-empty endCursor") + } + }) + + t.Run("forward_page2", func(t *testing.T) { + // Get page 1 to obtain cursor. + page1 := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithFirst(1))) + if len(page1.Edges) == 0 || page1.PageInfo.EndCursor == nil { + t.Fatal("page1 has no edges or endCursor") + } + + // Get page 2 using after cursor. + resp := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithFirst(1), WithAfter(*page1.PageInfo.EndCursor))) + if len(resp.Edges) != 1 { + t.Fatalf("expected 1 edge, got %d", len(resp.Edges)) + } + name, _ := resp.Edges[0].Node["name"].(string) + if name != "Leather Belt" { + t.Errorf("expected Leather Belt, got %q", name) + } + if !resp.PageInfo.HasNextPage { + t.Error("expected hasNextPage=true (2 more items)") + } + if !resp.PageInfo.HasPreviousPage { + t.Error("expected hasPreviousPage=true (after cursor is set)") + } + }) + + t.Run("forward_last_page", func(t *testing.T) { + // Page through to the last page to verify hasNextPage=false. + // Get all 4 in one page to simplify: first=4 should return all, no over-fetch extra. + resp := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithFirst(4))) + if len(resp.Edges) != 4 { + t.Fatalf("expected 4 edges, got %d", len(resp.Edges)) + } + if resp.PageInfo.HasNextPage { + t.Error("expected hasNextPage=false for full result set") + } + }) + + if caps.HasCursorPagination { + t.Run("backward_page1", func(t *testing.T) { + resp := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithLast(1))) + if len(resp.Edges) != 1 { + t.Fatalf("expected 1 edge, got %d", len(resp.Edges)) + } + name, _ := resp.Edges[0].Node["name"].(string) + if name != "Basketball Shoes" { + t.Errorf("expected Basketball Shoes (most expensive), got %q", name) + } + if !resp.PageInfo.HasPreviousPage { + t.Error("expected hasPreviousPage=true") + } + if resp.PageInfo.HasNextPage { + t.Error("expected hasNextPage=false (no before cursor)") + } + }) + + t.Run("backward_page2", func(t *testing.T) { + // Get last page to obtain cursor. + page1 := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithLast(1))) + if len(page1.Edges) == 0 || page1.PageInfo.StartCursor == nil { + t.Fatal("page1 has no edges or startCursor") + } + + // Get previous page. 
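+		// before + last=1 steps one item back from the most expensive
+		// product, so Running Shoes ($89.99) is expected here.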
+ resp := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithLast(1), WithBefore(*page1.PageInfo.StartCursor))) + if len(resp.Edges) != 1 { + t.Fatalf("expected 1 edge, got %d", len(resp.Edges)) + } + name, _ := resp.Edges[0].Node["name"].(string) + if name != "Running Shoes" { + t.Errorf("expected Running Shoes ($89.99), got %q", name) + } + if !resp.PageInfo.HasPreviousPage { + t.Error("expected hasPreviousPage=true (2 more items)") + } + if !resp.PageInfo.HasNextPage { + t.Error("expected hasNextPage=true (before cursor is set)") + } + }) + } + + t.Run("cursor_identity", func(t *testing.T) { + // Verify edges have __typename and key fields for entity resolution. + resp := LoadAndParseConnection(t, source, BuildSearchInput(sortByPrice, WithFirst(10))) + for i, edge := range resp.Edges { + if edge.Cursor == "" { + t.Errorf("edge[%d]: empty cursor", i) + } + typename, _ := edge.Node["__typename"].(string) + if typename != "Product" { + t.Errorf("edge[%d]: __typename=%q, want Product", i, typename) + } + if _, ok := edge.Node["id"]; !ok { + t.Errorf("edge[%d]: missing key field 'id'", i) + } + } + }) +} + +// RunGeoTests runs geo-spatial search tests against the given index. +// Products are at: +// +// #1 Running Shoes: New York (40.7128, -74.0060) +// #2 Basketball Shoes: Midtown Manhattan (40.7580, -73.9855) — ~5km from #1 +// #3 Leather Belt: Los Angeles (34.0522, -118.2437) — ~3,940km from #1 +// #4 Wool Socks: London (51.5074, -0.1278) — ~5,570km from #1 +func RunGeoTests(t *testing.T, idx searchindex.Index, hooks BackendHooks) { + if err := idx.IndexDocuments(context.Background(), GeoTestProducts()); err != nil { + t.Fatalf("populate geo test data: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + config := GeoProductDatasourceConfig() + source := CreateSource(t, idx, config) + + t.Run("geo_distance_filter", func(t *testing.T) { + t.Parallel() + // Search within 10km of New York — should find Running Shoes (#1) and Basketball Shoes (#2). + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "location_distance": map[string]any{ + "center": map[string]any{"lat": 40.7128, "lon": -74.0060}, + "distance": "10km", + }, + }), + WithLimit(10), + )) + if hitCount(resp) != 2 { + t.Errorf("expected 2 hits within 10km of NYC, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("geo_distance_filter_wide", func(t *testing.T) { + t.Parallel() + // Search within 5000km of New York — should find Running Shoes, Basketball Shoes, and Leather Belt (LA). + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "location_distance": map[string]any{ + "center": map[string]any{"lat": 40.7128, "lon": -74.0060}, + "distance": "5000km", + }, + }), + WithLimit(10), + )) + if hitCount(resp) != 3 { + t.Errorf("expected 3 hits within 5000km of NYC, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("geo_bounding_box_filter", func(t *testing.T) { + t.Parallel() + // Bounding box around New York area — should find Running Shoes and Basketball Shoes. 
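+		// topLeft (41.0, -74.5) and bottomRight (40.5, -73.5) bracket both NYC
+		// points; Los Angeles and London lie far outside the box.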
+ resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "location_boundingBox": map[string]any{ + "topLeft": map[string]any{"lat": 41.0, "lon": -74.5}, + "bottomRight": map[string]any{"lat": 40.5, "lon": -73.5}, + }, + }), + WithLimit(10), + )) + if hitCount(resp) != 2 { + t.Errorf("expected 2 hits in NYC bounding box, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) + + t.Run("geo_distance_sort", func(t *testing.T) { + t.Parallel() + // Sort all products by distance from New York ASC. + resp := LoadAndParse(t, source, BuildSearchInput( + WithGeoSort("location", 40.7128, -74.0060, "ASC", "km"), + WithLimit(10), + )) + if len(resp.Hits) < 4 { + t.Fatalf("expected >= 4 hits, got %d", len(resp.Hits)) + } + // Nearest to NYC should be Running Shoes (in NYC) or Basketball Shoes (Midtown). + name, _ := resp.Hits[0].Node["name"].(string) + if name != "Running Shoes" { + t.Errorf("expected first hit to be Running Shoes (nearest to NYC), got %q", name) + } + // Farthest should be Wool Socks (London). + lastName, _ := resp.Hits[3].Node["name"].(string) + if lastName != "Wool Socks" { + t.Errorf("expected last hit to be Wool Socks (London, farthest), got %q", lastName) + } + // All hits should have geoDistance populated. + for i, hit := range resp.Hits { + if hit.GeoDistance == nil { + t.Errorf("hit[%d]: expected geoDistance to be populated", i) + } + } + }) + + t.Run("geo_filter_combined_with_keyword", func(t *testing.T) { + t.Parallel() + // Combine geo filter with keyword filter: Footwear within 100km of NYC. + resp := LoadAndParse(t, source, BuildSearchInput( + WithFilter(map[string]any{ + "location_distance": map[string]any{ + "center": map[string]any{"lat": 40.7128, "lon": -74.0060}, + "distance": "100km", + }, + "category": map[string]string{"eq": "Footwear"}, + }), + WithLimit(10), + )) + if hitCount(resp) != 2 { + t.Errorf("expected 2 Footwear hits near NYC, got totalCount=%d len(hits)=%d", resp.TotalCount, len(resp.Hits)) + } + }) +} + +// RunFuzzyTests runs fuzzy matching / typo tolerance tests against the given index. +// Uses the standard 4 products (Running Shoes, Basketball Shoes, Leather Belt, Wool Socks). +func RunFuzzyTests(t *testing.T, idx searchindex.Index, hooks BackendHooks) { + if err := idx.IndexDocuments(context.Background(), TestProducts()); err != nil { + t.Fatalf("populate test data: %v", err) + } + if hooks.WaitForIndex != nil { + hooks.WaitForIndex(t) + } + + config := ProductDatasourceConfig() + source := CreateSource(t, idx, config) + + t.Run("fuzzy_low_finds_typo", func(t *testing.T) { + t.Parallel() + // "runing" is 1 edit away from "running" — fuzziness LOW should find it. + resp := LoadAndParse(t, source, BuildSearchInput( + WithQuery("runing"), + WithFuzziness("LOW"), + WithLimit(10), + )) + if hitCount(resp) < 1 { + t.Errorf("expected >=1 hit for 'runing' with fuzziness LOW, got totalCount=%d", resp.TotalCount) + } + }) + + t.Run("fuzzy_exact_misses_typo", func(t *testing.T) { + t.Parallel() + // "runing" with fuzziness EXACT should find nothing. + resp := LoadAndParse(t, source, BuildSearchInput( + WithQuery("runing"), + WithFuzziness("EXACT"), + WithLimit(10), + )) + if hitCount(resp) != 0 { + t.Errorf("expected 0 hits for 'runing' with fuzziness EXACT, got totalCount=%d", resp.TotalCount) + } + }) + + t.Run("fuzzy_high_finds_typo", func(t *testing.T) { + t.Parallel() + // "runnin" is 1 edit away — fuzziness HIGH (2 edits) should find it. 
+ resp := LoadAndParse(t, source, BuildSearchInput( + WithQuery("runnin"), + WithFuzziness("HIGH"), + WithLimit(10), + )) + if hitCount(resp) < 1 { + t.Errorf("expected >=1 hit for 'runnin' with fuzziness HIGH, got totalCount=%d", resp.TotalCount) + } + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/generator_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/generator_test.go new file mode 100644 index 0000000000..af005a0beb --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/generator_test.go @@ -0,0 +1,148 @@ +package searche2e + +import ( + "strings" + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func TestGenerateSubgraphSDL_TextOnly(t *testing.T) { + config := &search_datasource.ParsedConfig{ + Entities: []search_datasource.SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []search_datasource.IndexedField{ + {FieldName: "name", GraphQLType: "String", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "category", GraphQLType: "String", IndexType: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {FieldName: "price", GraphQLType: "Float", IndexType: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {FieldName: "inStock", GraphQLType: "Boolean", IndexType: searchindex.FieldTypeBool, Filterable: true}, + }, + }, + }, + } + + sdl, err := search_datasource.GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("GenerateSubgraphSDL: %v", err) + } + + // Shared types + assertContains(t, sdl, "input StringFilter {") + assertContains(t, sdl, "input FloatFilter {") + assertContains(t, sdl, "enum SortDirection {") + assertContains(t, sdl, "type SearchFacet {") + assertContains(t, sdl, "type SearchFacetValue {") + + // Filter input + assertContains(t, sdl, "input ProductFilter {") + assertContains(t, sdl, "name: StringFilter") + assertContains(t, sdl, "category: StringFilter") + assertContains(t, sdl, "price: FloatFilter") + assertContains(t, sdl, "inStock: Boolean") + assertContains(t, sdl, "AND: [ProductFilter!]") + assertContains(t, sdl, "OR: [ProductFilter!]") + assertContains(t, sdl, "NOT: ProductFilter") + + // Sort enum and input + assertContains(t, sdl, "enum ProductSortField {") + assertContains(t, sdl, "RELEVANCE") + assertContains(t, sdl, "NAME") + assertContains(t, sdl, "CATEGORY") + assertContains(t, sdl, "PRICE") + assertContains(t, sdl, "input ProductSort {") + + // Result types + assertContains(t, sdl, "type SearchProductResult {") + assertContains(t, sdl, "hits: [SearchProductHit!]!") + assertContains(t, sdl, "totalCount: Int!") + assertContains(t, sdl, "facets: [SearchFacet!]") + assertContains(t, sdl, "type SearchProductHit {") + assertContains(t, sdl, "score: Float!") + assertContains(t, sdl, "node: Product!") + + // No distance field for text-only + assertNotContains(t, sdl, "distance: Float") + + // No SearchProductInput @oneOf for text-only + assertNotContains(t, sdl, "input SearchProductInput") + + // Query type + assertContains(t, sdl, "type Query {") + assertContains(t, sdl, "searchProducts(") + assertContains(t, sdl, "query: String!") + assertContains(t, sdl, "filter: ProductFilter") + assertContains(t, sdl, "sort: [ProductSort!]") + assertContains(t, sdl, "limit: Int") + assertContains(t, sdl, 
"offset: Int") + assertContains(t, sdl, "facets: [String!]") + assertContains(t, sdl, "): SearchProductResult!") + + // Entity stub + assertContains(t, sdl, `type Product @key(fields: "id") {`) + assertContains(t, sdl, "id: ID! @external") +} + +func TestGenerateSubgraphSDL_Vector(t *testing.T) { + config := &search_datasource.ParsedConfig{ + Entities: []search_datasource.SearchableEntity{ + { + TypeName: "Article", + IndexName: "articles", + SearchField: "searchArticles", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []search_datasource.IndexedField{ + {FieldName: "title", GraphQLType: "String", IndexType: searchindex.FieldTypeText, Filterable: true}, + {FieldName: "embedding", GraphQLType: "[Float!]", IndexType: searchindex.FieldTypeVector, Dimensions: 1536}, + }, + }, + }, + } + + sdl, err := search_datasource.GenerateSubgraphSDL(config) + if err != nil { + t.Fatalf("GenerateSubgraphSDL: %v", err) + } + + // Vector search input (@oneOf) + assertContains(t, sdl, "input SearchArticleInput @oneOf {") + assertContains(t, sdl, "query: String") + assertContains(t, sdl, "vector: [Float!]") + + // Result types with distance + assertContains(t, sdl, "type SearchArticleHit {") + assertContains(t, sdl, "score: Float!") + assertContains(t, sdl, "distance: Float") + assertContains(t, sdl, "node: Article!") + + // Query uses search: instead of query: + assertContains(t, sdl, "searchArticles(") + assertContains(t, sdl, "search: SearchArticleInput!") + + // No facets argument for vector entities + assertNotContains(t, sdl, "facets: [String!]") + + // Entity stub + assertContains(t, sdl, `type Article @key(fields: "id") {`) + assertContains(t, sdl, "id: ID! @external") +} + +func assertContains(t *testing.T, s, substr string) { + t.Helper() + if !strings.Contains(s, substr) { + t.Errorf("expected SDL to contain %q\n\nActual SDL:\n%s", substr, s) + } +} + +func assertNotContains(t *testing.T, s, substr string) { + t.Helper() + if strings.Contains(s, substr) { + t.Errorf("expected SDL NOT to contain %q\n\nActual SDL:\n%s", substr, s) + } +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/meilisearch_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/meilisearch_test.go new file mode 100644 index 0000000000..8fd1c71a0a --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/meilisearch_test.go @@ -0,0 +1,86 @@ +//go:build integration + +package searche2e + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/meilisearch" +) + +const testMasterKey = "test-master-key" + +func startMeilisearchContainer(t *testing.T) string { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "getmeili/meilisearch:v1.6", + ExposedPorts: []string{"7700/tcp"}, + Env: map[string]string{ + "MEILI_MASTER_KEY": testMasterKey, + }, + WaitingFor: wait.ForHTTP("/health").WithPort("7700/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + require.NoError(t, err, "failed to start Meilisearch container") + t.Cleanup(func() { + _ = container.Terminate(ctx) + }) + + host, err := container.Host(ctx) + 
require.NoError(t, err) + port, err := container.MappedPort(ctx, "7700") + require.NoError(t, err) + + return fmt.Sprintf("http://%s:%s", host, port.Port()) +} + +func newMeilisearchIndex(t *testing.T, meiliHost string) searchindex.Index { + t.Helper() + + factory := meilisearch.NewFactory() + schema := ProductIndexSchema() + + cfg := meilisearch.Config{ + Host: meiliHost, + APIKey: testMasterKey, + } + cfgJSON, err := json.Marshal(cfg) + require.NoError(t, err) + + indexName := fmt.Sprintf("test_%d", time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, schema, cfgJSON) + require.NoError(t, err, "CreateIndex") + t.Cleanup(func() { _ = idx.Close() }) + + return idx +} + +func TestMeilisearch(t *testing.T) { + meiliHost := startMeilisearchContainer(t) + + idx := newMeilisearchIndex(t, meiliHost) + + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: false, + HasExists: false, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newMeilisearchIndex(t, meiliHost) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/pgvector_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/pgvector_test.go new file mode 100644 index 0000000000..82d93a712e --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/pgvector_test.go @@ -0,0 +1,110 @@ +//go:build integration + +package searche2e + +import ( + "context" + "database/sql" + "fmt" + "testing" + "time" + + _ "github.com/lib/pq" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/pgvector" +) + +func startPgvectorContainer(t *testing.T) *sql.DB { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "pgvector/pgvector:pg16", + ExposedPorts: []string{"5432/tcp"}, + Env: map[string]string{ + "POSTGRES_USER": "test", + "POSTGRES_PASSWORD": "test", + "POSTGRES_DB": "testdb", + }, + WaitingFor: wait.ForListeningPort("5432/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start pgvector container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "5432/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + dsn := fmt.Sprintf("postgres://test:test@%s:%s/testdb?sslmode=disable", host, port.Port()) + + db, err := sql.Open("postgres", dsn) + if err != nil { + t.Fatalf("failed to open database: %v", err) + } + t.Cleanup(func() { + if err := db.Close(); err != nil { + t.Logf("failed to close database: %v", err) + } + }) + + // Wait for database to be ready with a ping loop. 
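+	// 30 attempts × 500ms gives the container up to ~15s to accept connections.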
+ for i := 0; i < 30; i++ { + if err := db.PingContext(ctx); err == nil { + return db + } + time.Sleep(500 * time.Millisecond) + } + t.Fatal("database did not become ready after 30 ping attempts") + return nil +} + +func newPgvectorIndex(t *testing.T, db *sql.DB) searchindex.Index { + t.Helper() + + factory := pgvector.NewFactory(db) + schema := ProductIndexSchema() + + indexName := fmt.Sprintf("test_%d", time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { _ = idx.Close() }) + + return idx +} + +func TestPgvector(t *testing.T) { + db := startPgvectorContainer(t) + + idx := newPgvectorIndex(t, db) + + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: true, + HasExists: true, + HasVectorSearch: true, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newPgvectorIndex(t, db) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/qdrant_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/qdrant_test.go new file mode 100644 index 0000000000..1405736eea --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/qdrant_test.go @@ -0,0 +1,86 @@ +//go:build integration + +package searche2e + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/qdrant" +) + +func startQdrant(t *testing.T) (host string, port int) { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "qdrant/qdrant:v1.12.5", + ExposedPorts: []string{"6333/tcp"}, + WaitingFor: wait.ForHTTP("/healthz").WithPort("6333/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start qdrant container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate qdrant container: %v", err) + } + }) + + mappedHost, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + mappedPort, err := container.MappedPort(ctx, "6333") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return mappedHost, mappedPort.Int() +} + +func newQdrantIndex(t *testing.T, host string, port int) searchindex.Index { + t.Helper() + factory := qdrant.NewFactory() + name := fmt.Sprintf("test_%d", time.Now().UnixNano()) + cfgJSON, err := json.Marshal(qdrant.Config{Host: host, Port: port}) + if err != nil { + t.Fatalf("marshal qdrant config: %v", err) + } + idx, err := factory.CreateIndex(context.Background(), name, ProductIndexSchema(), cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func TestQdrant(t *testing.T) { + host, port := startQdrant(t) + idx := newQdrantIndex(t, host, port) + + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: false, + HasFacets: false, + HasPrefix: false, + HasExists: false, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(1 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + return newQdrantIndex(t, host, port) + }) +} diff 
--git a/v2/pkg/engine/datasource/search_datasource/searche2e/subscription_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/subscription_test.go new file mode 100644 index 0000000000..af5739af41 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/subscription_test.go @@ -0,0 +1,274 @@ +package searche2e + +import ( + "context" + "encoding/json" + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/search_datasource" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/bleve" +) + +// mockExecutor returns a canned JSON response for the population query. +type mockExecutor struct { + response []byte +} + +func (m *mockExecutor) Execute(_ context.Context, _ string) ([]byte, error) { + return m.response, nil +} + +// mockSubscriber provides a channel-based subscriber for testing. +type mockSubscriber struct { + ch chan []byte +} + +func newMockSubscriber() *mockSubscriber { + return &mockSubscriber{ch: make(chan []byte, 10)} +} + +func (m *mockSubscriber) Subscribe(_ context.Context, _ string) (<-chan []byte, error) { + return m.ch, nil +} + +func (m *mockSubscriber) Send(data []byte) { + m.ch <- data +} + +func (m *mockSubscriber) Close() { + close(m.ch) +} + +func TestSubscriptionUpsert(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // Build the population response with 2 initial products. + populationResp, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "products": []map[string]any{ + {"id": "1", "name": "Running Shoes", "description": "Great for jogging", "category": "Footwear", "price": 89.99, "inStock": true}, + {"id": "2", "name": "Basketball Shoes", "description": "High-top sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + }, + }) + + // Setup registries and factory. 
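+	// bleve is an embedded, in-process backend, so these subscription tests run
+	// without a container (note the absence of the integration build tag).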
+ indexRegistry := searchindex.NewIndexFactoryRegistry() + indexRegistry.Register("bleve", bleve.NewFactory()) + + factory := search_datasource.NewFactory(ctx, indexRegistry, nil) + executor := &mockExecutor{response: populationResp} + subscriber := newMockSubscriber() + + config := &search_datasource.ParsedConfig{ + Indices: []search_datasource.IndexDirective{ + {Name: "products", Backend: "bleve"}, + }, + Entities: []search_datasource.SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []search_datasource.IndexedField{ + {FieldName: "name", GraphQLType: "String", IndexType: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {FieldName: "description", GraphQLType: "String", IndexType: searchindex.FieldTypeText}, + {FieldName: "category", GraphQLType: "String", IndexType: searchindex.FieldTypeKeyword, Filterable: true}, + {FieldName: "price", GraphQLType: "Float", IndexType: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {FieldName: "inStock", GraphQLType: "Boolean", IndexType: searchindex.FieldTypeBool, Filterable: true}, + }, + }, + }, + Populations: []search_datasource.PopulateDirective{ + { + IndexName: "products", + EntityTypeName: "Product", + Path: "data.products", + Query: "{ products { id name description category price inStock } }", + }, + }, + Subscriptions: []search_datasource.SubscribeDirective{ + { + IndexName: "products", + EntityTypeName: "Product", + Path: "data.productUpdated", + Subscription: "subscription { productUpdated { id name description category price inStock } }", + }, + }, + } + + manager := search_datasource.NewManager(factory, indexRegistry, nil, executor, config) + manager.SetSubscriber(subscriber) + + if err := manager.Start(ctx); err != nil { + t.Fatalf("Manager.Start: %v", err) + } + defer func() { _ = manager.Stop() }() + + // Verify initial population: search for "shoes" should return 2 hits. + idx, ok := manager.GetIndex("products") + if !ok { + t.Fatal("index 'products' not found") + } + + searchAndExpect := func(query string, expectedCount int) *searchindex.SearchResult { + t.Helper() + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: query, + TypeName: "Product", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search(%q): %v", query, err) + } + if len(result.Hits) != expectedCount { + t.Fatalf("Search(%q): got %d hits, want %d", query, len(result.Hits), expectedCount) + } + return result + } + + searchAndExpect("shoes", 2) + + // Send a subscription event: upsert a new product. + upsertEvent, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "productUpdated": map[string]any{ + "id": "3", "name": "Tennis Shoes", "description": "Court shoes for tennis", "category": "Footwear", "price": 99.99, "inStock": true, + }, + }, + }) + subscriber.Send(upsertEvent) + + // Give the subscription goroutine time to process. + time.Sleep(200 * time.Millisecond) + + // Now "shoes" should return 3 hits. + searchAndExpect("shoes", 3) + + // Send another event: update an existing product's name. + updateEvent, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "productUpdated": map[string]any{ + "id": "1", "name": "Trail Running Boots", "description": "Great for trail running", "category": "Footwear", "price": 89.99, "inStock": true, + }, + }, + }) + subscriber.Send(updateEvent) + time.Sleep(200 * time.Millisecond) + + // "boots" should return 1 hit (the updated product). 
+ result := searchAndExpect("boots", 1) + if result.Hits[0].Identity.KeyFields["id"] != "1" { + t.Errorf("expected updated product id=1, got %v", result.Hits[0].Identity.KeyFields["id"]) + } +} + +func TestSubscriptionDeletion(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + populationResp, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "products": []map[string]any{ + {"id": "1", "name": "Running Shoes", "description": "Great for jogging", "category": "Footwear", "price": 89.99, "inStock": true}, + {"id": "2", "name": "Basketball Shoes", "description": "High-top sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + {"id": "3", "name": "Leather Belt", "description": "Genuine leather belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + }, + }) + + indexRegistry := searchindex.NewIndexFactoryRegistry() + indexRegistry.Register("bleve", bleve.NewFactory()) + + factory := search_datasource.NewFactory(ctx, indexRegistry, nil) + executor := &mockExecutor{response: populationResp} + subscriber := newMockSubscriber() + + config := &search_datasource.ParsedConfig{ + Indices: []search_datasource.IndexDirective{ + {Name: "products", Backend: "bleve"}, + }, + Entities: []search_datasource.SearchableEntity{ + { + TypeName: "Product", + IndexName: "products", + SearchField: "searchProducts", + KeyFields: []string{"id"}, + ResultsMetaInformation: true, + Fields: []search_datasource.IndexedField{ + {FieldName: "name", GraphQLType: "String", IndexType: searchindex.FieldTypeText}, + {FieldName: "description", GraphQLType: "String", IndexType: searchindex.FieldTypeText}, + {FieldName: "category", GraphQLType: "String", IndexType: searchindex.FieldTypeKeyword, Filterable: true}, + {FieldName: "price", GraphQLType: "Float", IndexType: searchindex.FieldTypeNumeric}, + {FieldName: "inStock", GraphQLType: "Boolean", IndexType: searchindex.FieldTypeBool}, + }, + }, + }, + Populations: []search_datasource.PopulateDirective{ + { + IndexName: "products", + EntityTypeName: "Product", + Path: "data.products", + Query: "{ products { id name description category price inStock } }", + }, + }, + Subscriptions: []search_datasource.SubscribeDirective{ + { + IndexName: "products", + EntityTypeName: "Product", + Path: "data.productUpdated", + DeletionPath: "data.productDeleted", + Subscription: "subscription { productUpdated { id name description category price inStock } productDeleted { id } }", + }, + }, + } + + manager := search_datasource.NewManager(factory, indexRegistry, nil, executor, config) + manager.SetSubscriber(subscriber) + + if err := manager.Start(ctx); err != nil { + t.Fatalf("Manager.Start: %v", err) + } + defer func() { _ = manager.Stop() }() + + idx, ok := manager.GetIndex("products") + if !ok { + t.Fatal("index 'products' not found") + } + + // Verify initial: "shoes" returns 2. + result, err := idx.Search(ctx, searchindex.SearchRequest{TextQuery: "shoes", TypeName: "Product", Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 2 { + t.Fatalf("initial search: got %d hits, want 2", len(result.Hits)) + } + + // Send a deletion event. + deleteEvent, _ := json.Marshal(map[string]any{ + "data": map[string]any{ + "productDeleted": map[string]any{ + "id": "1", + }, + }, + }) + subscriber.Send(deleteEvent) + time.Sleep(200 * time.Millisecond) + + // "shoes" should now return 1 (only basketball shoes). 
+ result, err = idx.Search(ctx, searchindex.SearchRequest{TextQuery: "shoes", TypeName: "Product", Limit: 10}) + if err != nil { + t.Fatalf("Search after delete: %v", err) + } + if len(result.Hits) != 1 { + t.Fatalf("after deletion: got %d hits, want 1", len(result.Hits)) + } + if result.Hits[0].Identity.KeyFields["id"] != "2" { + t.Errorf("expected remaining product id=2, got %v", result.Hits[0].Identity.KeyFields["id"]) + } +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/typesense_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/typesense_test.go new file mode 100644 index 0000000000..556d417cbf --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/typesense_test.go @@ -0,0 +1,97 @@ +//go:build integration + +package searche2e + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/typesense" +) + +func startTypesense(t *testing.T) (string, int) { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "typesense/typesense:27.1", + ExposedPorts: []string{"8108/tcp"}, + Env: map[string]string{ + "TYPESENSE_API_KEY": "test-api-key", + "TYPESENSE_DATA_DIR": "/data", + }, + Tmpfs: map[string]string{"/data": ""}, + WaitingFor: wait.ForHTTP("/health"). + WithPort("8108/tcp"). + WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start typesense container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "8108/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return host, port.Int() +} + +func newTypesenseIndex(t *testing.T, host string, port int) searchindex.Index { + t.Helper() + + factory := typesense.NewFactory() + cfg := typesense.Config{ + Host: host, + Port: port, + APIKey: "test-api-key", + Protocol: "http", + } + cfgJSON, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("marshal config: %v", err) + } + + indexName := fmt.Sprintf("test_%d", time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, ProductIndexSchema(), cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func TestTypesense(t *testing.T) { + host, port := startTypesense(t) + + idx := newTypesenseIndex(t, host, port) + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: true, + HasPrefix: false, + HasExists: false, + }, BackendHooks{}, func(t *testing.T) searchindex.Index { + return newTypesenseIndex(t, host, port) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/searche2e/weaviate_test.go b/v2/pkg/engine/datasource/search_datasource/searche2e/weaviate_test.go new file mode 100644 index 0000000000..f271dbed42 --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/searche2e/weaviate_test.go @@ -0,0 +1,90 @@ +//go:build integration + +package searche2e + +import ( + "context" + "fmt" + "testing" + 
"time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex/weaviate" +) + +func startWeaviate(t *testing.T) string { + t.Helper() + ctx := context.Background() + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: "semitechnologies/weaviate:1.27.0", + ExposedPorts: []string{"8080/tcp"}, + Env: map[string]string{ + "AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED": "true", + "PERSISTENCE_DATA_PATH": "/var/lib/weaviate", + "DEFAULT_VECTORIZER_MODULE": "none", + "CLUSTER_HOSTNAME": "node1", + }, + WaitingFor: wait.ForHTTP("/v1/.well-known/ready"). + WithPort("8080/tcp"). + WithStartupTimeout(60 * time.Second), + }, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start weaviate container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "8080/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return fmt.Sprintf("%s:%s", host, port.Port()) +} + +func newWeaviateIndex(t *testing.T, host string, name string) searchindex.Index { + t.Helper() + + factory := weaviate.NewFactory() + configJSON := []byte(fmt.Sprintf(`{"host":%q,"scheme":"http"}`, host)) + + idx, err := factory.CreateIndex(context.Background(), name, ProductIndexSchema(), configJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func TestWeaviate(t *testing.T) { + host := startWeaviate(t) + + idx := newWeaviateIndex(t, host, "test_products") + RunBackendTests(t, idx, BackendCaps{ + HasTextSearch: true, + HasFacets: false, + HasPrefix: true, + HasExists: false, + }, BackendHooks{ + WaitForIndex: func(t *testing.T) { + time.Sleep(1 * time.Second) + }, + }, func(t *testing.T) searchindex.Index { + name := fmt.Sprintf("weaviate_fresh_%d", time.Now().UnixNano()) + return newWeaviateIndex(t, host, name) + }) +} diff --git a/v2/pkg/engine/datasource/search_datasource/source.go b/v2/pkg/engine/datasource/search_datasource/source.go new file mode 100644 index 0000000000..867f4ae13b --- /dev/null +++ b/v2/pkg/engine/datasource/search_datasource/source.go @@ -0,0 +1,544 @@ +package search_datasource + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "sort" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/httpclient" + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Source implements resolve.DataSource for the search datasource. +type Source struct { + index searchindex.Index + config Configuration + embedder searchindex.Embedder // optional, for vector search query embedding +} + +// searchInput represents the parsed input from the planner. 
+type searchInput struct { + SearchField string `json:"search_field"` + Query string `json:"query,omitempty"` + Vector []float32 `json:"vector,omitempty"` + Search *searchInputArg `json:"search,omitempty"` + Filter json.RawMessage `json:"filter,omitempty"` + Sort json.RawMessage `json:"sort,omitempty"` + Limit *int `json:"limit,omitempty"` + Offset *int `json:"offset,omitempty"` + Facets []string `json:"facets,omitempty"` + GeoSort *geoSortInput `json:"geoSort,omitempty"` + Fuzziness *string `json:"fuzziness,omitempty"` + First *int `json:"first,omitempty"` + After *string `json:"after,omitempty"` + Last *int `json:"last,omitempty"` + Before *string `json:"before,omitempty"` + Prefix *string `json:"prefix,omitempty"` + IsSuggest bool `json:"is_suggest,omitempty"` +} + +type geoSortInput struct { + Field string `json:"field"` + Center struct { + Lat float64 `json:"lat"` + Lon float64 `json:"lon"` + } `json:"center"` + Direction string `json:"direction"` + Unit string `json:"unit,omitempty"` +} + +type searchInputArg struct { + Query *string `json:"query,omitempty"` + Vector []float32 `json:"vector,omitempty"` +} + +func (s *Source) Load(ctx context.Context, _ http.Header, input []byte) ([]byte, error) { + var si searchInput + if err := json.Unmarshal(input, &si); err != nil { + return nil, fmt.Errorf("search_datasource: invalid input: %w", err) + } + + if si.IsSuggest { + return s.loadSuggest(ctx, &si) + } + + req, err := s.buildSearchRequest(ctx, &si) + if err != nil { + return nil, fmt.Errorf("search_datasource: building search request: %w", err) + } + + result, err := s.index.Search(ctx, *req) + if err != nil { + return nil, fmt.Errorf("search_datasource: search failed: %w", err) + } + + return s.formatResponse(result, &si) +} + +func (s *Source) LoadWithFiles(ctx context.Context, headers http.Header, input []byte, _ []*httpclient.FileUpload) ([]byte, error) { + return s.Load(ctx, headers, input) +} + +func (s *Source) buildSearchRequest(ctx context.Context, si *searchInput) (*searchindex.SearchRequest, error) { + req := &searchindex.SearchRequest{ + TypeName: s.config.EntityTypeName, + } + + // Handle query input + if si.Search != nil { + // Vector search entity (@oneOf input) + if si.Search.Query != nil { + if s.embedder != nil { + // Auto-embed the text query and keep the text for hybrid search. + vec, err := s.embedder.EmbedSingle(ctx, *si.Search.Query) + if err != nil { + return nil, fmt.Errorf("embedding query: %w", err) + } + req.Vector = vec + req.VectorField = s.findVectorField() + req.TextQuery = *si.Search.Query + } else { + req.TextQuery = *si.Search.Query + } + } else if si.Search.Vector != nil { + req.Vector = si.Search.Vector + req.VectorField = s.findVectorField() + } + } else if si.Query != "" && si.Query != "*" { + req.TextQuery = si.Query + } + + // Populate text field weights from configuration. 
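+	// Fields without an explicit weight default to 1.0 below, so a config with
+	// name (weight 2.0) and description (no weight) yields
+	// [{name 2.0} {description 1.0}].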
+ if req.TextQuery != "" { + for _, f := range s.config.Fields { + if f.IndexType == searchindex.FieldTypeText { + w := f.Weight + if w == 0 { + w = 1.0 + } + req.TextFields = append(req.TextFields, searchindex.TextFieldWeight{ + Name: f.FieldName, + Weight: w, + }) + } + } + } + + // Parse filter + if len(si.Filter) > 0 { + filter, err := ParseFilterJSON(si.Filter, s.config.Fields) + if err != nil { + return nil, err + } + req.Filter = filter + } + + // Parse sort + if len(si.Sort) > 0 { + var sorts []sortInput + if err := json.Unmarshal(si.Sort, &sorts); err != nil { + return nil, fmt.Errorf("invalid sort input: %w", err) + } + for _, sort := range sorts { + req.Sort = append(req.Sort, searchindex.SortField{ + Field: s.resolveSortField(sort.Field), + Ascending: sort.Direction == "ASC", + }) + } + } + + // Geo distance sort + if si.GeoSort != nil { + unit := si.GeoSort.Unit + if unit == "" { + unit = "km" + } + req.GeoDistanceSort = &searchindex.GeoDistanceSort{ + Field: si.GeoSort.Field, + Center: searchindex.GeoPoint{Lat: si.GeoSort.Center.Lat, Lon: si.GeoSort.Center.Lon}, + Ascending: si.GeoSort.Direction == "ASC", + Unit: unit, + } + } + + // Fuzziness / typo tolerance + if si.Fuzziness != nil { + req.Fuzziness = parseFuzziness(*si.Fuzziness) + } + + // Cursor-based pagination takes precedence. + if s.config.CursorBasedPagination && (si.First != nil || si.Last != nil) { + if si.First != nil { + req.Limit = *si.First + } else if si.Last != nil { + req.Limit = *si.Last + } + if req.Limit <= 0 { + req.Limit = 10 + } + // Over-fetch by 1 to detect hasNextPage/hasPreviousPage. + req.Limit++ + if si.Last != nil { + // Backward pagination: reverse sort direction so the backend returns + // items from the end. formatConnectionResponse reverses them back. + // The "before" cursor becomes a SearchAfter in the reversed sort space. + for i := range req.Sort { + req.Sort[i].Ascending = !req.Sort[i].Ascending + } + if si.Before != nil { + sortVals, err := DecodeCursor(*si.Before) + if err != nil { + return nil, fmt.Errorf("decoding before cursor: %w", err) + } + req.SearchAfter = sortVals + } + } else { + if si.After != nil { + sortVals, err := DecodeCursor(*si.After) + if err != nil { + return nil, fmt.Errorf("decoding after cursor: %w", err) + } + req.SearchAfter = sortVals + } + } + } else if si.Limit != nil { + req.Limit = *si.Limit + } else if si.First != nil { + req.Limit = *si.First + } else { + req.Limit = 10 // default + } + if si.Offset != nil { + req.Offset = *si.Offset + } + + // Enforce upper bound on limit to prevent excessive result sets. + const maxLimit = 1000 + if req.Limit > maxLimit { + req.Limit = maxLimit + } + + for _, facet := range si.Facets { + req.Facets = append(req.Facets, searchindex.FacetRequest{Field: facet}) + } + + return req, nil +} + +type sortInput struct { + Field string `json:"field"` + Direction string `json:"direction"` +} + +func (s *Source) findVectorField() string { + // Embedding fields take precedence over manually declared VECTOR fields. + if len(s.config.EmbeddingFields) > 0 { + return s.config.EmbeddingFields[0].FieldName + } + // Then check VECTOR fields. + for _, f := range s.config.Fields { + if f.IndexType == searchindex.FieldTypeVector { + return f.FieldName + } + } + return "" +} + +// resolveSortField maps an uppercase enum value (e.g. "CREATEDAT") back to the +// original field name (e.g. "createdAt"). Falls back to strings.ToLower for +// fields not found in the config (e.g. "RELEVANCE"). 
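+//
+// With fields [createdAt, price], resolveSortField("CREATEDAT") returns
+// "createdAt", while resolveSortField("RELEVANCE") falls back to "relevance".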
+func (s *Source) resolveSortField(enumValue string) string { + upper := strings.ToUpper(enumValue) + for _, f := range s.config.Fields { + if strings.ToUpper(f.FieldName) == upper { + return f.FieldName + } + } + return strings.ToLower(enumValue) +} + +func (s *Source) formatResponse(result *searchindex.SearchResult, si *searchInput) ([]byte, error) { + if s.config.CursorBasedPagination { + return s.formatConnectionResponse(result, si) + } + if !s.config.ResultsMetaInformation { + return s.formatInlineResponse(result) + } + return s.formatWrapperResponse(result) +} + +func (s *Source) formatInlineResponse(result *searchindex.SearchResult) ([]byte, error) { + entities := make([]map[string]any, 0, len(result.Hits)) + for _, hit := range result.Hits { + entities = append(entities, hit.Representation) + } + wrapped := map[string]any{ + "data": map[string]any{ + s.config.SearchField: entities, + }, + } + return json.Marshal(wrapped) +} + +func (s *Source) formatWrapperResponse(result *searchindex.SearchResult) ([]byte, error) { + hits := make([]map[string]any, 0, len(result.Hits)) + for _, hit := range result.Hits { + h := map[string]any{ + "score": hit.Score, + "node": hit.Representation, + } + if hit.Distance != 0 { + h["distance"] = hit.Distance + } + if hit.GeoDistance != nil { + h["geoDistance"] = *hit.GeoDistance + } + if len(hit.Highlights) > 0 { + h["highlights"] = formatHighlights(hit.Highlights) + } + hits = append(hits, h) + } + + resp := map[string]any{ + "hits": hits, + "totalCount": result.TotalCount, + } + + if result.Facets != nil { + facets := make([]map[string]any, 0) + for field, fr := range result.Facets { + values := make([]map[string]any, 0, len(fr.Values)) + for _, fv := range fr.Values { + values = append(values, map[string]any{ + "value": fv.Value, + "count": fv.Count, + }) + } + facets = append(facets, map[string]any{ + "field": field, + "values": values, + }) + } + resp["facets"] = facets + } + + // Wrap in {"data": {"": ...}} to match PostProcessing.SelectResponseDataPath: ["data"]. + // After the resolver extracts "data", the result is keyed by the search field name, + // which aligns with the response tree built by the plan visitor. + wrapped := map[string]any{ + "data": map[string]any{ + s.config.SearchField: resp, + }, + } + return json.Marshal(wrapped) +} + +func (s *Source) formatConnectionResponse(result *searchindex.SearchResult, si *searchInput) ([]byte, error) { + // Determine the requested limit (before over-fetch). + requestedLimit := 10 + isBackward := si.Last != nil + if si.First != nil { + requestedLimit = *si.First + } else if si.Last != nil { + requestedLimit = *si.Last + } + + hits := result.Hits + hasMore := len(hits) > requestedLimit + if hasMore { + hits = hits[:requestedLimit] + } + + // For backward pagination, reverse the results (backend returns in reversed order). + if isBackward { + for i, j := 0, len(hits)-1; i < j; i, j = i+1, j-1 { + hits[i], hits[j] = hits[j], hits[i] + } + } + + // Compute pageInfo. + var hasNextPage, hasPreviousPage bool + if isBackward { + hasPreviousPage = hasMore + hasNextPage = si.Before != nil + } else { + hasNextPage = hasMore + hasPreviousPage = si.After != nil + } + + // Build edges. 
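+	// Each edge pairs the node representation with an opaque cursor encoded
+	// from the hit's sort values; score/distance/geoDistance/highlights are
+	// attached only when meta information is enabled.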
+ edges := make([]map[string]any, 0, len(hits)) + for _, hit := range hits { + cursor := EncodeCursor(hit.SortValues) + edge := map[string]any{ + "cursor": cursor, + "node": hit.Representation, + } + if s.config.ResultsMetaInformation { + edge["score"] = hit.Score + if hit.Distance != 0 { + edge["distance"] = hit.Distance + } + if hit.GeoDistance != nil { + edge["geoDistance"] = *hit.GeoDistance + } + if len(hit.Highlights) > 0 { + edge["highlights"] = formatHighlights(hit.Highlights) + } + } + edges = append(edges, edge) + } + + var startCursor, endCursor *string + if len(edges) > 0 { + sc := edges[0]["cursor"].(string) + ec := edges[len(edges)-1]["cursor"].(string) + startCursor = &sc + endCursor = &ec + } + + pageInfo := map[string]any{ + "hasNextPage": hasNextPage, + "hasPreviousPage": hasPreviousPage, + "startCursor": startCursor, + "endCursor": endCursor, + } + + resp := map[string]any{ + "edges": edges, + "pageInfo": pageInfo, + "totalCount": result.TotalCount, + } + + if result.Facets != nil { + facets := make([]map[string]any, 0) + for field, fr := range result.Facets { + values := make([]map[string]any, 0, len(fr.Values)) + for _, fv := range fr.Values { + values = append(values, map[string]any{ + "value": fv.Value, + "count": fv.Count, + }) + } + facets = append(facets, map[string]any{ + "field": field, + "values": values, + }) + } + resp["facets"] = facets + } + + wrapped := map[string]any{ + "data": map[string]any{ + s.config.SearchField: resp, + }, + } + return json.Marshal(wrapped) +} + +func parseFuzziness(val string) *searchindex.Fuzziness { + var f searchindex.Fuzziness + switch val { + case "EXACT": + f = searchindex.FuzzinessExact + case "LOW": + f = searchindex.FuzzinessLow + case "HIGH": + f = searchindex.FuzzinessHigh + default: + return nil + } + return &f +} + +const ( + defaultSuggestLimit = 10 + minPrefixLength = 2 +) + +func (s *Source) loadSuggest(ctx context.Context, si *searchInput) ([]byte, error) { + prefix := "" + if si.Prefix != nil { + prefix = *si.Prefix + } + if len(prefix) < minPrefixLength { + return s.formatSuggestResponse(nil) + } + + prefix = strings.ToLower(prefix) + + limit := defaultSuggestLimit + if si.Limit != nil && *si.Limit > 0 { + limit = *si.Limit + } + + // Collect autocomplete-enabled fields and query each. + var allTerms []searchindex.AutocompleteTerm + for _, f := range s.config.Fields { + if !f.Autocomplete { + continue + } + result, err := s.index.Autocomplete(ctx, searchindex.AutocompleteRequest{ + Field: f.FieldName, + Prefix: prefix, + Limit: limit, + }) + if err != nil { + return nil, fmt.Errorf("search_datasource: autocomplete failed for field %s: %w", f.FieldName, err) + } + allTerms = append(allTerms, result.Terms...) + } + + // Deduplicate terms across fields, summing counts. 
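+	// e.g. "shoe" suggested by name (count 3) and description (count 2)
+	// collapses into a single {Term: "shoe", Count: 5} entry.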
+ termMap := make(map[string]int) + for _, t := range allTerms { + termMap[t.Term] += t.Count + } + + deduped := make([]searchindex.AutocompleteTerm, 0, len(termMap)) + for term, count := range termMap { + deduped = append(deduped, searchindex.AutocompleteTerm{Term: term, Count: count}) + } + sort.Slice(deduped, func(i, j int) bool { + if deduped[i].Count != deduped[j].Count { + return deduped[i].Count > deduped[j].Count + } + return deduped[i].Term < deduped[j].Term + }) + if len(deduped) > limit { + deduped = deduped[:limit] + } + + return s.formatSuggestResponse(deduped) +} + +func (s *Source) formatSuggestResponse(terms []searchindex.AutocompleteTerm) ([]byte, error) { + suggestions := make([]map[string]any, 0, len(terms)) + for _, t := range terms { + suggestions = append(suggestions, map[string]any{ + "term": t.Term, + "count": t.Count, + }) + } + wrapped := map[string]any{ + "data": map[string]any{ + s.config.SearchField: suggestions, + }, + } + return json.Marshal(wrapped) +} + +// formatHighlights converts the backend highlight map to the GraphQL SearchHighlight array format. +func formatHighlights(highlights map[string][]string) []map[string]any { + result := make([]map[string]any, 0, len(highlights)) + for field, fragments := range highlights { + result = append(result, map[string]any{ + "field": field, + "fragments": fragments, + }) + } + return result +} diff --git a/v2/pkg/searchindex/algolia/algolia.go b/v2/pkg/searchindex/algolia/algolia.go new file mode 100644 index 0000000000..8f1f9b2cea --- /dev/null +++ b/v2/pkg/searchindex/algolia/algolia.go @@ -0,0 +1,861 @@ +// Package algolia implements the searchindex.Index interface for Algolia. +// +// It uses only the Go standard library (net/http + encoding/json) to communicate +// with the Algolia REST API. No external Algolia SDK is used. +// +// Priority: P2 +// Supports: full-text SaaS search. +// Filter translation: searchindex.Filter -> Algolia filters string. +package algolia + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "sort" + "strconv" + "strings" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Compile-time interface checks. +var ( + _ searchindex.IndexFactory = (*Factory)(nil) + _ searchindex.Index = (*Index)(nil) +) + +// Config holds Algolia-specific configuration. +type Config struct { + AppID string `json:"app_id"` + APIKey string `json:"api_key"` +} + +// Factory implements searchindex.IndexFactory for Algolia. +type Factory struct{} + +// Index implements searchindex.Index for Algolia. +type Index struct { + name string + config Config + schema searchindex.IndexConfig + client *http.Client + hosts []string +} + +// algoliaHosts returns the ordered list of hosts for the given AppID. +// The primary read host is {AppID}-dsn.algolia.net, with fallbacks to +// {AppID}-1.algolianet.com, {AppID}-2.algolianet.com, {AppID}-3.algolianet.com. +func algoliaHosts(appID string) []string { + return []string{ + fmt.Sprintf("%s-dsn.algolia.net", appID), + fmt.Sprintf("%s-1.algolianet.com", appID), + fmt.Sprintf("%s-2.algolianet.com", appID), + fmt.Sprintf("%s-3.algolianet.com", appID), + } +} + +// algoliaWriteHosts returns the ordered list of write hosts for the given AppID. +// The primary write host is {AppID}.algolia.net, with the same fallbacks. 
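+//
+// For AppID "MYAPP" this yields MYAPP.algolia.net, MYAPP-1.algolianet.com,
+// MYAPP-2.algolianet.com, MYAPP-3.algolianet.com.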
+func algoliaWriteHosts(appID string) []string { + return []string{ + fmt.Sprintf("%s.algolia.net", appID), + fmt.Sprintf("%s-1.algolianet.com", appID), + fmt.Sprintf("%s-2.algolianet.com", appID), + fmt.Sprintf("%s-3.algolianet.com", appID), + } +} + +// CreateIndex creates a new Algolia index with the given schema configuration. +// It configures searchable attributes, faceting attributes, and ranking based on +// the provided FieldConfig entries. +func (f *Factory) CreateIndex(ctx context.Context, name string, schema searchindex.IndexConfig, configJSON []byte) (searchindex.Index, error) { + var cfg Config + if len(configJSON) > 0 { + if err := json.Unmarshal(configJSON, &cfg); err != nil { + return nil, fmt.Errorf("algolia: invalid config: %w", err) + } + } + if cfg.AppID == "" || cfg.APIKey == "" { + return nil, fmt.Errorf("algolia: app_id and api_key are required") + } + + idx := &Index{ + name: name, + config: cfg, + schema: schema, + client: &http.Client{Timeout: 30 * time.Second}, + hosts: algoliaWriteHosts(cfg.AppID), + } + + // Configure the index settings via PUT /1/indexes/{indexName}/settings + if err := idx.configureSettings(ctx); err != nil { + return nil, fmt.Errorf("algolia: failed to configure index settings: %w", err) + } + + return idx, nil +} + +// configureSettings pushes the index settings derived from the schema to Algolia. +func (idx *Index) configureSettings(ctx context.Context) error { + settings := make(map[string]any) + + var searchableAttrs []string + var facetingAttrs []string + + for _, field := range idx.schema.Fields { + switch field.Type { + case searchindex.FieldTypeText: + searchableAttrs = append(searchableAttrs, field.Name) + if field.Filterable { + facetingAttrs = append(facetingAttrs, fmt.Sprintf("searchable(%s)", field.Name)) + } + case searchindex.FieldTypeKeyword: + if field.Filterable { + facetingAttrs = append(facetingAttrs, fmt.Sprintf("filterOnly(%s)", field.Name)) + } + case searchindex.FieldTypeNumeric: + if field.Filterable { + facetingAttrs = append(facetingAttrs, fmt.Sprintf("filterOnly(%s)", field.Name)) + } + case searchindex.FieldTypeBool: + if field.Filterable { + facetingAttrs = append(facetingAttrs, fmt.Sprintf("filterOnly(%s)", field.Name)) + } + case searchindex.FieldTypeGeo: + // Algolia has native _geoloc support; skip for now. + case searchindex.FieldTypeDate, searchindex.FieldTypeDateTime: + if field.Filterable { + facetingAttrs = append(facetingAttrs, fmt.Sprintf("filterOnly(%s)", field.Name)) + } + } + } + + // Always make _typeName filterable for multi-type indices. + facetingAttrs = append(facetingAttrs, "filterOnly(_typeName)") + + if len(searchableAttrs) > 0 { + settings["searchableAttributes"] = searchableAttrs + } + if len(facetingAttrs) > 0 { + settings["attributesForFaceting"] = facetingAttrs + } + + // Configure custom ranking for sortable fields. + var customRanking []string + for _, field := range idx.schema.Fields { + if field.Sortable { + customRanking = append(customRanking, fmt.Sprintf("asc(%s)", field.Name)) + } + } + if len(customRanking) > 0 { + settings["customRanking"] = customRanking + } + + path := fmt.Sprintf("/1/indexes/%s/settings", url.PathEscape(idx.name)) + resp, err := idx.doRequest(ctx, http.MethodPut, path, settings) + if err != nil { + return err + } + + return idx.waitForTask(ctx, resp) +} + +// documentObjectID produces a deterministic objectID for a DocumentIdentity. +// Format: TypeName:key1=value1,key2=value2 (keys sorted alphabetically). 
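+//
+// Example: {TypeName: "Product", KeyFields: {"id": "1"}} -> "Product:id=1".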
+func documentObjectID(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return id.TypeName + } + + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(id.TypeName) + b.WriteByte(':') + for i, k := range keys { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(k) + b.WriteByte('=') + b.WriteString(fmt.Sprintf("%v", id.KeyFields[k])) + } + return b.String() +} + +// IndexDocument indexes a single document. +func (idx *Index) IndexDocument(ctx context.Context, doc searchindex.EntityDocument) error { + return idx.IndexDocuments(ctx, []searchindex.EntityDocument{doc}) +} + +// IndexDocuments indexes a batch of documents using Algolia's batch API. +// POST /1/indexes/{indexName}/batch +func (idx *Index) IndexDocuments(ctx context.Context, docs []searchindex.EntityDocument) error { + if len(docs) == 0 { + return nil + } + + requests := make([]map[string]any, 0, len(docs)) + for _, doc := range docs { + body := make(map[string]any) + body["objectID"] = documentObjectID(doc.Identity) + body["_typeName"] = doc.Identity.TypeName + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return fmt.Errorf("algolia: failed to marshal key fields: %w", err) + } + body["_keyFieldsJSON"] = string(keyFieldsJSON) + + for k, v := range doc.Fields { + body[k] = v + } + + requests = append(requests, map[string]any{ + "action": "addObject", + "body": body, + }) + } + + payload := map[string]any{ + "requests": requests, + } + + path := fmt.Sprintf("/1/indexes/%s/batch", url.PathEscape(idx.name)) + resp, err := idx.doRequest(ctx, http.MethodPost, path, payload) + if err != nil { + return fmt.Errorf("algolia: batch index failed: %w", err) + } + + return idx.waitForTask(ctx, resp) +} + +// DeleteDocument deletes a single document by identity. +func (idx *Index) DeleteDocument(ctx context.Context, id searchindex.DocumentIdentity) error { + return idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{id}) +} + +// DeleteDocuments deletes a batch of documents using Algolia's batch API. +// POST /1/indexes/{indexName}/batch with action "deleteObject". +func (idx *Index) DeleteDocuments(ctx context.Context, ids []searchindex.DocumentIdentity) error { + if len(ids) == 0 { + return nil + } + + requests := make([]map[string]any, 0, len(ids)) + for _, id := range ids { + requests = append(requests, map[string]any{ + "action": "deleteObject", + "body": map[string]any{ + "objectID": documentObjectID(id), + }, + }) + } + + payload := map[string]any{ + "requests": requests, + } + + path := fmt.Sprintf("/1/indexes/%s/batch", url.PathEscape(idx.name)) + resp, err := idx.doRequest(ctx, http.MethodPost, path, payload) + if err != nil { + return fmt.Errorf("algolia: batch delete failed: %w", err) + } + + return idx.waitForTask(ctx, resp) +} + +// Search performs a search query against the Algolia index. +// POST /1/indexes/{indexName}/query +func (idx *Index) Search(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + params := make(map[string]any) + + // Text query + if req.TextQuery != "" { + params["query"] = req.TextQuery + } else { + params["query"] = "" + } + + // Restrict searchable attributes if specific text fields requested. + // Per-field weights are not supported at query time by Algolia. 
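+	// Weights are therefore dropped here: TextFields [{name 2.0} {description 1.0}]
+	// becomes restrictSearchableAttributes: ["name", "description"].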
+ if len(req.TextFields) > 0 {
+ names := make([]string, len(req.TextFields))
+ for i, tf := range req.TextFields {
+ names[i] = tf.Name
+ }
+ params["restrictSearchableAttributes"] = names
+ }
+
+ // Filters
+ var filterParts []string
+
+ // TypeName filter for multi-type indices
+ if req.TypeName != "" {
+ filterParts = append(filterParts, fmt.Sprintf("_typeName:%s", quoteFilterValue(req.TypeName)))
+ }
+
+ // Structured filters
+ if req.Filter != nil {
+ filterStr := buildFilterString(req.Filter)
+ if filterStr != "" {
+ filterParts = append(filterParts, filterStr)
+ }
+ }
+
+ if len(filterParts) > 0 {
+ params["filters"] = strings.Join(filterParts, " AND ")
+ }
+
+ // Pagination: Algolia uses hitsPerPage + page (0-based)
+ limit := req.Limit
+ if limit <= 0 {
+ limit = 20
+ }
+ params["hitsPerPage"] = limit
+
+ if req.Offset > 0 {
+ // Convert offset to a page number (0-based). Algolia pages are fixed-size,
+ // so an offset that is not a multiple of limit is rounded down to the
+ // nearest page boundary.
+ page := req.Offset / limit
+ params["page"] = page
+ } else {
+ params["page"] = 0
+ }
+
+ // Facets
+ if len(req.Facets) > 0 {
+ facetFields := make([]string, 0, len(req.Facets))
+ for _, f := range req.Facets {
+ facetFields = append(facetFields, f.Field)
+ }
+ params["facets"] = facetFields
+
+ // Find the maximum facet size requested
+ maxFacetValues := 0
+ for _, f := range req.Facets {
+ if f.Size > maxFacetValues {
+ maxFacetValues = f.Size
+ }
+ }
+ if maxFacetValues > 0 {
+ params["maxValuesPerFacet"] = maxFacetValues
+ }
+ }
+
+ // Fuzziness / typo tolerance: EXACT disables typo tolerance entirely;
+ // LOW and HIGH fall through to Algolia's default typo tolerance.
+ if req.Fuzziness != nil && *req.Fuzziness == searchindex.FuzzinessExact {
+ params["typoTolerance"] = false
+ }
+
+ // attributesToRetrieve: return all attributes
+ params["attributesToRetrieve"] = []string{"*"}
+ params["attributesToHighlight"] = []string{"*"}
+
+ // Use the read hosts for search
+ readHosts := algoliaHosts(idx.config.AppID)
+ path := fmt.Sprintf("/1/indexes/%s/query", url.PathEscape(idx.name))
+
+ resp, err := idx.doRequestWithHosts(ctx, http.MethodPost, path, params, readHosts)
+ if err != nil {
+ return nil, fmt.Errorf("algolia: search failed: %w", err)
+ }
+
+ return idx.parseSearchResponse(resp, req)
+}
+
+// parseSearchResponse converts an Algolia search response to a SearchResult.
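+// The parser consumes Algolia's standard response shape, e.g. (trimmed,
+// illustrative): {"hits": [...], "nbHits": 2, "facets": {"category": {...}}}.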
+func (idx *Index) parseSearchResponse(resp map[string]any, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + result := &searchindex.SearchResult{ + Facets: make(map[string]searchindex.FacetResult), + } + + // Parse total count + if nbHits, ok := resp["nbHits"]; ok { + switch v := nbHits.(type) { + case float64: + result.TotalCount = int(v) + case json.Number: + n, _ := v.Int64() + result.TotalCount = int(n) + } + } + + // Parse hits + if hitsRaw, ok := resp["hits"]; ok { + if hits, ok := hitsRaw.([]any); ok { + for _, hitRaw := range hits { + hit, ok := hitRaw.(map[string]any) + if !ok { + continue + } + + searchHit := searchindex.SearchHit{ + Representation: make(map[string]any), + Highlights: make(map[string][]string), + } + + // Extract identity + var typeName string + if tn, ok := hit["_typeName"].(string); ok { + typeName = tn + } + + var keyFields map[string]any + if kfJSON, ok := hit["_keyFieldsJSON"].(string); ok { + _ = json.Unmarshal([]byte(kfJSON), &keyFields) + } + + searchHit.Identity = searchindex.DocumentIdentity{ + TypeName: typeName, + KeyFields: keyFields, + } + + // Build representation from fields (excluding internal Algolia fields) + for k, v := range hit { + if strings.HasPrefix(k, "_") && k != "_typeName" { + continue + } + if k == "objectID" { + continue + } + searchHit.Representation[k] = v + } + + // Add __typename to representation + if typeName != "" { + searchHit.Representation["__typename"] = typeName + } + + // Parse highlights from _highlightResult + if highlightResult, ok := hit["_highlightResult"]; ok { + if hrMap, ok := highlightResult.(map[string]any); ok { + for field, hrVal := range hrMap { + if strings.HasPrefix(field, "_") || field == "objectID" { + continue + } + if hrField, ok := hrVal.(map[string]any); ok { + if matchedWords, ok := hrField["matchedWords"]; ok { + if mw, ok := matchedWords.([]any); ok && len(mw) > 0 { + if value, ok := hrField["value"].(string); ok { + searchHit.Highlights[field] = []string{value} + } + } + } + } + } + } + } + + result.Hits = append(result.Hits, searchHit) + } + } + } + + // Parse facets + if facetsRaw, ok := resp["facets"]; ok { + if facetsMap, ok := facetsRaw.(map[string]any); ok { + for field, valuesRaw := range facetsMap { + if valuesMap, ok := valuesRaw.(map[string]any); ok { + fr := searchindex.FacetResult{} + for value, countRaw := range valuesMap { + count := 0 + switch c := countRaw.(type) { + case float64: + count = int(c) + case json.Number: + n, _ := c.Int64() + count = int(n) + } + fr.Values = append(fr.Values, searchindex.FacetValue{ + Value: value, + Count: count, + }) + } + // Sort facet values by count descending for determinism + sort.Slice(fr.Values, func(i, j int) bool { + return fr.Values[i].Count > fr.Values[j].Count + }) + + // Apply size limit from facet request + for _, facetReq := range req.Facets { + if facetReq.Field == field && facetReq.Size > 0 && len(fr.Values) > facetReq.Size { + fr.Values = fr.Values[:facetReq.Size] + } + } + + result.Facets[field] = fr + } + } + } + } + + return result, nil +} + +// Close releases resources held by the index. +func (idx *Index) Close() error { + idx.client.CloseIdleConnections() + return nil +} + +// buildFilterString converts a searchindex.Filter tree into an Algolia filter string. 
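+// For example, And[category=electronics, inStock=true] renders as
+// "(category:electronics AND inStock:true)" (see TestBuildFilterString below).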
+func buildFilterString(f *Filter) string {
+ if f == nil {
+ return ""
+ }
+
+ var parts []string
+
+ // AND
+ if len(f.And) > 0 {
+ var andParts []string
+ for _, child := range f.And {
+ s := buildFilterString(child)
+ if s != "" {
+ andParts = append(andParts, s)
+ }
+ }
+ if len(andParts) > 0 {
+ parts = append(parts, "("+strings.Join(andParts, " AND ")+")")
+ }
+ }
+
+ // OR
+ if len(f.Or) > 0 {
+ var orParts []string
+ for _, child := range f.Or {
+ s := buildFilterString(child)
+ if s != "" {
+ orParts = append(orParts, s)
+ }
+ }
+ if len(orParts) > 0 {
+ parts = append(parts, "("+strings.Join(orParts, " OR ")+")")
+ }
+ }
+
+ // NOT
+ if f.Not != nil {
+ s := buildFilterString(f.Not)
+ if s != "" {
+ parts = append(parts, fmt.Sprintf("NOT %s", s))
+ }
+ }
+
+ // Term filter: field:value
+ if f.Term != nil {
+ parts = append(parts, formatTermFilter(f.Term.Field, f.Term.Value))
+ }
+
+ // Terms filter (IN): field:val1 OR field:val2
+ if f.Terms != nil {
+ var termParts []string
+ for _, v := range f.Terms.Values {
+ termParts = append(termParts, formatTermFilter(f.Terms.Field, v))
+ }
+ if len(termParts) > 0 {
+ parts = append(parts, "("+strings.Join(termParts, " OR ")+")")
+ }
+ }
+
+ // Range filter
+ if f.Range != nil {
+ parts = append(parts, formatRangeFilter(f.Range))
+ }
+
+ // Prefix filter: Algolia's filter syntax has no prefix operator (and
+ // field:value* is not valid inside filters), so as a best-effort fallback
+ // we emit an exact match on the value. This is a known limitation of the
+ // Algolia backend.
+ if f.Prefix != nil {
+ parts = append(parts, fmt.Sprintf("%s:%s", f.Prefix.Field, quoteFilterValue(f.Prefix.Value)))
+ }
+
+ // Exists filter: Algolia has no exists operator, so it is skipped here.
+ // (Checking field != "" would only be a partial workaround and fails for
+ // non-string types.)
+
+ if len(parts) == 0 {
+ return ""
+ }
+
+ if len(parts) == 1 {
+ return parts[0]
+ }
+
+ return "(" + strings.Join(parts, " AND ") + ")"
+}
+
+// Filter aliases searchindex.Filter so the recursive buildFilterString
+// signature stays short.
+type Filter = searchindex.Filter
+
+// formatTermFilter formats a single term filter for Algolia.
+func formatTermFilter(field string, value any) string {
+ switch v := value.(type) {
+ case bool:
+ if v {
+ return fmt.Sprintf("%s:true", field)
+ }
+ return fmt.Sprintf("%s:false", field)
+ case float64:
+ // Format without trailing zeros for integers
+ if v == float64(int64(v)) {
+ return fmt.Sprintf("%s:%d", field, int64(v))
+ }
+ return fmt.Sprintf("%s:%s", field, strconv.FormatFloat(v, 'f', -1, 64))
+ case float32:
+ return fmt.Sprintf("%s:%s", field, strconv.FormatFloat(float64(v), 'f', -1, 32))
+ case int:
+ return fmt.Sprintf("%s:%d", field, v)
+ case int64:
+ return fmt.Sprintf("%s:%d", field, v)
+ case string:
+ return fmt.Sprintf("%s:%s", field, quoteFilterValue(v))
+ default:
+ return fmt.Sprintf("%s:%s", field, quoteFilterValue(fmt.Sprintf("%v", v)))
+ }
+}
+
+// formatRangeFilter formats a range filter for Algolia.
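+// Only the bounds present on the filter are emitted, joined with " AND ";
+// GTE/LTE take precedence over GT/LT when both are set.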
+// Algolia uses: field > 10 AND field < 100 +func formatRangeFilter(r *searchindex.RangeFilter) string { + var parts []string + + if r.GTE != nil { + parts = append(parts, fmt.Sprintf("%s >= %s", r.Field, formatNumericValue(r.GTE))) + } else if r.HasGT && r.GT != nil { + parts = append(parts, fmt.Sprintf("%s > %s", r.Field, formatNumericValue(r.GT))) + } + + if r.LTE != nil { + parts = append(parts, fmt.Sprintf("%s <= %s", r.Field, formatNumericValue(r.LTE))) + } else if r.HasLT && r.LT != nil { + parts = append(parts, fmt.Sprintf("%s < %s", r.Field, formatNumericValue(r.LT))) + } + + if len(parts) == 0 { + return "" + } + + return strings.Join(parts, " AND ") +} + +// formatNumericValue formats a numeric value for use in Algolia filters. +func formatNumericValue(v any) string { + switch n := v.(type) { + case float64: + if n == float64(int64(n)) { + return strconv.FormatInt(int64(n), 10) + } + return strconv.FormatFloat(n, 'f', -1, 64) + case float32: + return strconv.FormatFloat(float64(n), 'f', -1, 32) + case int: + return strconv.Itoa(n) + case int64: + return strconv.FormatInt(n, 10) + case int32: + return strconv.FormatInt(int64(n), 10) + case string: + return n + default: + return fmt.Sprintf("%v", v) + } +} + +// quoteFilterValue quotes a string value for Algolia filters if needed. +func quoteFilterValue(v string) string { + // If value contains spaces or special characters, wrap in quotes + if strings.ContainsAny(v, " \t\"'():<>!=") { + return fmt.Sprintf("%q", v) + } + return v +} + +// Autocomplete returns terms matching the given prefix using Algolia's facet search API. +func (idx *Index) Autocomplete(ctx context.Context, req searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) { + limit := req.Limit + if limit <= 0 { + limit = 10 + } + + body := map[string]any{ + "facetQuery": strings.ToLower(req.Prefix), + "maxFacetHits": limit, + } + + path := fmt.Sprintf("/1/indexes/%s/facets/%s/query", + url.PathEscape(idx.name), url.PathEscape(req.Field)) + readHosts := algoliaHosts(idx.config.AppID) + + resp, err := idx.doRequestWithHosts(ctx, http.MethodPost, path, body, readHosts) + if err != nil { + return nil, fmt.Errorf("algolia: facet search failed: %w", err) + } + + facetHits, _ := resp["facetHits"].([]any) + terms := make([]searchindex.AutocompleteTerm, 0, len(facetHits)) + for _, hit := range facetHits { + hitMap, ok := hit.(map[string]any) + if !ok { + continue + } + value, _ := hitMap["value"].(string) + count := 0 + if c, ok := hitMap["count"].(float64); ok { + count = int(c) + } + terms = append(terms, searchindex.AutocompleteTerm{Term: value, Count: count}) + } + + return &searchindex.AutocompleteResult{Terms: terms}, nil +} + +// doRequest performs an HTTP request against the Algolia API using write hosts. +func (idx *Index) doRequest(ctx context.Context, method, path string, body any) (map[string]any, error) { + return idx.doRequestWithHosts(ctx, method, path, body, idx.hosts) +} + +// doRequestWithHosts performs an HTTP request with the given host fallback list. 
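+// Hosts are tried in order: transport errors and 5xx responses fall through
+// to the next host, while 4xx responses fail immediately.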
+func (idx *Index) doRequestWithHosts(ctx context.Context, method, path string, body any, hosts []string) (map[string]any, error) {
+ var data []byte
+ if body != nil {
+ var err error
+ data, err = json.Marshal(body)
+ if err != nil {
+ return nil, fmt.Errorf("algolia: failed to marshal request body: %w", err)
+ }
+ }
+
+ var lastErr error
+ for _, host := range hosts {
+ reqURL := fmt.Sprintf("https://%s%s", host, path)
+
+ // Marshal once, but create a fresh reader for each attempt since a
+ // previous attempt may have consumed it.
+ var bodyReader io.Reader
+ if data != nil {
+ bodyReader = bytes.NewReader(data)
+ }
+
+ req, err := http.NewRequestWithContext(ctx, method, reqURL, bodyReader)
+ if err != nil {
+ lastErr = fmt.Errorf("algolia: failed to create request: %w", err)
+ continue
+ }
+
+ req.Header.Set("X-Algolia-Application-Id", idx.config.AppID)
+ req.Header.Set("X-Algolia-API-Key", idx.config.APIKey)
+ req.Header.Set("Content-Type", "application/json; charset=UTF-8")
+
+ resp, err := idx.client.Do(req)
+ if err != nil {
+ lastErr = fmt.Errorf("algolia: request to %s failed: %w", host, err)
+ continue
+ }
+
+ respBody, err := io.ReadAll(resp.Body)
+ resp.Body.Close()
+
+ if err != nil {
+ lastErr = fmt.Errorf("algolia: failed to read response body: %w", err)
+ continue
+ }
+
+ if resp.StatusCode >= 200 && resp.StatusCode < 300 {
+ var result map[string]any
+ if err := json.Unmarshal(respBody, &result); err != nil {
+ return nil, fmt.Errorf("algolia: failed to parse response: %w", err)
+ }
+ return result, nil
+ }
+
+ // 4xx errors are client errors; retrying another host won't help.
+ if resp.StatusCode >= 400 && resp.StatusCode < 500 {
+ return nil, fmt.Errorf("algolia: API error (status %d): %s", resp.StatusCode, string(respBody))
+ }
+
+ // For 5xx errors, try the next host.
+ lastErr = fmt.Errorf("algolia: API error (status %d): %s", resp.StatusCode, string(respBody))
+ }
+
+ return nil, fmt.Errorf("algolia: all hosts failed, last error: %w", lastErr)
+}
+
+// waitForTask waits for an Algolia async task to complete.
+// Algolia returns a taskID for write operations. We poll until the task is "published".
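+// Polling backs off exponentially from 100ms up to a 5s cap, bounded by a
+// 2-minute overall deadline and the caller's context.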
+func (idx *Index) waitForTask(ctx context.Context, resp map[string]any) error { + if resp == nil { + return nil + } + + taskIDRaw, ok := resp["taskID"] + if !ok { + return nil + } + + var taskID int64 + switch v := taskIDRaw.(type) { + case float64: + taskID = int64(v) + case json.Number: + n, err := v.Int64() + if err != nil { + return fmt.Errorf("algolia: invalid taskID: %w", err) + } + taskID = n + default: + return nil + } + + path := fmt.Sprintf("/1/indexes/%s/task/%d", url.PathEscape(idx.name), taskID) + readHosts := algoliaHosts(idx.config.AppID) + + // Poll with exponential backoff + backoff := 100 * time.Millisecond + maxBackoff := 5 * time.Second + maxWait := 2 * time.Minute + + deadline := time.Now().Add(maxWait) + + for { + if time.Now().After(deadline) { + return fmt.Errorf("algolia: timeout waiting for task %d", taskID) + } + + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(backoff): + } + + result, err := idx.doRequestWithHosts(ctx, http.MethodGet, path, nil, readHosts) + if err != nil { + // Don't fail immediately on transient errors during polling + backoff = backoff * 2 + if backoff > maxBackoff { + backoff = maxBackoff + } + continue + } + + status, _ := result["status"].(string) + if status == "published" { + return nil + } + + backoff = backoff * 2 + if backoff > maxBackoff { + backoff = maxBackoff + } + } +} diff --git a/v2/pkg/searchindex/algolia/algolia_test.go b/v2/pkg/searchindex/algolia/algolia_test.go new file mode 100644 index 0000000000..ec8f48a8b0 --- /dev/null +++ b/v2/pkg/searchindex/algolia/algolia_test.go @@ -0,0 +1,546 @@ +//go:build integration + +package algolia + +import ( + "context" + "fmt" + "os" + "testing" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func skipIfNoAlgolia(t *testing.T) (string, string) { + t.Helper() + appID := os.Getenv("ALGOLIA_APP_ID") + apiKey := os.Getenv("ALGOLIA_API_KEY") + if appID == "" || apiKey == "" { + t.Skip("ALGOLIA_APP_ID and ALGOLIA_API_KEY environment variables are required for integration tests") + } + return appID, apiKey +} + +func TestAlgoliaIntegration(t *testing.T) { + appID, apiKey := skipIfNoAlgolia(t) + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + indexName := fmt.Sprintf("test_integration_%d", time.Now().UnixNano()) + + schema := searchindex.IndexConfig{ + Name: indexName, + Fields: []searchindex.FieldConfig{ + {Name: "title", Type: searchindex.FieldTypeText, Filterable: true, Sortable: false}, + {Name: "description", Type: searchindex.FieldTypeText, Filterable: false, Sortable: false}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: false}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true, Sortable: false}, + }, + } + + configJSON := fmt.Sprintf(`{"app_id": %q, "api_key": %q}`, appID, apiKey) + + factory := &Factory{} + + t.Run("CreateIndex", func(t *testing.T) { + idx, err := factory.CreateIndex(ctx, indexName, schema, []byte(configJSON)) + if err != nil { + t.Fatalf("CreateIndex failed: %v", err) + } + defer idx.Close() + + t.Run("IndexDocuments", func(t *testing.T) { + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }, + Fields: map[string]any{ + "title": "Wireless Keyboard", + "description": "A compact wireless keyboard with Bluetooth connectivity", 
+ "category": "electronics", + "price": 49.99, + "inStock": true, + }, + }, + { + Identity: searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "2"}, + }, + Fields: map[string]any{ + "title": "USB Mouse", + "description": "Ergonomic USB mouse with adjustable DPI", + "category": "electronics", + "price": 29.99, + "inStock": true, + }, + }, + { + Identity: searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "3"}, + }, + Fields: map[string]any{ + "title": "Desk Lamp", + "description": "LED desk lamp with adjustable brightness", + "category": "office", + "price": 35.00, + "inStock": false, + }, + }, + } + + err := idx.IndexDocuments(ctx, docs) + if err != nil { + t.Fatalf("IndexDocuments failed: %v", err) + } + + // Give Algolia a moment to process + time.Sleep(2 * time.Second) + + t.Run("SearchTextQuery", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "keyboard", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search failed: %v", err) + } + if result.TotalCount == 0 { + t.Fatal("Expected at least one result for 'keyboard' search") + } + found := false + for _, hit := range result.Hits { + if hit.Identity.TypeName == "Product" { + found = true + break + } + } + if !found { + t.Fatal("Expected to find a Product hit") + } + t.Logf("Search returned %d hits (total: %d)", len(result.Hits), result.TotalCount) + }) + + t.Run("SearchWithFilter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{ + Field: "category", + Value: "electronics", + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with filter failed: %v", err) + } + if result.TotalCount < 2 { + t.Fatalf("Expected at least 2 results for category=electronics, got %d", result.TotalCount) + } + t.Logf("Filtered search returned %d hits", result.TotalCount) + }) + + t.Run("SearchWithRangeFilter", func(t *testing.T) { + gte := any(30.0) + lte := any(50.0) + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: gte, + LTE: lte, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with range filter failed: %v", err) + } + if result.TotalCount == 0 { + t.Fatal("Expected at least one result for price range 30-50") + } + t.Logf("Range filter search returned %d hits", result.TotalCount) + }) + + t.Run("SearchWithBoolFilter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{ + Field: "inStock", + Value: false, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with bool filter failed: %v", err) + } + if result.TotalCount < 1 { + t.Fatalf("Expected at least 1 result for inStock=false, got %d", result.TotalCount) + } + t.Logf("Bool filter search returned %d hits", result.TotalCount) + }) + + t.Run("SearchWithANDFilter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + Filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "electronics"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with AND filter failed: %v", err) + } + if result.TotalCount < 2 { + t.Fatalf("Expected at least 2 
results for category=electronics AND inStock=true, got %d", result.TotalCount) + } + t.Logf("AND filter search returned %d hits", result.TotalCount) + }) + + t.Run("SearchWithORFilter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + Filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "electronics"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "office"}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with OR filter failed: %v", err) + } + if result.TotalCount < 3 { + t.Fatalf("Expected at least 3 results for category=electronics OR category=office, got %d", result.TotalCount) + } + t.Logf("OR filter search returned %d hits", result.TotalCount) + }) + + t.Run("SearchWithNOTFilter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + Filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "electronics"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with NOT filter failed: %v", err) + } + for _, hit := range result.Hits { + if cat, ok := hit.Representation["category"].(string); ok && cat == "electronics" { + t.Fatal("Expected no electronics hits when using NOT category=electronics filter") + } + } + t.Logf("NOT filter search returned %d hits", result.TotalCount) + }) + + t.Run("SearchWithTermsFilter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + Filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"electronics", "office"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with terms filter failed: %v", err) + } + if result.TotalCount < 3 { + t.Fatalf("Expected at least 3 results for category IN [electronics, office], got %d", result.TotalCount) + } + for _, hit := range result.Hits { + cat, ok := hit.Representation["category"].(string) + if !ok { + t.Fatal("Expected category field in hit representation") + } + if cat != "electronics" && cat != "office" { + t.Fatalf("Expected category to be electronics or office, got %q", cat) + } + } + t.Logf("Terms filter search returned %d hits", result.TotalCount) + }) + + t.Run("SearchHitIdentity", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "keyboard", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search failed: %v", err) + } + if len(result.Hits) == 0 { + t.Fatal("Expected at least one hit") + } + hit := result.Hits[0] + if hit.Identity.TypeName != "Product" { + t.Fatalf("Expected Identity.TypeName to be 'Product', got %q", hit.Identity.TypeName) + } + typename, ok := hit.Representation["__typename"] + if !ok { + t.Fatal("Expected __typename in hit Representation") + } + if typename != "Product" { + t.Fatalf("Expected Representation[__typename] to be 'Product', got %v", typename) + } + t.Logf("Hit identity: TypeName=%s, KeyFields=%v, __typename=%v", hit.Identity.TypeName, hit.Identity.KeyFields, typename) + }) + + t.Run("TypeNameFilter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "", + TypeName: "Product", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search with TypeName filter failed: %v", err) + } + if result.TotalCount == 0 { + t.Fatal("Expected at least one result for TypeName=Product") + } + for _, hit := range result.Hits { + if hit.Identity.TypeName != 
"Product" { + t.Fatalf("Expected all hits to have TypeName 'Product', got %q", hit.Identity.TypeName) + } + } + t.Logf("TypeName filter search returned %d hits", result.TotalCount) + }) + + t.Run("IndexSingleDocument", func(t *testing.T) { + doc := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "4"}, + }, + Fields: map[string]any{ + "title": "Monitor Stand", + "description": "Adjustable monitor stand with cable management", + "category": "office", + "price": 59.99, + "inStock": true, + }, + } + err := idx.IndexDocument(ctx, doc) + if err != nil { + t.Fatalf("IndexDocument failed: %v", err) + } + + time.Sleep(2 * time.Second) + + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "monitor stand", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search after IndexDocument failed: %v", err) + } + if result.TotalCount == 0 { + t.Fatal("Expected to find the newly indexed document") + } + }) + + t.Run("DeleteDocument", func(t *testing.T) { + err := idx.DeleteDocument(ctx, searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "4"}, + }) + if err != nil { + t.Fatalf("DeleteDocument failed: %v", err) + } + + time.Sleep(2 * time.Second) + + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "monitor stand", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search after delete failed: %v", err) + } + for _, hit := range result.Hits { + if kf, ok := hit.Identity.KeyFields["id"]; ok && kf == "4" { + t.Fatal("Document should have been deleted but was found") + } + } + }) + + t.Run("DeleteDocuments", func(t *testing.T) { + ids := []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + } + err := idx.DeleteDocuments(ctx, ids) + if err != nil { + t.Fatalf("DeleteDocuments failed: %v", err) + } + }) + }) + }) + + t.Run("FactoryValidation", func(t *testing.T) { + _, err := factory.CreateIndex(ctx, "test", schema, []byte(`{}`)) + if err == nil { + t.Fatal("Expected error for missing app_id and api_key") + } + + _, err = factory.CreateIndex(ctx, "test", schema, []byte(`not json`)) + if err == nil { + t.Fatal("Expected error for invalid JSON config") + } + }) +} + +func TestDocumentObjectID(t *testing.T) { + tests := []struct { + name string + identity searchindex.DocumentIdentity + expected string + }{ + { + name: "single key field", + identity: searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123"}, + }, + expected: "Product:id=123", + }, + { + name: "multiple key fields sorted", + identity: searchindex.DocumentIdentity{ + TypeName: "Order", + KeyFields: map[string]any{"userId": "u1", "orderId": "o1"}, + }, + expected: "Order:orderId=o1,userId=u1", + }, + { + name: "no key fields", + identity: searchindex.DocumentIdentity{ + TypeName: "Singleton", + }, + expected: "Singleton", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := documentObjectID(tt.identity) + if got != tt.expected { + t.Errorf("documentObjectID() = %q, want %q", got, tt.expected) + } + }) + } +} + +func TestBuildFilterString(t *testing.T) { + tests := []struct { + name string + filter *searchindex.Filter + expected string + }{ + { + name: "term filter string", + filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: 
"electronics"}, + }, + expected: "category:electronics", + }, + { + name: "term filter bool true", + filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: true}, + }, + expected: "inStock:true", + }, + { + name: "term filter bool false", + filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + expected: "inStock:false", + }, + { + name: "range filter", + filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: 10.0, + LTE: 100.0, + }, + }, + expected: "price >= 10 AND price <= 100", + }, + { + name: "AND filter", + filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "electronics"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + expected: "(category:electronics AND inStock:true)", + }, + { + name: "OR filter", + filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "electronics"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "office"}}, + }, + }, + expected: "(category:electronics OR category:office)", + }, + { + name: "NOT filter", + filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "electronics"}, + }, + }, + expected: "NOT category:electronics", + }, + { + name: "terms filter", + filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"electronics", "office"}, + }, + }, + expected: "(category:electronics OR category:office)", + }, + { + name: "nil filter", + filter: nil, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := buildFilterString(tt.filter) + if got != tt.expected { + t.Errorf("buildFilterString() = %q, want %q", got, tt.expected) + } + }) + } +} diff --git a/v2/pkg/searchindex/bleve/bleve.go b/v2/pkg/searchindex/bleve/bleve.go new file mode 100644 index 0000000000..a6ad5e3dbe --- /dev/null +++ b/v2/pkg/searchindex/bleve/bleve.go @@ -0,0 +1,784 @@ +// Package bleve provides a Bleve-backed implementation of the searchindex.Index +// and searchindex.IndexFactory interfaces. Bleve is a pure-Go full-text search +// library; it does not support vector search, so vector fields are silently +// ignored during indexing and search. +package bleve + +import ( + "context" + "encoding/json" + "fmt" + "sort" + "strings" + + "github.com/blevesearch/bleve/v2" + "github.com/blevesearch/bleve/v2/mapping" + "github.com/blevesearch/bleve/v2/search" + "github.com/blevesearch/bleve/v2/search/query" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Ensure compile-time interface conformance. +var ( + _ searchindex.Index = (*Index)(nil) + _ searchindex.IndexFactory = (*Factory)(nil) +) + +// reservedTypeNameField is the Bleve document field used to store the entity +// type name so we can reconstruct DocumentIdentity on search results and +// filter by TypeName in SearchRequest. +const reservedTypeNameField = "_typeName" + +// reservedKeyFieldsField stores the JSON-encoded key fields map so we can +// reconstruct the DocumentIdentity from a search hit. +const reservedKeyFieldsField = "_keyFieldsJSON" + +// Factory implements searchindex.IndexFactory for Bleve. +type Factory struct{} + +// NewFactory returns a new Bleve IndexFactory. 
+func NewFactory() *Factory { + return &Factory{} +} + +// CreateIndex creates a new in-memory Bleve index configured according to the +// given IndexConfig. The configJSON parameter is currently unused but reserved +// for future backend-specific tuning. +func (f *Factory) CreateIndex(_ context.Context, name string, schema searchindex.IndexConfig, _ []byte) (searchindex.Index, error) { + indexMapping := bleve.NewIndexMapping() + + docMapping := bleve.NewDocumentMapping() + + // Map each field from the schema. + for _, fc := range schema.Fields { + fm := fieldMapping(fc) + if fm == nil { + // e.g. vector fields; skip. + continue + } + docMapping.AddFieldMappingsAt(fc.Name, fm) + } + + // Add internal metadata fields. + kwMapping := mapping.NewKeywordFieldMapping() + kwMapping.Store = true + kwMapping.Index = true + docMapping.AddFieldMappingsAt(reservedTypeNameField, kwMapping) + + keyFieldsMapping := mapping.NewKeywordFieldMapping() + keyFieldsMapping.Store = true + keyFieldsMapping.Index = false + docMapping.AddFieldMappingsAt(reservedKeyFieldsField, keyFieldsMapping) + + indexMapping.DefaultMapping = docMapping + + idx, err := bleve.NewMemOnly(indexMapping) + if err != nil { + return nil, fmt.Errorf("bleve: failed to create in-memory index %q: %w", name, err) + } + + return &Index{ + name: name, + idx: idx, + schema: schema, + }, nil +} + +// fieldMapping returns the appropriate Bleve field mapping for a FieldConfig, +// or nil if the field type is not supported (e.g. vectors). +func fieldMapping(fc searchindex.FieldConfig) *mapping.FieldMapping { + switch fc.Type { + case searchindex.FieldTypeText: + fm := bleve.NewTextFieldMapping() + fm.Store = true + fm.Index = true + fm.IncludeTermVectors = true + return fm + case searchindex.FieldTypeKeyword: + fm := mapping.NewKeywordFieldMapping() + fm.Store = true + fm.Index = true + return fm + case searchindex.FieldTypeNumeric: + fm := mapping.NewNumericFieldMapping() + fm.Store = true + fm.Index = true + return fm + case searchindex.FieldTypeBool: + fm := mapping.NewBooleanFieldMapping() + fm.Store = true + fm.Index = true + return fm + case searchindex.FieldTypeVector: + // Bleve does not support vector fields. + return nil + case searchindex.FieldTypeGeo: + // Bleve does not support geo fields. + return nil + case searchindex.FieldTypeDate, searchindex.FieldTypeDateTime: + fm := mapping.NewDateTimeFieldMapping() + fm.Store = true + fm.Index = true + return fm + default: + return nil + } +} + +// Index implements searchindex.Index backed by a Bleve in-memory index. +type Index struct { + name string + idx bleve.Index + schema searchindex.IndexConfig +} + +// documentID computes a deterministic string ID from a DocumentIdentity. +// Format: TypeName:key1=val1,key2=val2,... (keys sorted alphabetically). +func documentID(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return id.TypeName + } + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(id.TypeName) + b.WriteByte(':') + for i, k := range keys { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(k) + b.WriteByte('=') + fmt.Fprintf(&b, "%v", id.KeyFields[k]) + } + return b.String() +} + +// buildDoc converts an EntityDocument into a flat map suitable for Bleve +// indexing. It includes all Fields plus internal metadata. 
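+// For example, a Product with key {"id": "1"} and a "name" field flattens to
+// {"name": ..., "_typeName": "Product", "_keyFieldsJSON": "{\"id\":\"1\"}"}.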
+func buildDoc(doc searchindex.EntityDocument) (map[string]any, error) { + m := make(map[string]any, len(doc.Fields)+2) + for k, v := range doc.Fields { + m[k] = v + } + m[reservedTypeNameField] = doc.Identity.TypeName + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return nil, fmt.Errorf("bleve: failed to marshal key fields: %w", err) + } + m[reservedKeyFieldsField] = string(keyFieldsJSON) + return m, nil +} + +// IndexDocument indexes a single document. +func (idx *Index) IndexDocument(_ context.Context, doc searchindex.EntityDocument) error { + id := documentID(doc.Identity) + m, err := buildDoc(doc) + if err != nil { + return err + } + if err := idx.idx.Index(id, m); err != nil { + return fmt.Errorf("bleve: index document %q: %w", id, err) + } + return nil +} + +// IndexDocuments indexes a batch of documents. Bleve's Batch API is used for +// efficiency. +func (idx *Index) IndexDocuments(_ context.Context, docs []searchindex.EntityDocument) error { + batch := idx.idx.NewBatch() + for _, doc := range docs { + id := documentID(doc.Identity) + m, err := buildDoc(doc) + if err != nil { + return err + } + if err := batch.Index(id, m); err != nil { + return fmt.Errorf("bleve: batch index document %q: %w", id, err) + } + } + if err := idx.idx.Batch(batch); err != nil { + return fmt.Errorf("bleve: batch commit: %w", err) + } + return nil +} + +// DeleteDocument deletes a single document by identity. +func (idx *Index) DeleteDocument(_ context.Context, id searchindex.DocumentIdentity) error { + docID := documentID(id) + if err := idx.idx.Delete(docID); err != nil { + return fmt.Errorf("bleve: delete document %q: %w", docID, err) + } + return nil +} + +// DeleteDocuments deletes a batch of documents by identity. +func (idx *Index) DeleteDocuments(_ context.Context, ids []searchindex.DocumentIdentity) error { + batch := idx.idx.NewBatch() + for _, id := range ids { + batch.Delete(documentID(id)) + } + if err := idx.idx.Batch(batch); err != nil { + return fmt.Errorf("bleve: batch delete: %w", err) + } + return nil +} + +// Search performs a search query and returns results. +func (idx *Index) Search(_ context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + q, err := idx.buildQuery(req) + if err != nil { + return nil, err + } + + isCursorMode := len(req.SearchAfter) > 0 || len(req.SearchBefore) > 0 + offset := req.Offset + if isCursorMode { + offset = 0 // cursor mode ignores offset + } + + bleveReq := bleve.NewSearchRequestOptions(q, effectiveLimit(req.Limit), offset, false) + bleveReq.IncludeLocations = true + + // Highlight. + bleveReq.Highlight = bleve.NewHighlight() + + // Sorting. + if len(req.Sort) > 0 { + sortOrder := make(search.SortOrder, 0, len(req.Sort)) + for _, sf := range req.Sort { + ss := &search.SortField{ + Field: sf.Field, + Desc: !sf.Ascending, + Type: search.SortFieldAuto, + Missing: search.SortFieldMissingLast, + } + sortOrder = append(sortOrder, ss) + } + bleveReq.SortByCustom(sortOrder) + } + + // Cursor-based pagination. + if len(req.SearchAfter) > 0 { + bleveReq.SearchAfter = req.SearchAfter + } + if len(req.SearchBefore) > 0 { + bleveReq.SearchBefore = req.SearchBefore + } + + // Request stored fields so we can reconstruct the document. + bleveReq.Fields = []string{"*"} + + // Facets. 
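+ // Each requested facet becomes a Bleve terms facet; a non-positive Size
+ // defaults to 10 below.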
+ for _, fr := range req.Facets { + size := fr.Size + if size <= 0 { + size = 10 + } + bleveReq.AddFacet(fr.Field, bleve.NewFacetRequest(fr.Field, size)) + } + + result, err := idx.idx.Search(bleveReq) + if err != nil { + return nil, fmt.Errorf("bleve: search failed: %w", err) + } + + hits := make([]searchindex.SearchHit, 0, len(result.Hits)) + for _, hit := range result.Hits { + sh, err := convertHit(hit) + if err != nil { + return nil, err + } + hits = append(hits, sh) + } + + facets := convertFacets(result.Facets) + + return &searchindex.SearchResult{ + Hits: hits, + TotalCount: int(result.Total), + Facets: facets, + }, nil +} + +// Autocomplete returns terms from the Bleve index dictionary matching the given prefix. +func (idx *Index) Autocomplete(_ context.Context, req searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) { + prefix := strings.ToLower(req.Prefix) + limit := req.Limit + if limit <= 0 { + limit = 10 + } + + dict, err := idx.idx.FieldDictPrefix(req.Field, []byte(prefix)) + if err != nil { + return nil, fmt.Errorf("bleve: field dict prefix for %q: %w", req.Field, err) + } + defer dict.Close() + + var terms []searchindex.AutocompleteTerm + for { + entry, err := dict.Next() + if err != nil { + return nil, fmt.Errorf("bleve: iterating field dict: %w", err) + } + if entry == nil { + break + } + terms = append(terms, searchindex.AutocompleteTerm{ + Term: entry.Term, + Count: int(entry.Count), + }) + if len(terms) >= limit { + break + } + } + + return &searchindex.AutocompleteResult{Terms: terms}, nil +} + +// Close releases resources held by the index. +func (idx *Index) Close() error { + if err := idx.idx.Close(); err != nil { + return fmt.Errorf("bleve: close index %q: %w", idx.name, err) + } + return nil +} + +// effectiveLimit returns a sensible default if limit is zero or negative. +func effectiveLimit(limit int) int { + if limit <= 0 { + return 10 + } + return limit +} + +// buildQuery constructs the top-level Bleve query from a SearchRequest. +func (idx *Index) buildQuery(req searchindex.SearchRequest) (query.Query, error) { + var parts []query.Query + + // Text query. + if req.TextQuery != "" { + if len(req.TextFields) > 0 { + // Build a disjunction of match queries scoped to each field. + fieldQueries := make([]query.Query, 0, len(req.TextFields)) + for _, tf := range req.TextFields { + mq := bleve.NewMatchQuery(req.TextQuery) + mq.SetField(tf.Name) + if tf.Weight != 0 && tf.Weight != 1.0 { + mq.SetBoost(tf.Weight) + } + if req.Fuzziness != nil { + mq.SetFuzziness(int(*req.Fuzziness)) + } + fieldQueries = append(fieldQueries, mq) + } + fieldDisjunction := bleve.NewDisjunctionQuery(fieldQueries...) + parts = append(parts, fieldDisjunction) + } else { + mq := bleve.NewMatchQuery(req.TextQuery) + if req.Fuzziness != nil { + mq.SetFuzziness(int(*req.Fuzziness)) + } + parts = append(parts, mq) + } + } + + // Vector query: Bleve doesn't support vectors; silently ignore. + + // TypeName filter. + if req.TypeName != "" { + tq := bleve.NewTermQuery(req.TypeName) + tq.SetField(reservedTypeNameField) + parts = append(parts, tq) + } + + // Structured filter. + if req.Filter != nil { + fq, err := translateFilter(req.Filter) + if err != nil { + return nil, err + } + if fq != nil { + parts = append(parts, fq) + } + } + + // Combine everything. 
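+ // Zero parts means an unrestricted match-all search; a single part is used
+ // as-is; multiple parts must all match (conjunction).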
+ switch len(parts) { + case 0: + return bleve.NewMatchAllQuery(), nil + case 1: + return parts[0], nil + default: + return bleve.NewConjunctionQuery(parts...), nil + } +} + +// translateFilter recursively converts a searchindex.Filter tree to a Bleve +// query tree. +func translateFilter(f *searchindex.Filter) (query.Query, error) { + if f == nil { + return nil, nil + } + + // AND + if len(f.And) > 0 { + children := make([]query.Query, 0, len(f.And)) + for _, child := range f.And { + cq, err := translateFilter(child) + if err != nil { + return nil, err + } + if cq != nil { + children = append(children, cq) + } + } + if len(children) == 0 { + return nil, nil + } + return bleve.NewConjunctionQuery(children...), nil + } + + // OR + if len(f.Or) > 0 { + children := make([]query.Query, 0, len(f.Or)) + for _, child := range f.Or { + cq, err := translateFilter(child) + if err != nil { + return nil, err + } + if cq != nil { + children = append(children, cq) + } + } + if len(children) == 0 { + return nil, nil + } + return bleve.NewDisjunctionQuery(children...), nil + } + + // NOT + if f.Not != nil { + inner, err := translateFilter(f.Not) + if err != nil { + return nil, err + } + if inner == nil { + return nil, nil + } + boolQ := bleve.NewBooleanQuery() + boolQ.AddMustNot(inner) + // A boolean query with only MustNot needs a Must to establish the + // universe of documents. + boolQ.AddMust(bleve.NewMatchAllQuery()) + return boolQ, nil + } + + // Term + if f.Term != nil { + return translateTermFilter(f.Term) + } + + // Terms (IN) + if f.Terms != nil { + return translateTermsFilter(f.Terms) + } + + // Range + if f.Range != nil { + return translateRangeFilter(f.Range) + } + + // Prefix + if f.Prefix != nil { + pq := bleve.NewPrefixQuery(f.Prefix.Value) + pq.SetField(f.Prefix.Field) + return pq, nil + } + + // Exists: use a wildcard query that matches any term in the field. + if f.Exists != nil { + // A regexp ".*" on the field will match any value. + rq := bleve.NewRegexpQuery(".*") + rq.SetField(f.Exists.Field) + return rq, nil + } + + return nil, nil +} + +// translateTermFilter converts a TermFilter to a Bleve query. For string +// values it uses TermQuery; for numeric values it uses a point +// NumericRangeQuery (min==max, inclusive); for bool it uses a bool field query. +func translateTermFilter(tf *searchindex.TermFilter) (query.Query, error) { + switch v := tf.Value.(type) { + case string: + tq := bleve.NewTermQuery(v) + tq.SetField(tf.Field) + return tq, nil + case float64: + return numericPoint(tf.Field, v), nil + case float32: + return numericPoint(tf.Field, float64(v)), nil + case int: + return numericPoint(tf.Field, float64(v)), nil + case int64: + return numericPoint(tf.Field, float64(v)), nil + case json.Number: + n, err := v.Float64() + if err != nil { + return nil, fmt.Errorf("bleve: invalid numeric term value: %w", err) + } + return numericPoint(tf.Field, n), nil + case bool: + bq := bleve.NewBoolFieldQuery(v) + bq.SetField(tf.Field) + return bq, nil + default: + // Fall back to string representation. + tq := bleve.NewTermQuery(fmt.Sprintf("%v", v)) + tq.SetField(tf.Field) + return tq, nil + } +} + +// numericPoint creates a numeric range query matching exactly one value. +func numericPoint(field string, val float64) query.Query { + inclusive := true + q := bleve.NewNumericRangeInclusiveQuery(&val, &val, &inclusive, &inclusive) + q.SetField(field) + return q +} + +// translateTermsFilter converts a TermsFilter (IN operator) to a Bleve +// disjunction of term queries. 
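+// For example, category IN ["Footwear", "Accessories"] becomes a disjunction
+// of two term queries on the "category" field.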
+func translateTermsFilter(tf *searchindex.TermsFilter) (query.Query, error) { + if len(tf.Values) == 0 { + return nil, nil + } + parts := make([]query.Query, 0, len(tf.Values)) + for _, val := range tf.Values { + tq, err := translateTermFilter(&searchindex.TermFilter{ + Field: tf.Field, + Value: val, + }) + if err != nil { + return nil, err + } + parts = append(parts, tq) + } + return bleve.NewDisjunctionQuery(parts...), nil +} + +// translateRangeFilter converts a RangeFilter to a Bleve numeric or date range query. +// Date range is used when the bound values are strings (ISO 8601 date/datetime). +func translateRangeFilter(rf *searchindex.RangeFilter) (query.Query, error) { + if isDateRange(rf) { + return translateDateRangeFilter(rf) + } + + var minVal, maxVal *float64 + var minInclusive, maxInclusive *bool + + // Determine lower bound. + if rf.GTE != nil { + v, err := toFloat64(rf.GTE) + if err != nil { + return nil, fmt.Errorf("bleve: range GTE: %w", err) + } + minVal = &v + t := true + minInclusive = &t + } else if rf.HasGT && rf.GT != nil { + v, err := toFloat64(rf.GT) + if err != nil { + return nil, fmt.Errorf("bleve: range GT: %w", err) + } + minVal = &v + f := false + minInclusive = &f + } + + // Determine upper bound. + if rf.LTE != nil { + v, err := toFloat64(rf.LTE) + if err != nil { + return nil, fmt.Errorf("bleve: range LTE: %w", err) + } + maxVal = &v + t := true + maxInclusive = &t + } else if rf.HasLT && rf.LT != nil { + v, err := toFloat64(rf.LT) + if err != nil { + return nil, fmt.Errorf("bleve: range LT: %w", err) + } + maxVal = &v + f := false + maxInclusive = &f + } + + q := bleve.NewNumericRangeInclusiveQuery(minVal, maxVal, minInclusive, maxInclusive) + q.SetField(rf.Field) + return q, nil +} + +// isDateRange returns true if any of the range bound values are strings (date values). +func isDateRange(rf *searchindex.RangeFilter) bool { + for _, v := range []any{rf.GTE, rf.GT, rf.LTE, rf.LT} { + if _, ok := v.(string); ok { + return true + } + } + return false +} + +// translateDateRangeFilter converts a RangeFilter with string date values to a Bleve date range query. +func translateDateRangeFilter(rf *searchindex.RangeFilter) (query.Query, error) { + var minStr, maxStr string + var minInclusive, maxInclusive *bool + + if rf.GTE != nil { + minStr = rf.GTE.(string) + t := true + minInclusive = &t + } else if rf.HasGT && rf.GT != nil { + minStr = rf.GT.(string) + f := false + minInclusive = &f + } + + if rf.LTE != nil { + maxStr = rf.LTE.(string) + t := true + maxInclusive = &t + } else if rf.HasLT && rf.LT != nil { + maxStr = rf.LT.(string) + f := false + maxInclusive = &f + } + + q := bleve.NewDateRangeInclusiveStringQuery(minStr, maxStr, minInclusive, maxInclusive) + q.SetField(rf.Field) + return q, nil +} + +// toFloat64 converts an any value to float64. +func toFloat64(v any) (float64, error) { + switch n := v.(type) { + case float64: + return n, nil + case float32: + return float64(n), nil + case int: + return float64(n), nil + case int64: + return float64(n), nil + case int32: + return float64(n), nil + case json.Number: + return n.Float64() + default: + return 0, fmt.Errorf("cannot convert %T to float64", v) + } +} + +// convertHit transforms a Bleve search.DocumentMatch into a searchindex.SearchHit. +func convertHit(hit *search.DocumentMatch) (searchindex.SearchHit, error) { + identity, err := extractIdentity(hit.Fields) + if err != nil { + return searchindex.SearchHit{}, err + } + + // Build representation from stored fields, excluding internal fields. 
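+ // (reservedTypeNameField and reservedKeyFieldsField are dropped here;
+ // __typename and the key fields are re-added from the identity below.)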
+ representation := make(map[string]any, len(hit.Fields)) + for k, v := range hit.Fields { + if k == reservedTypeNameField || k == reservedKeyFieldsField { + continue + } + representation[k] = v + } + + // Add __typename. + representation["__typename"] = identity.TypeName + // Merge key fields into representation. + for k, v := range identity.KeyFields { + representation[k] = v + } + + // Highlights. + var highlights map[string][]string + if len(hit.Fragments) > 0 { + highlights = make(map[string][]string, len(hit.Fragments)) + for field, frags := range hit.Fragments { + highlights[field] = frags + } + } + + // Populate SortValues from hit.Sort for cursor-based pagination. + var sortValues []string + if len(hit.Sort) > 0 { + sortValues = make([]string, len(hit.Sort)) + copy(sortValues, hit.Sort) + } + + return searchindex.SearchHit{ + Identity: identity, + Score: hit.Score, + Highlights: highlights, + Representation: representation, + SortValues: sortValues, + }, nil +} + +// extractIdentity reconstructs a DocumentIdentity from stored Bleve fields. +func extractIdentity(fields map[string]any) (searchindex.DocumentIdentity, error) { + typeName, _ := fields[reservedTypeNameField].(string) + keyFieldsRaw, _ := fields[reservedKeyFieldsField].(string) + + var keyFields map[string]any + if keyFieldsRaw != "" { + if err := json.Unmarshal([]byte(keyFieldsRaw), &keyFields); err != nil { + return searchindex.DocumentIdentity{}, fmt.Errorf("bleve: failed to unmarshal key fields: %w", err) + } + } + if keyFields == nil { + keyFields = make(map[string]any) + } + + return searchindex.DocumentIdentity{ + TypeName: typeName, + KeyFields: keyFields, + }, nil +} + +// convertFacets transforms Bleve facet results into the searchindex format. +func convertFacets(bleveFacets search.FacetResults) map[string]searchindex.FacetResult { + if len(bleveFacets) == 0 { + return nil + } + facets := make(map[string]searchindex.FacetResult, len(bleveFacets)) + for name, fr := range bleveFacets { + var values []searchindex.FacetValue + if fr.Terms != nil { + terms := fr.Terms.Terms() + values = make([]searchindex.FacetValue, 0, len(terms)) + for _, term := range terms { + values = append(values, searchindex.FacetValue{ + Value: term.Term, + Count: term.Count, + }) + } + } + facets[name] = searchindex.FacetResult{ + Values: values, + } + } + return facets +} diff --git a/v2/pkg/searchindex/bleve/bleve_test.go b/v2/pkg/searchindex/bleve/bleve_test.go new file mode 100644 index 0000000000..6ac23a094c --- /dev/null +++ b/v2/pkg/searchindex/bleve/bleve_test.go @@ -0,0 +1,468 @@ +package bleve + +import ( + "context" + "testing" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func newTestIndex(t *testing.T) searchindex.Index { + t.Helper() + factory := NewFactory() + schema := searchindex.IndexConfig{ + Name: "test", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + }, + } + idx, err := factory.CreateIndex(context.Background(), "test", schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func populateTestData(t *testing.T, idx searchindex.Index) { + 
t.Helper() + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + }, + } + if err := idx.IndexDocuments(context.Background(), docs); err != nil { + t.Fatalf("IndexDocuments: %v", err) + } +} + +func TestIndexAndSearch(t *testing.T) { + idx := newTestIndex(t) + populateTestData(t, idx) + + t.Run("text search", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 2 { + t.Errorf("expected at least 2 hits for 'shoes', got %d", result.TotalCount) + } + }) + + t.Run("text search with field restriction", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + TextFields: []searchindex.TextFieldWeight{{Name: "name"}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 2 { + t.Errorf("expected at least 2 hits for 'shoes' in name, got %d", result.TotalCount) + } + }) + + t.Run("term filter on keyword field", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 hits for category=Footwear, got %d", result.TotalCount) + } + }) + + t.Run("boolean filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Errorf("expected 1 hit for inStock=false, got %d", result.TotalCount) + } + }) + + t.Run("numeric range filter", func(t *testing.T) { + gte := 30.0 + lte := 100.0 + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: gte, + LTE: lte, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 2 { + t.Errorf("expected 2 hits for price 30-100, got %d", result.TotalCount) + } + }) + + t.Run("prefix filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + 
Prefix: &searchindex.PrefixFilter{Field: "category", Value: "Foot"},
+ },
+ Limit: 10,
+ })
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ if result.TotalCount != 3 {
+ t.Errorf("expected 3 hits for category prefix 'Foot', got %d", result.TotalCount)
+ }
+ })
+
+ t.Run("AND filter", func(t *testing.T) {
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{
+ Filter: &searchindex.Filter{
+ And: []*searchindex.Filter{
+ {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}},
+ {Term: &searchindex.TermFilter{Field: "inStock", Value: true}},
+ },
+ },
+ Limit: 10,
+ })
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ // All three Footwear products (Running Shoes, Basketball Shoes, Wool Socks)
+ // are in stock, so the conjunction matches all 3.
+ if result.TotalCount != 3 {
+ t.Errorf("expected 3 hits for Footwear AND inStock, got %d", result.TotalCount)
+ }
+ })
+
+ t.Run("NOT filter", func(t *testing.T) {
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{
+ Filter: &searchindex.Filter{
+ Not: &searchindex.Filter{
+ Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"},
+ },
+ },
+ Limit: 10,
+ })
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ if result.TotalCount != 1 {
+ t.Errorf("expected 1 hit for NOT Footwear, got %d", result.TotalCount)
+ }
+ })
+
+ t.Run("sorting", func(t *testing.T) {
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{
+ Sort: []searchindex.SortField{{Field: "price", Ascending: true}},
+ Limit: 10,
+ })
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ if result.TotalCount < 4 {
+ t.Fatalf("expected 4 hits, got %d", result.TotalCount)
+ }
+ // First hit should be cheapest (Wool Socks at 12.99)
+ if result.Hits[0].Representation["name"] != "Wool Socks" {
+ t.Errorf("expected first hit to be Wool Socks (cheapest), got %v", result.Hits[0].Representation["name"])
+ }
+ })
+
+ t.Run("pagination", func(t *testing.T) {
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{
+ Sort: []searchindex.SortField{{Field: "price", Ascending: true}},
+ Limit: 2,
+ Offset: 2,
+ })
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ if len(result.Hits) != 2 {
+ t.Errorf("expected 2 hits with offset, got %d", len(result.Hits))
+ }
+ })
+
+ t.Run("facets", func(t *testing.T) {
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{
+ Facets: []searchindex.FacetRequest{{Field: "category", Size: 10}},
+ Limit: 10,
+ })
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ facet, ok := result.Facets["category"]
+ if !ok {
+ t.Fatal("expected category facet")
+ }
+ if len(facet.Values) < 2 {
+ t.Errorf("expected at least 2 facet values, got %d", len(facet.Values))
+ }
+ })
+
+ t.Run("search hit identity", func(t *testing.T) {
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{
+ TextQuery: "running shoes",
+ Limit: 1,
+ })
+ if err != nil {
+ t.Fatalf("Search: %v", err)
+ }
+ if len(result.Hits) == 0 {
+ t.Fatal("expected at least 1 hit")
+ }
+ hit := result.Hits[0]
+ if hit.Identity.TypeName != "Product" {
+ t.Errorf("TypeName = %q, want %q", hit.Identity.TypeName, "Product")
+ }
+ if hit.Representation["__typename"] != "Product" {
+ t.Errorf("__typename = %v, want %q", hit.Representation["__typename"], "Product")
+ }
+ })
+
+ t.Run("OR filter", func(t *testing.T) {
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{
+ Filter: 
&searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for category=Footwear OR category=Accessories, got %d", result.TotalCount) + } + }) + + t.Run("Terms (IN) filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"Footwear", "Accessories"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for category IN [Footwear, Accessories], got %d", result.TotalCount) + } + }) + + t.Run("TypeName filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TypeName: "Product", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for TypeName=Product, got %d", result.TotalCount) + } + }) +} + +func TestFuzzySearch(t *testing.T) { + idx := newTestIndex(t) + populateTestData(t, idx) + + t.Run("fuzziness LOW finds typo", func(t *testing.T) { + fuzz := searchindex.FuzzinessLow + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "runing", + Fuzziness: &fuzz, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 1 { + t.Errorf("expected >=1 hit for 'runing' with fuzziness LOW, got %d", result.TotalCount) + } + }) + + t.Run("fuzziness EXACT misses typo", func(t *testing.T) { + fuzz := searchindex.FuzzinessExact + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "runing", + Fuzziness: &fuzz, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 0 { + t.Errorf("expected 0 hits for 'runing' with fuzziness EXACT, got %d", result.TotalCount) + } + }) +} + +func TestDeleteDocument(t *testing.T) { + idx := newTestIndex(t) + populateTestData(t, idx) + + // Delete Running Shoes + err := idx.DeleteDocument(context.Background(), searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }) + if err != nil { + t.Fatalf("DeleteDocument: %v", err) + } + + // Should now have 3 documents + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 documents after delete, got %d", result.TotalCount) + } +} + +func TestDeleteDocuments(t *testing.T) { + idx := newTestIndex(t) + populateTestData(t, idx) + + // Delete two documents + err := idx.DeleteDocuments(context.Background(), []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + }) + if err != nil { + t.Fatalf("DeleteDocuments: %v", err) + } + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 2 { + t.Errorf("expected 2 documents after batch delete, got %d", result.TotalCount) + } +} + +func TestIndexSingleDocument(t *testing.T) { + idx := newTestIndex(t) + + doc := searchindex.EntityDocument{ + 
Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "99"}}, + Fields: map[string]any{"name": "Sandals", "description": "Comfortable summer sandals", "category": "Footwear", "price": 49.99, "inStock": true}, + } + if err := idx.IndexDocument(context.Background(), doc); err != nil { + t.Fatalf("IndexDocument: %v", err) + } + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "sandals", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Errorf("expected 1 hit for 'sandals', got %d", result.TotalCount) + } + if len(result.Hits) == 0 { + t.Fatal("expected at least 1 hit") + } + if result.Hits[0].Identity.TypeName != "Product" { + t.Errorf("TypeName = %q, want %q", result.Hits[0].Identity.TypeName, "Product") + } +} + +func TestUpsertDocument(t *testing.T) { + idx := newTestIndex(t) + populateTestData(t, idx) + + // Re-index product id "1" with an updated name + updated := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Trail Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + } + if err := idx.IndexDocument(context.Background(), updated); err != nil { + t.Fatalf("IndexDocument (upsert): %v", err) + } + + // Total count should still be 4 (upsert, not insert) + allResult, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if allResult.TotalCount != 4 { + t.Errorf("expected 4 total documents after upsert, got %d", allResult.TotalCount) + } + + // Search for "trail" should return the updated document + trailResult, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "trail", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if trailResult.TotalCount != 1 { + t.Errorf("expected 1 hit for 'trail', got %d", trailResult.TotalCount) + } + if len(trailResult.Hits) == 0 { + t.Fatal("expected at least 1 hit for 'trail'") + } + if trailResult.Hits[0].Representation["name"] != "Trail Running Shoes" { + t.Errorf("expected name %q, got %v", "Trail Running Shoes", trailResult.Hits[0].Representation["name"]) + } +} + +func TestDocumentID(t *testing.T) { + id := documentID(searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123", "sku": "ABC"}, + }) + // Keys should be sorted alphabetically + expected := "Product:id=123,sku=ABC" + if id != expected { + t.Errorf("documentID = %q, want %q", id, expected) + } +} diff --git a/v2/pkg/searchindex/config.go b/v2/pkg/searchindex/config.go new file mode 100644 index 0000000000..c9ab9b176c --- /dev/null +++ b/v2/pkg/searchindex/config.go @@ -0,0 +1,79 @@ +package searchindex + +// FieldType defines the type of indexing for a field. +type FieldType int + +const ( + FieldTypeText FieldType = iota // Analyzed full-text search + FieldTypeKeyword // Exact match, not analyzed + FieldTypeNumeric // Numeric range queries + FieldTypeBool // Boolean filtering + FieldTypeVector // Pre-computed embedding vector + FieldTypeGeo // Latitude/longitude geo-point + FieldTypeDate // Calendar date (ISO 8601 full-date, e.g. "2024-01-15") + FieldTypeDateTime // Instant (RFC 3339, e.g. 
"2024-01-15T10:30:00.000Z") +) + +func (f FieldType) String() string { + switch f { + case FieldTypeText: + return "TEXT" + case FieldTypeKeyword: + return "KEYWORD" + case FieldTypeNumeric: + return "NUMERIC" + case FieldTypeBool: + return "BOOL" + case FieldTypeVector: + return "VECTOR" + case FieldTypeGeo: + return "GEO" + case FieldTypeDate: + return "DATE" + case FieldTypeDateTime: + return "DATETIME" + default: + return "UNKNOWN" + } +} + +// ParseFieldType converts a string to a FieldType. +func ParseFieldType(s string) (FieldType, bool) { + switch s { + case "TEXT": + return FieldTypeText, true + case "KEYWORD": + return FieldTypeKeyword, true + case "NUMERIC": + return FieldTypeNumeric, true + case "BOOL": + return FieldTypeBool, true + case "VECTOR": + return FieldTypeVector, true + case "GEO": + return FieldTypeGeo, true + case "DATE": + return FieldTypeDate, true + case "DATETIME": + return FieldTypeDateTime, true + default: + return 0, false + } +} + +// FieldConfig describes how a field is indexed. +type FieldConfig struct { + Name string + Type FieldType + Filterable bool + Sortable bool + Dimensions int // Required for FieldTypeVector + Weight float64 // Search boost for TEXT fields; 0 treated as 1.0 + Autocomplete bool // Enable term autocomplete for this field +} + +// IndexConfig describes the schema of an index. +type IndexConfig struct { + Name string + Fields []FieldConfig +} diff --git a/v2/pkg/searchindex/config_test.go b/v2/pkg/searchindex/config_test.go new file mode 100644 index 0000000000..ccab5f6210 --- /dev/null +++ b/v2/pkg/searchindex/config_test.go @@ -0,0 +1,59 @@ +package searchindex + +import "testing" + +func TestParseFieldType(t *testing.T) { + tests := []struct { + input string + expected FieldType + ok bool + }{ + {"TEXT", FieldTypeText, true}, + {"KEYWORD", FieldTypeKeyword, true}, + {"NUMERIC", FieldTypeNumeric, true}, + {"BOOL", FieldTypeBool, true}, + {"VECTOR", FieldTypeVector, true}, + {"GEO", FieldTypeGeo, true}, + {"DATE", FieldTypeDate, true}, + {"DATETIME", FieldTypeDateTime, true}, + {"UNKNOWN", 0, false}, + {"", 0, false}, + } + + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + ft, ok := ParseFieldType(tt.input) + if ok != tt.ok { + t.Errorf("ParseFieldType(%q): ok = %v, want %v", tt.input, ok, tt.ok) + } + if ok && ft != tt.expected { + t.Errorf("ParseFieldType(%q) = %v, want %v", tt.input, ft, tt.expected) + } + }) + } +} + +func TestFieldTypeString(t *testing.T) { + tests := []struct { + ft FieldType + expected string + }{ + {FieldTypeText, "TEXT"}, + {FieldTypeKeyword, "KEYWORD"}, + {FieldTypeNumeric, "NUMERIC"}, + {FieldTypeBool, "BOOL"}, + {FieldTypeVector, "VECTOR"}, + {FieldTypeGeo, "GEO"}, + {FieldTypeDate, "DATE"}, + {FieldTypeDateTime, "DATETIME"}, + {FieldType(99), "UNKNOWN"}, + } + + for _, tt := range tests { + t.Run(tt.expected, func(t *testing.T) { + if got := tt.ft.String(); got != tt.expected { + t.Errorf("FieldType.String() = %q, want %q", got, tt.expected) + } + }) + } +} diff --git a/v2/pkg/searchindex/document.go b/v2/pkg/searchindex/document.go new file mode 100644 index 0000000000..f0d5065877 --- /dev/null +++ b/v2/pkg/searchindex/document.go @@ -0,0 +1,108 @@ +package searchindex + +// DocumentIdentity uniquely identifies an entity document. +type DocumentIdentity struct { + TypeName string + KeyFields map[string]any +} + +// EntityDocument represents an entity to be indexed. 
+type EntityDocument struct { + Identity DocumentIdentity + Fields map[string]any // text/keyword/numeric/bool fields + Vectors map[string][]float32 // vector fields (field name → embedding) +} + +// Fuzziness controls typo tolerance for text search. +type Fuzziness int + +const ( + FuzzinessExact Fuzziness = 0 // no typo tolerance + FuzzinessLow Fuzziness = 1 // 1 edit distance + FuzzinessHigh Fuzziness = 2 // 2 edit distances +) + +// TextFieldWeight pairs a field name with its search weight/boost. +type TextFieldWeight struct { + Name string + Weight float64 // 0 or 1.0 = default (no boost) +} + +// SearchRequest describes a search query. +type SearchRequest struct { + // TextQuery and Vector can both be set for hybrid search (text + vector combined). + // When only TextQuery is set: BM25/full-text search. + // When only Vector is set: vector/semantic search. + // When both are set: hybrid search combining text and vector scores. + TextQuery string // free-text (BM25 for text-only, or combined with vector for hybrid) + TextFields []TextFieldWeight // text fields to search with optional per-field boost + + Vector []float32 // query embedding (can coexist with TextQuery for hybrid) + VectorField string // which vector field to search + + Filter *Filter // structured filtering + + Sort []SortField + Limit int + Offset int + Facets []FacetRequest + + TypeName string // filter to specific entity type in multi-type index + + GeoDistanceSort *GeoDistanceSort // sort by distance from a geographic point + + Fuzziness *Fuzziness // typo tolerance level (nil = backend default) + + // Cursor-based pagination: sort values from a previous hit's SortValues. + SearchAfter []string // forward cursor sort values (ignore Offset when set) + SearchBefore []string // backward cursor sort values (for last/before) +} + +// SortField defines a sort clause. +type SortField struct { + Field string + Ascending bool +} + +// GeoDistanceSort sorts results by distance from a geographic point. +type GeoDistanceSort struct { + Field string + Center GeoPoint + Ascending bool + Unit string // "km", "mi", "m" — defaults to "km" if empty +} + +// FacetRequest requests facet counts for a field. +type FacetRequest struct { + Field string + Size int // max number of facet values to return +} + +// SearchResult contains the results of a search query. +type SearchResult struct { + Hits []SearchHit + TotalCount int + Facets map[string]FacetResult +} + +// SearchHit represents a single search result. +type SearchHit struct { + Identity DocumentIdentity + Score float64 + Distance float64 // for vector search + Highlights map[string][]string // field → highlighted fragments + Representation map[string]any // e.g. {"__typename":"Product","id":"123"} + SortValues []string // sort keys for this hit, used to build cursors + GeoDistance *float64 // distance in sort unit, populated when GeoDistanceSort is used +} + +// FacetResult contains facet counts for a field. +type FacetResult struct { + Values []FacetValue +} + +// FacetValue is a single facet count entry. +type FacetValue struct { + Value string + Count int +} diff --git a/v2/pkg/searchindex/elasticsearch/elasticsearch.go b/v2/pkg/searchindex/elasticsearch/elasticsearch.go new file mode 100644 index 0000000000..adb5f407df --- /dev/null +++ b/v2/pkg/searchindex/elasticsearch/elasticsearch.go @@ -0,0 +1,1138 @@ +// Package elasticsearch implements the searchindex.Index and searchindex.IndexFactory +// interfaces for Elasticsearch and OpenSearch. 
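+// +// A minimal usage sketch (index name, schema, and config values are +// illustrative, not prescriptive): +// +// idx, err := NewFactory().CreateIndex(ctx, "products", schema, cfgJSON) +// if err != nil { +// // handle error +// } +// result, err := idx.Search(ctx, searchindex.SearchRequest{TextQuery: "shoes", Limit: 10})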
+// +// It uses only net/http and encoding/json from the standard library -- no +// external Elasticsearch SDK is required. Communication happens through the +// Elasticsearch REST API (index creation, _bulk indexing, _search, _delete_by_query). +package elasticsearch + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "sort" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Compile-time interface conformance checks. +var ( + _ searchindex.Index = (*Index)(nil) + _ searchindex.IndexFactory = (*Factory)(nil) +) + +// reservedTypeNameField is the Elasticsearch document field used to store the +// entity type name so we can reconstruct DocumentIdentity on search results +// and filter by TypeName in SearchRequest. +const reservedTypeNameField = "_typeName" + +// reservedKeyFieldsField stores the JSON-encoded key fields map so we can +// reconstruct the DocumentIdentity from a search hit. +const reservedKeyFieldsField = "_keyFieldsJSON" + +// Config holds Elasticsearch-specific configuration. It is deserialized from +// the configJSON parameter of CreateIndex. +type Config struct { + Addresses []string `json:"addresses"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + CloudID string `json:"cloud_id,omitempty"` + APIKey string `json:"api_key,omitempty"` +} + +// Factory implements searchindex.IndexFactory for Elasticsearch. +type Factory struct { + // HTTPClient allows callers to inject a custom HTTP client (e.g. for tests). + // If nil, http.DefaultClient is used. + HTTPClient *http.Client +} + +// NewFactory returns a new Elasticsearch IndexFactory. +func NewFactory() *Factory { + return &Factory{} +} + +// CreateIndex creates a new Elasticsearch index with mappings derived from the +// IndexConfig, then returns an Index handle. +func (f *Factory) CreateIndex(ctx context.Context, name string, schema searchindex.IndexConfig, configJSON []byte) (searchindex.Index, error) { + var cfg Config + if len(configJSON) > 0 { + if err := json.Unmarshal(configJSON, &cfg); err != nil { + return nil, fmt.Errorf("elasticsearch: invalid config: %w", err) + } + } + if len(cfg.Addresses) == 0 { + cfg.Addresses = []string{"http://localhost:9200"} + } + + client := f.HTTPClient + if client == nil { + client = http.DefaultClient + } + + idx := &Index{ + name: name, + config: cfg, + schema: schema, + client: client, + } + + // Build the index creation request with mappings. + mappings := buildMappings(schema) + body := map[string]any{ + "mappings": mappings, + "settings": map[string]any{ + "number_of_shards": 1, + "number_of_replicas": 0, + }, + } + + bodyBytes, err := json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("elasticsearch: marshal index body: %w", err) + } + + // PUT /{indexName} + resp, err := idx.doRequest(ctx, http.MethodPut, "/"+url.PathEscape(name), bodyBytes) + if err != nil { + return nil, fmt.Errorf("elasticsearch: create index %q: %w", name, err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("elasticsearch: read create index response: %w", err) + } + + // 200 OK or 400 with "resource_already_exists_exception" are acceptable. + if resp.StatusCode != http.StatusOK { + var esErr esErrorResponse + if json.Unmarshal(respBody, &esErr) == nil && esErr.Error.Type == "resource_already_exists_exception" { + // Index already exists; proceed. 
+ } else { + return nil, fmt.Errorf("elasticsearch: create index %q: status %d: %s", name, resp.StatusCode, string(respBody)) + } + } + + return idx, nil +} + +// buildMappings converts an IndexConfig into the Elasticsearch mappings +// properties object. +func buildMappings(schema searchindex.IndexConfig) map[string]any { + properties := make(map[string]any, len(schema.Fields)+2) + + for _, fc := range schema.Fields { + properties[fc.Name] = fieldMapping(fc) + } + + // Internal metadata fields. + properties[reservedTypeNameField] = map[string]any{"type": "keyword"} + properties[reservedKeyFieldsField] = map[string]any{"type": "keyword", "index": false} + + return map[string]any{ + "properties": properties, + } +} + +// fieldMapping returns the Elasticsearch mapping for a single field. +func fieldMapping(fc searchindex.FieldConfig) map[string]any { + switch fc.Type { + case searchindex.FieldTypeText: + m := map[string]any{"type": "text"} + // Add a keyword sub-field for sorting/aggregation if needed. + if fc.Sortable || fc.Filterable { + m["fields"] = map[string]any{ + "keyword": map[string]any{ + "type": "keyword", + "ignore_above": 256, + }, + } + } + return m + case searchindex.FieldTypeKeyword: + return map[string]any{"type": "keyword"} + case searchindex.FieldTypeNumeric: + return map[string]any{"type": "double"} + case searchindex.FieldTypeBool: + return map[string]any{"type": "boolean"} + case searchindex.FieldTypeVector: + return map[string]any{ + "type": "dense_vector", + "dims": fc.Dimensions, + "index": true, + "similarity": "cosine", + } + case searchindex.FieldTypeGeo: + return map[string]any{"type": "geo_point"} + case searchindex.FieldTypeDate, searchindex.FieldTypeDateTime: + return map[string]any{"type": "date"} + default: + return map[string]any{"type": "keyword"} + } +} + +// Index implements searchindex.Index for Elasticsearch. +type Index struct { + name string + config Config + schema searchindex.IndexConfig + client *http.Client +} + +// documentID computes a deterministic string ID from a DocumentIdentity. +// Format: TypeName:key1=val1,key2=val2,... (keys sorted alphabetically). +// This matches the Bleve implementation's convention. +func documentID(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return id.TypeName + } + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(id.TypeName) + b.WriteByte(':') + for i, k := range keys { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(k) + b.WriteByte('=') + fmt.Fprintf(&b, "%v", id.KeyFields[k]) + } + return b.String() +} + +// IndexDocument indexes a single document. +func (idx *Index) IndexDocument(ctx context.Context, doc searchindex.EntityDocument) error { + return idx.IndexDocuments(ctx, []searchindex.EntityDocument{doc}) +} + +// IndexDocuments indexes a batch of documents using the _bulk API. 
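+// Each document contributes two NDJSON lines: an action line and the document +// source. For example (illustrative values): +// +// {"index":{"_index":"products","_id":"Product:id=1"}} +// {"name":"Running Shoes","price":89.99,"_typeName":"Product","_keyFieldsJSON":"{\"id\":\"1\"}"}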
+func (idx *Index) IndexDocuments(ctx context.Context, docs []searchindex.EntityDocument) error { + if len(docs) == 0 { + return nil + } + + var buf bytes.Buffer + for _, doc := range docs { + id := documentID(doc.Identity) + body, err := buildDocBody(doc) + if err != nil { + return err + } + + // Action line: index + action := map[string]any{ + "index": map[string]any{ + "_index": idx.name, + "_id": id, + }, + } + actionBytes, err := json.Marshal(action) + if err != nil { + return fmt.Errorf("elasticsearch: marshal bulk action: %w", err) + } + buf.Write(actionBytes) + buf.WriteByte('\n') + + docBytes, err := json.Marshal(body) + if err != nil { + return fmt.Errorf("elasticsearch: marshal document: %w", err) + } + buf.Write(docBytes) + buf.WriteByte('\n') + } + + resp, err := idx.doRequest(ctx, http.MethodPost, "/_bulk", buf.Bytes()) + if err != nil { + return fmt.Errorf("elasticsearch: bulk index: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("elasticsearch: read bulk response: %w", err) + } + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("elasticsearch: bulk index: status %d: %s", resp.StatusCode, string(respBody)) + } + + // Check for per-item errors. + var bulkResp bulkResponse + if err := json.Unmarshal(respBody, &bulkResp); err != nil { + return fmt.Errorf("elasticsearch: unmarshal bulk response: %w", err) + } + if bulkResp.Errors { + // Collect the first error for diagnostics. + for _, item := range bulkResp.Items { + if item.Index.Error != nil { + return fmt.Errorf("elasticsearch: bulk index error: [%s] %s: %s", + item.Index.Error.Type, item.Index.Error.Reason, + item.Index.ID) + } + } + return fmt.Errorf("elasticsearch: bulk index reported errors but no details found") + } + + return nil +} + +// buildDocBody converts an EntityDocument into a flat map for indexing. +func buildDocBody(doc searchindex.EntityDocument) (map[string]any, error) { + m := make(map[string]any, len(doc.Fields)+len(doc.Vectors)+2) + for k, v := range doc.Fields { + m[k] = v + } + for k, v := range doc.Vectors { + m[k] = v + } + m[reservedTypeNameField] = doc.Identity.TypeName + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return nil, fmt.Errorf("elasticsearch: marshal key fields: %w", err) + } + m[reservedKeyFieldsField] = string(keyFieldsJSON) + return m, nil +} + +// DeleteDocument deletes a single document by identity. +func (idx *Index) DeleteDocument(ctx context.Context, id searchindex.DocumentIdentity) error { + return idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{id}) +} + +// DeleteDocuments deletes a batch of documents using the _bulk API. 
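+// Each identity becomes a single NDJSON action line, e.g. (illustrative): +// +// {"delete":{"_index":"products","_id":"Product:id=1"}}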
+func (idx *Index) DeleteDocuments(ctx context.Context, ids []searchindex.DocumentIdentity) error { + if len(ids) == 0 { + return nil + } + + var buf bytes.Buffer + for _, id := range ids { + docID := documentID(id) + action := map[string]any{ + "delete": map[string]any{ + "_index": idx.name, + "_id": docID, + }, + } + actionBytes, err := json.Marshal(action) + if err != nil { + return fmt.Errorf("elasticsearch: marshal bulk delete action: %w", err) + } + buf.Write(actionBytes) + buf.WriteByte('\n') + } + + resp, err := idx.doRequest(ctx, http.MethodPost, "/_bulk", buf.Bytes()) + if err != nil { + return fmt.Errorf("elasticsearch: bulk delete: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("elasticsearch: read bulk delete response: %w", err) + } + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("elasticsearch: bulk delete: status %d: %s", resp.StatusCode, string(respBody)) + } + + return nil +} + +// Search builds an Elasticsearch query from the SearchRequest and executes it. +func (idx *Index) Search(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + esQuery := idx.buildSearchBody(req) + + bodyBytes, err := json.Marshal(esQuery) + if err != nil { + return nil, fmt.Errorf("elasticsearch: marshal search body: %w", err) + } + + path := "/" + url.PathEscape(idx.name) + "/_search" + resp, err := idx.doRequest(ctx, http.MethodPost, path, bodyBytes) + if err != nil { + return nil, fmt.Errorf("elasticsearch: search: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("elasticsearch: read search response: %w", err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("elasticsearch: search: status %d: %s", resp.StatusCode, string(respBody)) + } + + var esResp esSearchResponse + if err := json.Unmarshal(respBody, &esResp); err != nil { + return nil, fmt.Errorf("elasticsearch: unmarshal search response: %w", err) + } + + return idx.convertSearchResponse(esResp, req.GeoDistanceSort != nil), nil +} + +// Close releases resources. The HTTP-based implementation holds no per-index +// resources, so Close is a no-op. Index lifecycle (including deletion) is the +// caller's responsibility and is managed separately. +func (idx *Index) Close() error { + return nil +} + +// --------------------------------------------------------------------------- +// Query building +// --------------------------------------------------------------------------- + +// buildSearchBody constructs the full ES search request body. +func (idx *Index) buildSearchBody(req searchindex.SearchRequest) map[string]any { + body := make(map[string]any) + + // Size / from. + limit := effectiveLimit(req.Limit) + body["size"] = limit + if len(req.SearchAfter) > 0 { + // Cursor mode: use search_after, no from. + body["search_after"] = req.SearchAfter + } else if req.Offset > 0 { + body["from"] = req.Offset + } + + // Build the main query. + var mustClauses []any + var filterClauses []any + + // Text query.
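+ // For example (illustrative), TextQuery "shoes" restricted to a name field + // boosted by 2 becomes {"multi_match": {"query": "shoes", "fields": ["name^2"]}}.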
+ if req.TextQuery != "" { + if len(req.TextFields) > 0 { + fields := make([]string, len(req.TextFields)) + for i, tf := range req.TextFields { + if tf.Weight != 0 && tf.Weight != 1.0 { + fields[i] = fmt.Sprintf("%s^%g", tf.Name, tf.Weight) + } else { + fields[i] = tf.Name + } + } + mm := map[string]any{ + "query": req.TextQuery, + "fields": fields, + } + if req.Fuzziness != nil { + mm["fuzziness"] = int(*req.Fuzziness) + } + mustClauses = append(mustClauses, map[string]any{ + "multi_match": mm, + }) + } else { + // Search across all text fields from the schema, honoring the + // requested fuzziness just as in the explicit-fields branch. + textFields := idx.allTextFields() + if len(textFields) > 0 { + mm := map[string]any{ + "query": req.TextQuery, + "fields": textFields, + } + if req.Fuzziness != nil { + mm["fuzziness"] = int(*req.Fuzziness) + } + mustClauses = append(mustClauses, map[string]any{ + "multi_match": mm, + }) + } else { + // Use simple_query_string instead of query_string to prevent + // Lucene query syntax injection (field targeting, regex, wildcards). + mustClauses = append(mustClauses, map[string]any{ + "simple_query_string": map[string]any{ + "query": req.TextQuery, + }, + }) + } + } + } + + // TypeName filter. + if req.TypeName != "" { + filterClauses = append(filterClauses, map[string]any{ + "term": map[string]any{ + reservedTypeNameField: req.TypeName, + }, + }) + } + + // Structured filter. + if req.Filter != nil { + fq := translateFilter(req.Filter) + if fq != nil { + filterClauses = append(filterClauses, fq) + } + } + + // Vector (kNN) query. + hasKNN := len(req.Vector) > 0 && req.VectorField != "" + if hasKNN { + knnK := limit + knnCandidates := limit * 2 + if req.TextQuery != "" { + // For hybrid search, fetch more kNN candidates so score fusion with + // the text query sees a wider candidate pool. + knnK = limit * 3 + if knnK < 100 { + knnK = 100 + } + knnCandidates = knnK * 2 + } + knnQuery := map[string]any{ + "field": req.VectorField, + "query_vector": req.Vector, + "k": knnK, + "num_candidates": knnCandidates, + } + // Apply filters inside kNN so they are enforced during candidate selection. + if len(filterClauses) > 0 { + if len(filterClauses) == 1 { + knnQuery["filter"] = filterClauses[0] + } else { + knnQuery["filter"] = map[string]any{ + "bool": map[string]any{ + "filter": filterClauses, + }, + } + } + } + body["knn"] = knnQuery + } + + // Assemble the bool query. + if len(mustClauses) > 0 || len(filterClauses) > 0 { + boolQuery := make(map[string]any) + if len(mustClauses) > 0 { + boolQuery["must"] = mustClauses + } + if len(filterClauses) > 0 { + boolQuery["filter"] = filterClauses + } + body["query"] = map[string]any{"bool": boolQuery} + } else if len(req.Vector) == 0 { + // No text, no filter, no vector: match all. + body["query"] = map[string]any{"match_all": map[string]any{}} + } + + // Sorting.
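+ // e.g. Sort [{Field: "price", Ascending: true}] becomes [{"price": {"order": "asc"}}]; + // text fields sort on their ".keyword" sub-field (see sortFieldName below).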
+ var sortClauses []any + if len(req.Sort) > 0 { + sortClauses = make([]any, 0, len(req.Sort)) + for _, sf := range req.Sort { + order := "asc" + if !sf.Ascending { + order = "desc" + } + fieldName := idx.sortFieldName(sf.Field) + sortClauses = append(sortClauses, map[string]any{ + fieldName: map[string]any{"order": order}, + }) + } + } + if req.GeoDistanceSort != nil { + order := "asc" + if !req.GeoDistanceSort.Ascending { + order = "desc" + } + unit := req.GeoDistanceSort.Unit + if unit == "" { + unit = "km" + } + sortClauses = append(sortClauses, map[string]any{ + "_geo_distance": map[string]any{ + req.GeoDistanceSort.Field: map[string]any{ + "lat": req.GeoDistanceSort.Center.Lat, + "lon": req.GeoDistanceSort.Center.Lon, + }, + "order": order, + "unit": unit, + }, + }) + } + if len(sortClauses) > 0 { + body["sort"] = sortClauses + } + + // Facets (aggregations). + if len(req.Facets) > 0 { + aggs := make(map[string]any, len(req.Facets)) + for _, fr := range req.Facets { + size := fr.Size + if size <= 0 { + size = 10 + } + aggFieldName := idx.aggFieldName(fr.Field) + aggs[fr.Field] = map[string]any{ + "terms": map[string]any{ + "field": aggFieldName, + "size": size, + }, + } + } + body["aggs"] = aggs + } + + // Highlights: request highlighted fragments for all text fields when a text query is present. + if req.TextQuery != "" { + hlFields := make(map[string]any) + if len(req.TextFields) > 0 { + for _, tf := range req.TextFields { + hlFields[tf.Name] = map[string]any{} + } + } else { + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeText { + hlFields[fc.Name] = map[string]any{} + } + } + } + if len(hlFields) > 0 { + body["highlight"] = map[string]any{ + "fields": hlFields, + } + } + } + + // When both text query and kNN are present, Elasticsearch combines the + // scores from both the query and kNN clauses automatically. We do not use + // rank.rrf because it requires a paid (platinum+) license. + + return body +} + +// allTextFields returns the names of all text-type fields in the schema, +// with optional boost syntax (e.g. "name^2") when Weight is set. +func (idx *Index) allTextFields() []string { + var fields []string + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeText { + if fc.Weight != 0 && fc.Weight != 1.0 { + fields = append(fields, fmt.Sprintf("%s^%g", fc.Name, fc.Weight)) + } else { + fields = append(fields, fc.Name) + } + } + } + return fields +} + +// sortFieldName returns the appropriate field name for sorting. Text fields +// need the .keyword sub-field for sorting. +func (idx *Index) sortFieldName(field string) string { + for _, fc := range idx.schema.Fields { + if fc.Name == field && fc.Type == searchindex.FieldTypeText { + return field + ".keyword" + } + } + return field +} + +// aggFieldName returns the appropriate field name for aggregations. Text +// fields need the .keyword sub-field. +func (idx *Index) aggFieldName(field string) string { + return idx.sortFieldName(field) +} + +// effectiveLimit returns a sensible default if limit is zero or negative. +func effectiveLimit(limit int) int { + if limit <= 0 { + return 10 + } + return limit +} + +// --------------------------------------------------------------------------- +// Filter translation +// --------------------------------------------------------------------------- + +// translateFilter recursively converts a searchindex.Filter tree to an +// Elasticsearch query DSL map. 
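+// For example (illustrative), the tree +// +// Filter{And: []*Filter{ +// {Term: &TermFilter{Field: "category", Value: "Footwear"}}, +// {Range: &RangeFilter{Field: "price", GTE: 30.0}}, +// }} +// +// translates to +// +// {"bool": {"must": [ +// {"term": {"category": "Footwear"}}, +// {"range": {"price": {"gte": 30}}} +// ]}}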
+func translateFilter(f *searchindex.Filter) map[string]any { + if f == nil { + return nil + } + + // AND + if len(f.And) > 0 { + children := make([]any, 0, len(f.And)) + for _, child := range f.And { + cq := translateFilter(child) + if cq != nil { + children = append(children, cq) + } + } + if len(children) == 0 { + return nil + } + return map[string]any{ + "bool": map[string]any{ + "must": children, + }, + } + } + + // OR + if len(f.Or) > 0 { + children := make([]any, 0, len(f.Or)) + for _, child := range f.Or { + cq := translateFilter(child) + if cq != nil { + children = append(children, cq) + } + } + if len(children) == 0 { + return nil + } + return map[string]any{ + "bool": map[string]any{ + "should": children, + "minimum_should_match": 1, + }, + } + } + + // NOT + if f.Not != nil { + inner := translateFilter(f.Not) + if inner == nil { + return nil + } + return map[string]any{ + "bool": map[string]any{ + "must_not": []any{inner}, + }, + } + } + + // Term + if f.Term != nil { + return map[string]any{ + "term": map[string]any{ + f.Term.Field: f.Term.Value, + }, + } + } + + // Terms (IN) + if f.Terms != nil { + return map[string]any{ + "terms": map[string]any{ + f.Terms.Field: f.Terms.Values, + }, + } + } + + // Range + if f.Range != nil { + return translateRangeFilter(f.Range) + } + + // Prefix + if f.Prefix != nil { + return map[string]any{ + "prefix": map[string]any{ + f.Prefix.Field: f.Prefix.Value, + }, + } + } + + // Exists + if f.Exists != nil { + return map[string]any{ + "exists": map[string]any{ + "field": f.Exists.Field, + }, + } + } + + // Geo distance + if f.GeoDistance != nil { + return map[string]any{ + "geo_distance": map[string]any{ + "distance": f.GeoDistance.Distance, + f.GeoDistance.Field: map[string]any{ + "lat": f.GeoDistance.Center.Lat, + "lon": f.GeoDistance.Center.Lon, + }, + }, + } + } + + // Geo bounding box + if f.GeoBoundingBox != nil { + return map[string]any{ + "geo_bounding_box": map[string]any{ + f.GeoBoundingBox.Field: map[string]any{ + "top_left": map[string]any{ + "lat": f.GeoBoundingBox.TopLeft.Lat, + "lon": f.GeoBoundingBox.TopLeft.Lon, + }, + "bottom_right": map[string]any{ + "lat": f.GeoBoundingBox.BottomRight.Lat, + "lon": f.GeoBoundingBox.BottomRight.Lon, + }, + }, + }, + } + } + + return nil +} + +// translateRangeFilter converts a RangeFilter to an Elasticsearch range query. +func translateRangeFilter(rf *searchindex.RangeFilter) map[string]any { + rangeClause := make(map[string]any) + + if rf.GTE != nil { + rangeClause["gte"] = rf.GTE + } else if rf.HasGT && rf.GT != nil { + rangeClause["gt"] = rf.GT + } + + if rf.LTE != nil { + rangeClause["lte"] = rf.LTE + } else if rf.HasLT && rf.LT != nil { + rangeClause["lt"] = rf.LT + } + + if len(rangeClause) == 0 { + return nil + } + + return map[string]any{ + "range": map[string]any{ + rf.Field: rangeClause, + }, + } +} + +// --------------------------------------------------------------------------- +// Response parsing +// --------------------------------------------------------------------------- + +// convertSearchResponse transforms the raw ES response into a SearchResult. 
+func (idx *Index) convertSearchResponse(resp esSearchResponse, hasGeoSort bool) *searchindex.SearchResult { + hits := make([]searchindex.SearchHit, 0, len(resp.Hits.Hits)) + for _, hit := range resp.Hits.Hits { + sh := idx.convertHit(hit, hasGeoSort) + hits = append(hits, sh) + } + + facets := convertAggregations(resp.Aggregations) + + totalCount := resp.Hits.Total.Value + + return &searchindex.SearchResult{ + Hits: hits, + TotalCount: totalCount, + Facets: facets, + } +} + +// convertHit transforms a single ES hit into a SearchHit. +func (idx *Index) convertHit(hit esHit, hasGeoSort bool) searchindex.SearchHit { + identity := extractIdentity(hit.Source) + + // Build representation from source, excluding internal fields. + representation := make(map[string]any, len(hit.Source)) + for k, v := range hit.Source { + if k == reservedTypeNameField || k == reservedKeyFieldsField { + continue + } + representation[k] = v + } + representation["__typename"] = identity.TypeName + for k, v := range identity.KeyFields { + representation[k] = v + } + + // Highlights. + var highlights map[string][]string + if len(hit.Highlight) > 0 { + highlights = hit.Highlight + } + + // Populate SortValues for cursor-based pagination. + var sortValues []string + if len(hit.Sort) > 0 { + sortValues = make([]string, len(hit.Sort)) + for i, v := range hit.Sort { + sortValues[i] = fmt.Sprintf("%v", v) + } + } + + // When geo distance sort is active, ES appends the distance as the last sort value. + var geoDistance *float64 + if hasGeoSort && len(hit.Sort) > 0 { + if dist, ok := hit.Sort[len(hit.Sort)-1].(float64); ok { + geoDistance = &dist + } + } + + return searchindex.SearchHit{ + Identity: identity, + Score: hit.Score, + Highlights: highlights, + Representation: representation, + SortValues: sortValues, + GeoDistance: geoDistance, + } +} + +// extractIdentity reconstructs a DocumentIdentity from the _source fields. +func extractIdentity(source map[string]any) searchindex.DocumentIdentity { + typeName, _ := source[reservedTypeNameField].(string) + keyFieldsRaw, _ := source[reservedKeyFieldsField].(string) + + var keyFields map[string]any + if keyFieldsRaw != "" { + _ = json.Unmarshal([]byte(keyFieldsRaw), &keyFields) + } + if keyFields == nil { + keyFields = make(map[string]any) + } + + return searchindex.DocumentIdentity{ + TypeName: typeName, + KeyFields: keyFields, + } +} + +// convertAggregations converts ES aggregation results to searchindex facets. +func convertAggregations(aggs map[string]esAggResult) map[string]searchindex.FacetResult { + if len(aggs) == 0 { + return nil + } + facets := make(map[string]searchindex.FacetResult, len(aggs)) + for name, agg := range aggs { + values := make([]searchindex.FacetValue, 0, len(agg.Buckets)) + for _, bucket := range agg.Buckets { + values = append(values, searchindex.FacetValue{ + Value: fmt.Sprintf("%v", bucket.Key), + Count: bucket.DocCount, + }) + } + facets[name] = searchindex.FacetResult{Values: values} + } + return facets +} + +// Autocomplete returns terms from the Elasticsearch index matching the given prefix. +// Uses a prefix query to find matching documents and extracts unique terms from the +// field values. This is more reliable than the _terms_enum API for text fields. 
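+// For example (illustrative), Prefix "sho" on field "name" could return terms +// such as "shoes", ranked by document count and then alphabetically.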
+func (idx *Index) Autocomplete(ctx context.Context, req searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) { + limit := req.Limit + if limit <= 0 { + limit = 10 + } + + prefix := strings.ToLower(req.Prefix) + + body := map[string]any{ + "query": map[string]any{ + "prefix": map[string]any{ + req.Field: map[string]any{ + "value": prefix, + }, + }, + }, + "size": 100, + "_source": []string{req.Field}, + } + bodyJSON, err := json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("elasticsearch: marshal autocomplete body: %w", err) + } + + resp, err := idx.doRequest(ctx, "POST", "/"+url.PathEscape(idx.name)+"/_search", bodyJSON) + if err != nil { + return nil, fmt.Errorf("elasticsearch: autocomplete search request: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("elasticsearch: read autocomplete response: %w", err) + } + + if resp.StatusCode >= 400 { + return nil, fmt.Errorf("elasticsearch: autocomplete search failed (HTTP %d): %s", resp.StatusCode, string(respBody)) + } + + var searchResult struct { + Hits struct { + Hits []struct { + Source map[string]any `json:"_source"` + } `json:"hits"` + } `json:"hits"` + } + if err := json.Unmarshal(respBody, &searchResult); err != nil { + return nil, fmt.Errorf("elasticsearch: unmarshal autocomplete response: %w", err) + } + + // Extract unique terms from field values that match the prefix. + termCounts := make(map[string]int) + for _, hit := range searchResult.Hits.Hits { + val, ok := hit.Source[req.Field] + if !ok { + continue + } + text, ok := val.(string) + if !ok { + continue + } + // Tokenize: split on non-alphanumeric boundaries and lowercase. + for _, token := range tokenize(text) { + if strings.HasPrefix(token, prefix) { + termCounts[token]++ + } + } + } + + terms := make([]searchindex.AutocompleteTerm, 0, len(termCounts)) + for term, count := range termCounts { + terms = append(terms, searchindex.AutocompleteTerm{Term: term, Count: count}) + } + sort.Slice(terms, func(i, j int) bool { + if terms[i].Count != terms[j].Count { + return terms[i].Count > terms[j].Count + } + return terms[i].Term < terms[j].Term + }) + if len(terms) > limit { + terms = terms[:limit] + } + + return &searchindex.AutocompleteResult{Terms: terms}, nil +} + +// tokenize splits text into lowercase tokens, mimicking Elasticsearch's standard analyzer. +func tokenize(text string) []string { + var tokens []string + var current strings.Builder + for _, r := range strings.ToLower(text) { + if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') { + current.WriteRune(r) + } else if current.Len() > 0 { + tokens = append(tokens, current.String()) + current.Reset() + } + } + if current.Len() > 0 { + tokens = append(tokens, current.String()) + } + return tokens +} + +// --------------------------------------------------------------------------- +// HTTP helpers +// --------------------------------------------------------------------------- + +// doRequest performs an HTTP request against the first available ES address. +func (idx *Index) doRequest(ctx context.Context, method, path string, body []byte) (*http.Response, error) { + // Use the first address for simplicity. A production implementation would + // rotate or load-balance. 
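+ // A sketch of one alternative (not implemented here): keep an atomic + // counter on Index and pick addrs[counter%len(addrs)] per request.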
+ baseURL := strings.TrimRight(idx.config.Addresses[0], "/") + url := baseURL + path + + var bodyReader io.Reader + if body != nil { + bodyReader = bytes.NewReader(body) + } + + req, err := http.NewRequestWithContext(ctx, method, url, bodyReader) + if err != nil { + return nil, fmt.Errorf("elasticsearch: create request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + + // Authentication. + if idx.config.APIKey != "" { + req.Header.Set("Authorization", "ApiKey "+idx.config.APIKey) + } else if idx.config.Username != "" { + req.SetBasicAuth(idx.config.Username, idx.config.Password) + } + + return idx.client.Do(req) +} + +// --------------------------------------------------------------------------- +// Elasticsearch response types +// --------------------------------------------------------------------------- + +// esErrorResponse is the top-level error envelope from ES. +type esErrorResponse struct { + Error struct { + Type string `json:"type"` + Reason string `json:"reason"` + } `json:"error"` + Status int `json:"status"` +} + +// bulkResponse is the response from the _bulk API. +type bulkResponse struct { + Errors bool `json:"errors"` + Items []bulkItem `json:"items"` +} + +type bulkItem struct { + Index bulkItemResult `json:"index"` + Delete bulkItemResult `json:"delete"` +} + +type bulkItemResult struct { + ID string `json:"_id"` + Status int `json:"status"` + Error *bulkItemErr `json:"error,omitempty"` +} + +type bulkItemErr struct { + Type string `json:"type"` + Reason string `json:"reason"` +} + +// esSearchResponse is the top-level search response from ES. +type esSearchResponse struct { + Hits esHitsWrapper `json:"hits"` + Aggregations map[string]esAggResult `json:"aggregations"` +} + +type esHitsWrapper struct { + Total esTotal `json:"total"` + Hits []esHit `json:"hits"` +} + +type esTotal struct { + Value int `json:"value"` + Relation string `json:"relation"` +} + +type esHit struct { + Index string `json:"_index"` + ID string `json:"_id"` + Score float64 `json:"_score"` + Source map[string]any `json:"_source"` + Highlight map[string][]string `json:"highlight,omitempty"` + Sort []any `json:"sort,omitempty"` +} + +type esAggResult struct { + Buckets []esAggBucket `json:"buckets"` +} + +type esAggBucket struct { + Key any `json:"key"` + DocCount int `json:"doc_count"` +} diff --git a/v2/pkg/searchindex/elasticsearch/elasticsearch_test.go b/v2/pkg/searchindex/elasticsearch/elasticsearch_test.go new file mode 100644 index 0000000000..907d6078d4 --- /dev/null +++ b/v2/pkg/searchindex/elasticsearch/elasticsearch_test.go @@ -0,0 +1,549 @@ +//go:build integration + +package elasticsearch + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// startElasticsearch spins up an Elasticsearch container and returns the +// base URL (e.g. "http://localhost:49200") plus a cleanup function. +func startElasticsearch(t *testing.T) string { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "docker.elastic.co/elasticsearch/elasticsearch:8.13.4", + ExposedPorts: []string{"9200/tcp"}, + Env: map[string]string{ + "discovery.type": "single-node", + "xpack.security.enabled": "false", + "ES_JAVA_OPTS": "-Xms512m -Xmx512m", + }, + WaitingFor: wait.ForHTTP("/"). + WithPort("9200/tcp"). 
+ WithStartupTimeout(120 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start elasticsearch container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate container: %v", err) + } + }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + port, err := container.MappedPort(ctx, "9200/tcp") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return fmt.Sprintf("http://%s:%s", host, port.Port()) +} + +func newTestIndex(t *testing.T, baseURL string) searchindex.Index { + t.Helper() + + factory := NewFactory() + schema := searchindex.IndexConfig{ + Name: "test-products", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + }, + } + + cfg := Config{ + Addresses: []string{baseURL}, + } + cfgJSON, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("marshal config: %v", err) + } + + // Use a unique index name per test to avoid collisions. + indexName := fmt.Sprintf("test-products-%d", time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, schema, cfgJSON) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func populateTestData(t *testing.T, idx searchindex.Index) { + t.Helper() + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + }, + } + if err := idx.IndexDocuments(context.Background(), docs); err != nil { + t.Fatalf("IndexDocuments: %v", err) + } + + // Elasticsearch is near-real-time; wait for a refresh. + time.Sleep(2 * time.Second) +} + +func TestFullLifecycle(t *testing.T) { + baseURL := startElasticsearch(t) + + t.Run("create index and batch index", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + // Verify all 4 documents are searchable. 
+ result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 documents, got %d", result.TotalCount) + } + }) + + t.Run("text search", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 2 { + t.Errorf("expected at least 2 hits for 'shoes', got %d", result.TotalCount) + } + }) + + t.Run("text search with field restriction", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + TextFields: []searchindex.TextFieldWeight{{Name: "name"}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 2 { + t.Errorf("expected at least 2 hits for 'shoes' in name, got %d", result.TotalCount) + } + }) + + t.Run("term filter on keyword field", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 hits for category=Footwear, got %d", result.TotalCount) + } + }) + + t.Run("boolean filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Errorf("expected 1 hit for inStock=false, got %d", result.TotalCount) + } + }) + + t.Run("numeric range filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: 30.0, + LTE: 100.0, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 2 { + t.Errorf("expected 2 hits for price 30-100, got %d", result.TotalCount) + } + }) + + t.Run("prefix filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Prefix: &searchindex.PrefixFilter{Field: "category", Value: "Foot"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 hits for category prefix 'Foot', got %d", result.TotalCount) + } + }) + + t.Run("AND filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if 
result.TotalCount != 3 { + t.Errorf("expected 3 hits for Footwear AND inStock, got %d", result.TotalCount) + } + }) + + t.Run("OR filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + {Range: &searchindex.RangeFilter{Field: "price", GTE: 100.0}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + // Accessories=1 (Leather Belt) + price>=100=1 (Basketball Shoes) = 2 + if result.TotalCount != 2 { + t.Errorf("expected 2 hits for Accessories OR price>=100, got %d", result.TotalCount) + } + }) + + t.Run("NOT filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Errorf("expected 1 hit for NOT Footwear, got %d", result.TotalCount) + } + }) + + t.Run("sorting", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 4 { + t.Fatalf("expected 4 hits, got %d", result.TotalCount) + } + // First hit should be cheapest (Wool Socks at 12.99). + if name, _ := result.Hits[0].Representation["name"].(string); name != "Wool Socks" { + t.Errorf("expected first hit to be Wool Socks (cheapest), got %v", result.Hits[0].Representation["name"]) + } + }) + + t.Run("pagination", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 2, + Offset: 2, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 2 { + t.Errorf("expected 2 hits with offset, got %d", len(result.Hits)) + } + }) + + t.Run("facets", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Facets: []searchindex.FacetRequest{{Field: "category", Size: 10}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + facet, ok := result.Facets["category"] + if !ok { + t.Fatal("expected category facet") + } + if len(facet.Values) < 2 { + t.Errorf("expected at least 2 facet values, got %d", len(facet.Values)) + } + }) + + t.Run("search hit identity", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "running shoes", + Limit: 1, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) == 0 { + t.Fatal("expected at least 1 hit") + } + hit := result.Hits[0] + if hit.Identity.TypeName != "Product" { + t.Errorf("TypeName = %q, want %q", hit.Identity.TypeName, "Product") + } + if hit.Representation["__typename"] != "Product" { + t.Errorf("__typename = %v, want %q", hit.Representation["__typename"], 
"Product") + } + }) + + t.Run("delete single document", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + err := idx.DeleteDocument(context.Background(), searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }) + if err != nil { + t.Fatalf("DeleteDocument: %v", err) + } + + // Wait for refresh. + time.Sleep(2 * time.Second) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 documents after delete, got %d", result.TotalCount) + } + }) + + t.Run("delete multiple documents", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + err := idx.DeleteDocuments(context.Background(), []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + }) + if err != nil { + t.Fatalf("DeleteDocuments: %v", err) + } + + // Wait for refresh. + time.Sleep(2 * time.Second) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 2 { + t.Errorf("expected 2 documents after batch delete, got %d", result.TotalCount) + } + }) + + t.Run("TypeName filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TypeName: "Product", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for TypeName=Product, got %d", result.TotalCount) + } + }) + + t.Run("terms (IN) filter", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"Footwear", "Accessories"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for category IN [Footwear, Accessories], got %d", result.TotalCount) + } + }) + + t.Run("IndexDocument single", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + + err := idx.IndexDocument(context.Background(), searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "100"}}, + Fields: map[string]any{"name": "Flip Flops", "description": "Casual summer footwear", "category": "Footwear", "price": 19.99, "inStock": true}, + }) + if err != nil { + t.Fatalf("IndexDocument: %v", err) + } + + // Wait for refresh. + time.Sleep(2 * time.Second) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Errorf("expected 1 document after IndexDocument, got %d", result.TotalCount) + } + }) + + t.Run("upsert document", func(t *testing.T) { + idx := newTestIndex(t, baseURL) + populateTestData(t, idx) + + // Re-index product id="1" with an updated name. 
+ err := idx.IndexDocument(context.Background(), searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Trail Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }) + if err != nil { + t.Fatalf("IndexDocument (upsert): %v", err) + } + + // Wait for refresh. + time.Sleep(2 * time.Second) + + // Total count should still be 4 (upsert, not insert). + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 documents after upsert, got %d", result.TotalCount) + } + + // Search for the updated name to verify the change took effect. + result, err = idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "trail", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search for 'trail': %v", err) + } + if result.TotalCount < 1 { + t.Errorf("expected at least 1 hit for 'trail' after upsert, got %d", result.TotalCount) + } + }) +} + +func TestDocumentID(t *testing.T) { + id := documentID(searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123", "sku": "ABC"}, + }) + // Keys should be sorted alphabetically. + expected := "Product:id=123,sku=ABC" + if id != expected { + t.Errorf("documentID = %q, want %q", id, expected) + } +} diff --git a/v2/pkg/searchindex/embedder.go b/v2/pkg/searchindex/embedder.go new file mode 100644 index 0000000000..0d3ab47edb --- /dev/null +++ b/v2/pkg/searchindex/embedder.go @@ -0,0 +1,49 @@ +package searchindex + +import "context" + +// Embedder converts text to vector embeddings. Pluggable: OpenAI, Ollama, local models. +type Embedder interface { + // Embed converts a batch of texts to embedding vectors. + Embed(ctx context.Context, texts []string) ([][]float32, error) + // EmbedSingle converts a single text to an embedding vector. + EmbedSingle(ctx context.Context, text string) ([]float32, error) + // Dimensions returns the dimensionality of the embeddings produced by this embedder. + Dimensions() int +} + +// TextTransformer converts entity fields into a string for embedding. +type TextTransformer interface { + Transform(fields map[string]any) string +} + +// FuncTransformer allows arbitrary Go functions for programmatic use. +type FuncTransformer struct { + Fn func(fields map[string]any) string +} + +// Transform calls the underlying function. +func (f *FuncTransformer) Transform(fields map[string]any) string { + return f.Fn(fields) +} + +// EmbeddingPipeline combines a transformer and embedder for derived embeddings. +type EmbeddingPipeline struct { + Transformer TextTransformer + Embedder Embedder +} + +// Process converts entity fields → string → embedding vector. +func (p *EmbeddingPipeline) Process(ctx context.Context, fields map[string]any) ([]float32, error) { + text := p.Transformer.Transform(fields) + return p.Embedder.EmbedSingle(ctx, text) +} + +// ProcessBatch converts multiple entities' fields → strings → embedding vectors. 
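+//
+// A minimal usage sketch (hypothetical field values; `emb` stands for any
+// configured Embedder, e.g. the openai or ollama implementations, and ctx is
+// a context.Context in scope):
+//
+//	pipeline := &EmbeddingPipeline{
+//		Transformer: &FuncTransformer{Fn: func(f map[string]any) string {
+//			return fmt.Sprintf("%v. %v", f["name"], f["description"])
+//		}},
+//		Embedder: emb,
+//	}
+//	vecs, err := pipeline.ProcessBatch(ctx, []map[string]any{
+//		{"name": "Running Shoes", "description": "Great for jogging"},
+//		{"name": "Leather Belt", "description": "Genuine leather"},
+//	})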
+func (p *EmbeddingPipeline) ProcessBatch(ctx context.Context, fieldSets []map[string]any) ([][]float32, error) { + texts := make([]string, len(fieldSets)) + for i, fields := range fieldSets { + texts[i] = p.Transformer.Transform(fields) + } + return p.Embedder.Embed(ctx, texts) +} diff --git a/v2/pkg/searchindex/embedder/ollama/ollama.go b/v2/pkg/searchindex/embedder/ollama/ollama.go new file mode 100644 index 0000000000..64253b28ee --- /dev/null +++ b/v2/pkg/searchindex/embedder/ollama/ollama.go @@ -0,0 +1,148 @@ +// Package ollama provides an Embedder implementation backed by the Ollama embed API. +package ollama + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +const ( + defaultBaseURL = "http://localhost:11434" +) + +// Embedder implements searchindex.Embedder using the Ollama embed API. +type Embedder struct { + baseURL string + model string + dimensions int + client *http.Client +} + +// Option configures an Embedder. +type Option func(*Embedder) + +// WithHTTPClient sets a custom HTTP client. +func WithHTTPClient(client *http.Client) Option { + return func(e *Embedder) { + e.client = client + } +} + +// NewEmbedder creates a new Ollama embedder. +// +// baseURL is the Ollama server address (e.g. "http://localhost:11434"). +// Pass an empty string to use the default (http://localhost:11434). +// +// model is the Ollama model to use for embeddings (e.g. "nomic-embed-text", +// "mxbai-embed-large", "all-minilm"). +// +// dimensions is the dimensionality of the vectors produced by the chosen model. +// The caller must provide this value because Ollama does not report it in the API response. +func NewEmbedder(baseURL, model string, dimensions int, opts ...Option) *Embedder { + if baseURL == "" { + baseURL = defaultBaseURL + } + e := &Embedder{ + baseURL: baseURL, + model: model, + dimensions: dimensions, + client: http.DefaultClient, + } + for _, opt := range opts { + opt(e) + } + return e +} + +// Dimensions returns the dimensionality of the embeddings produced. +func (e *Embedder) Dimensions() int { + return e.dimensions +} + +// embedRequest is the JSON body sent to the Ollama /api/embed endpoint. +type embedRequest struct { + Model string `json:"model"` + Input []string `json:"input"` +} + +// embedResponse is the JSON body returned by the Ollama /api/embed endpoint. +type embedResponse struct { + Embeddings [][]float32 `json:"embeddings"` + Error string `json:"error,omitempty"` +} + +// Embed converts a batch of texts to embedding vectors. +// The Ollama /api/embed endpoint accepts multiple texts in a single request. 
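+// The wire exchange, abridged and with illustrative values, looks like:
+//
+//	POST {baseURL}/api/embed
+//	{"model": "nomic-embed-text", "input": ["hello", "world"]}
+//	-> 200 {"embeddings": [[0.1, ...], [0.2, ...]]}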
+func (e *Embedder) Embed(ctx context.Context, texts []string) ([][]float32, error) { + if len(texts) == 0 { + return nil, nil + } + + reqBody := embedRequest{ + Model: e.model, + Input: texts, + } + + body, err := json.Marshal(reqBody) + if err != nil { + return nil, fmt.Errorf("ollama: marshal request: %w", err) + } + + endpoint := e.baseURL + "/api/embed" + req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body)) + if err != nil { + return nil, fmt.Errorf("ollama: create request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + + resp, err := e.client.Do(req) + if err != nil { + return nil, fmt.Errorf("ollama: send request: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("ollama: read response: %w", err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("ollama: unexpected status %d: %s", resp.StatusCode, string(respBody)) + } + + var embResp embedResponse + if err := json.Unmarshal(respBody, &embResp); err != nil { + return nil, fmt.Errorf("ollama: unmarshal response: %w", err) + } + + if embResp.Error != "" { + return nil, fmt.Errorf("ollama: api error: %s", embResp.Error) + } + + if len(embResp.Embeddings) != len(texts) { + return nil, fmt.Errorf("ollama: expected %d embeddings, got %d", len(texts), len(embResp.Embeddings)) + } + + return embResp.Embeddings, nil +} + +// EmbedSingle converts a single text to an embedding vector. +func (e *Embedder) EmbedSingle(ctx context.Context, text string) ([]float32, error) { + embeddings, err := e.Embed(ctx, []string{text}) + if err != nil { + return nil, err + } + if len(embeddings) == 0 { + return nil, fmt.Errorf("ollama: empty response for single embedding") + } + return embeddings[0], nil +} + +// Verify interface compliance at compile time. +var _ searchindex.Embedder = (*Embedder)(nil) diff --git a/v2/pkg/searchindex/embedder/openai/openai.go b/v2/pkg/searchindex/embedder/openai/openai.go new file mode 100644 index 0000000000..042550292e --- /dev/null +++ b/v2/pkg/searchindex/embedder/openai/openai.go @@ -0,0 +1,268 @@ +// Package openai provides an Embedder implementation backed by the OpenAI embeddings API. +package openai + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "math" + "net/http" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +const ( + defaultEndpoint = "https://api.openai.com/v1/embeddings" + maxBatchSize = 2048 + maxRetries = 3 + baseRetryDelay = 500 * time.Millisecond +) + +// Embedder implements searchindex.Embedder using the OpenAI embeddings API. +type Embedder struct { + apiKey string + model string + dimensions int + endpoint string + client *http.Client +} + +// Option configures an Embedder. +type Option func(*Embedder) + +// WithEndpoint overrides the default OpenAI embeddings endpoint. +func WithEndpoint(endpoint string) Option { + return func(e *Embedder) { + e.endpoint = endpoint + } +} + +// WithHTTPClient sets a custom HTTP client. +func WithHTTPClient(client *http.Client) Option { + return func(e *Embedder) { + e.client = client + } +} + +// NewEmbedder creates a new OpenAI embedder. +// +// Supported models and their default dimensions: +// - "text-embedding-3-small": 1536 +// - "text-embedding-3-large": 3072 +// +// The dimensions parameter allows requesting a shorter embedding from the API +// (supported by text-embedding-3-* models). Pass 0 to use the model's default. 
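+//
+// A minimal usage sketch (the key is read from a placeholder environment
+// variable, and ctx is a context.Context in scope):
+//
+//	emb := NewEmbedder(os.Getenv("OPENAI_API_KEY"), "text-embedding-3-small", 0)
+//	vec, err := emb.EmbedSingle(ctx, "Running Shoes. Great for jogging")
+//	// on success, len(vec) == emb.Dimensions() == 1536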
+func NewEmbedder(apiKey, model string, dimensions int, opts ...Option) *Embedder { + e := &Embedder{ + apiKey: apiKey, + model: model, + dimensions: dimensions, + endpoint: defaultEndpoint, + client: http.DefaultClient, + } + for _, opt := range opts { + opt(e) + } + if e.dimensions == 0 { + switch model { + case "text-embedding-3-small": + e.dimensions = 1536 + case "text-embedding-3-large": + e.dimensions = 3072 + default: + e.dimensions = 1536 + } + } + return e +} + +// Dimensions returns the dimensionality of the embeddings produced. +func (e *Embedder) Dimensions() int { + return e.dimensions +} + +// embeddingRequest is the JSON body sent to the OpenAI API. +type embeddingRequest struct { + Input []string `json:"input"` + Model string `json:"model"` + Dimensions int `json:"dimensions,omitempty"` +} + +// embeddingResponse is the JSON body returned by the OpenAI API. +type embeddingResponse struct { + Data []embeddingData `json:"data"` + Error *apiError `json:"error,omitempty"` +} + +type embeddingData struct { + Index int `json:"index"` + Embedding []float32 `json:"embedding"` +} + +type apiError struct { + Message string `json:"message"` + Type string `json:"type"` +} + +// Embed converts a batch of texts to embedding vectors. +// Texts are split into sub-batches of up to 2048 items per API request. +func (e *Embedder) Embed(ctx context.Context, texts []string) ([][]float32, error) { + if len(texts) == 0 { + return nil, nil + } + + results := make([][]float32, len(texts)) + + for start := 0; start < len(texts); start += maxBatchSize { + end := start + maxBatchSize + if end > len(texts) { + end = len(texts) + } + batch := texts[start:end] + + embeddings, err := e.requestWithRetry(ctx, batch) + if err != nil { + return nil, fmt.Errorf("openai embed batch [%d:%d]: %w", start, end, err) + } + + for _, item := range embeddings { + results[start+item.Index] = item.Embedding + } + } + + return results, nil +} + +// EmbedSingle converts a single text to an embedding vector. +func (e *Embedder) EmbedSingle(ctx context.Context, text string) ([]float32, error) { + embeddings, err := e.Embed(ctx, []string{text}) + if err != nil { + return nil, err + } + if len(embeddings) == 0 { + return nil, fmt.Errorf("openai: empty response for single embedding") + } + return embeddings[0], nil +} + +// requestWithRetry sends the embedding request with exponential backoff on rate-limit errors. +func (e *Embedder) requestWithRetry(ctx context.Context, texts []string) ([]embeddingData, error) { + var lastErr error + + for attempt := 0; attempt <= maxRetries; attempt++ { + if attempt > 0 { + delay := baseRetryDelay * time.Duration(math.Pow(2, float64(attempt-1))) + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-time.After(delay): + } + } + + data, err := e.doRequest(ctx, texts) + if err == nil { + return data, nil + } + + if isRetryable(err) { + lastErr = err + continue + } + + return nil, err + } + + return nil, fmt.Errorf("openai: max retries exceeded: %w", lastErr) +} + +// rateLimitError indicates a 429 response. +type rateLimitError struct { + message string +} + +func (e *rateLimitError) Error() string { + return e.message +} + +// serverError indicates a 5xx response. 
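+// Together with rateLimitError it is matched by isRetryable, so 429 and 5xx
+// responses are retried with exponential backoff while other failures are
+// returned immediately.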
+type serverError struct {
+	statusCode int
+	message    string
+}
+
+func (e *serverError) Error() string {
+	return e.message
+}
+
+func isRetryable(err error) bool {
+	var rle *rateLimitError
+	var se *serverError
+	return errors.As(err, &rle) || errors.As(err, &se)
+}
+
+func (e *Embedder) doRequest(ctx context.Context, texts []string) ([]embeddingData, error) {
+	reqBody := embeddingRequest{
+		Input: texts,
+		Model: e.model,
+	}
+	// NewEmbedder resolves dimensions to the model default when 0 is passed,
+	// so this is always set; text-embedding-3-* models accept the parameter.
+	if e.dimensions > 0 {
+		reqBody.Dimensions = e.dimensions
+	}
+
+	body, err := json.Marshal(reqBody)
+	if err != nil {
+		return nil, fmt.Errorf("openai: marshal request: %w", err)
+	}
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, e.endpoint, bytes.NewReader(body))
+	if err != nil {
+		return nil, fmt.Errorf("openai: create request: %w", err)
+	}
+	req.Header.Set("Content-Type", "application/json")
+	req.Header.Set("Authorization", "Bearer "+e.apiKey)
+
+	resp, err := e.client.Do(req)
+	if err != nil {
+		return nil, fmt.Errorf("openai: send request: %w", err)
+	}
+	defer resp.Body.Close()
+
+	respBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, fmt.Errorf("openai: read response: %w", err)
+	}
+
+	if resp.StatusCode == http.StatusTooManyRequests {
+		return nil, &rateLimitError{message: fmt.Sprintf("openai: rate limited (429): %s", string(respBody))}
+	}
+
+	if resp.StatusCode >= 500 {
+		return nil, &serverError{
+			statusCode: resp.StatusCode,
+			message:    fmt.Sprintf("openai: server error (%d): %s", resp.StatusCode, string(respBody)),
+		}
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		return nil, fmt.Errorf("openai: unexpected status %d: %s", resp.StatusCode, string(respBody))
+	}
+
+	var embResp embeddingResponse
+	if err := json.Unmarshal(respBody, &embResp); err != nil {
+		return nil, fmt.Errorf("openai: unmarshal response: %w", err)
+	}
+
+	if embResp.Error != nil {
+		return nil, fmt.Errorf("openai: api error: %s (%s)", embResp.Error.Message, embResp.Error.Type)
+	}
+
+	return embResp.Data, nil
+}
+
+// Verify interface compliance at compile time.
+var _ searchindex.Embedder = (*Embedder)(nil)
diff --git a/v2/pkg/searchindex/filter.go b/v2/pkg/searchindex/filter.go
new file mode 100644
index 0000000000..2ad8aea6ea
--- /dev/null
+++ b/v2/pkg/searchindex/filter.go
@@ -0,0 +1,69 @@
+package searchindex
+
+// Filter represents a composable filter tree that translates to each backend's native format.
+type Filter struct {
+	And            []*Filter
+	Or             []*Filter
+	Not            *Filter
+	Term           *TermFilter
+	Terms          *TermsFilter
+	Range          *RangeFilter
+	Prefix         *PrefixFilter
+	Exists         *ExistsFilter
+	GeoDistance    *GeoDistanceFilter
+	GeoBoundingBox *GeoBoundingBoxFilter
+}
+
+// TermFilter matches a single exact value.
+type TermFilter struct {
+	Field string
+	Value any
+}
+
+// TermsFilter matches any of a set of values (IN operator).
+type TermsFilter struct {
+	Field  string
+	Values []any
+}
+
+// RangeFilter matches numeric/string ranges.
+type RangeFilter struct {
+	Field string
+	GT    any  // greater than
+	GTE   any  // greater than or equal
+	LT    any  // less than
+	LTE   any  // less than or equal
+	HasGT bool // whether GT is set
+	HasLT bool // whether LT is set
+}
+
+// PrefixFilter matches values starting with a prefix.
+type PrefixFilter struct {
+	Field string
+	Value string
+}
+
+// ExistsFilter matches documents where a field exists.
+type ExistsFilter struct {
+	Field string
+}
+
+// GeoPoint represents a latitude/longitude coordinate.
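+// For example, GeoPoint{Lat: 52.52, Lon: 13.405} is central Berlin.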
+type GeoPoint struct { + Lat float64 + Lon float64 +} + +// GeoDistanceFilter matches documents within a radius of a point. +type GeoDistanceFilter struct { + Field string + Center GeoPoint + Distance string // e.g. "10km", "5mi" — passed directly to backend +} + +// GeoBoundingBoxFilter matches documents within a rectangular region. +type GeoBoundingBoxFilter struct { + Field string + TopLeft GeoPoint + BottomRight GeoPoint +} diff --git a/v2/pkg/searchindex/index.go b/v2/pkg/searchindex/index.go new file mode 100644 index 0000000000..d70fb631b5 --- /dev/null +++ b/v2/pkg/searchindex/index.go @@ -0,0 +1,45 @@ +package searchindex + +import "context" + +// Index is the core interface for a search index backend. +type Index interface { + // IndexDocument indexes a single document. + IndexDocument(ctx context.Context, doc EntityDocument) error + // IndexDocuments indexes a batch of documents. + IndexDocuments(ctx context.Context, docs []EntityDocument) error + // DeleteDocument deletes a single document by identity. + DeleteDocument(ctx context.Context, id DocumentIdentity) error + // DeleteDocuments deletes a batch of documents by identity. + DeleteDocuments(ctx context.Context, ids []DocumentIdentity) error + // Search performs a search query and returns results. + Search(ctx context.Context, req SearchRequest) (*SearchResult, error) + // Autocomplete returns terms from the index dictionary matching the given prefix. + Autocomplete(ctx context.Context, req AutocompleteRequest) (*AutocompleteResult, error) + // Close releases resources held by the index. + Close() error +} + +// AutocompleteRequest describes a term-prefix autocomplete query. +type AutocompleteRequest struct { + Field string + Prefix string + Limit int +} + +// AutocompleteResult contains matching terms from the index dictionary. +type AutocompleteResult struct { + Terms []AutocompleteTerm +} + +// AutocompleteTerm is a single term with its document count. +type AutocompleteTerm struct { + Term string + Count int +} + +// IndexFactory creates Index instances for a specific backend. +type IndexFactory interface { + // CreateIndex creates a new index with the given name and configuration. + CreateIndex(ctx context.Context, name string, schema IndexConfig, configJSON []byte) (Index, error) +} diff --git a/v2/pkg/searchindex/meilisearch/meilisearch.go b/v2/pkg/searchindex/meilisearch/meilisearch.go new file mode 100644 index 0000000000..e398b17ef8 --- /dev/null +++ b/v2/pkg/searchindex/meilisearch/meilisearch.go @@ -0,0 +1,950 @@ +// Package meilisearch implements the searchindex.Index and searchindex.IndexFactory +// interfaces backed by a Meilisearch server. It uses only net/http and encoding/json +// for communication -- no external SDK is required. +// +// Supports: full-text search with typo tolerance, structured filtering, sorting, facets. +// Filter translation: searchindex.Filter -> Meilisearch filter string syntax. +package meilisearch + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "sort" + "strconv" + "strings" + "time" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Compile-time interface conformance checks. +var ( + _ searchindex.Index = (*Index)(nil) + _ searchindex.IndexFactory = (*Factory)(nil) +) + +// reservedDocIDField is the Meilisearch primary key field. +const reservedDocIDField = "_docId" + +// reservedTypeNameField stores the entity type name for DocumentIdentity reconstruction. 
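+// Together with _docId and _keyFieldsJSON, a stored Product therefore looks
+// roughly like:
+//
+//	{"_docId": "Product_id-1", "_typeName": "Product",
+//	 "_keyFieldsJSON": "{\"id\":\"1\"}", "name": "Running Shoes", ...}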
+const reservedTypeNameField = "_typeName" + +// reservedKeyFieldsField stores the JSON-encoded key fields for DocumentIdentity reconstruction. +const reservedKeyFieldsField = "_keyFieldsJSON" + +// taskPollInterval is the interval between task status polls. +const taskPollInterval = 100 * time.Millisecond + +// taskPollTimeout is the maximum time to wait for a task to complete. +const taskPollTimeout = 30 * time.Second + +// Config holds Meilisearch-specific configuration. +type Config struct { + Host string `json:"host"` + APIKey string `json:"api_key,omitempty"` +} + +// Factory implements searchindex.IndexFactory for Meilisearch. +type Factory struct{} + +// NewFactory returns a new Meilisearch IndexFactory. +func NewFactory() *Factory { + return &Factory{} +} + +// CreateIndex creates a new Meilisearch index with the given name and configuration. +// It creates the index via the Meilisearch API, then configures filterable and sortable +// attributes based on the IndexConfig schema. +func (f *Factory) CreateIndex(ctx context.Context, name string, schema searchindex.IndexConfig, configJSON []byte) (searchindex.Index, error) { + var cfg Config + if len(configJSON) > 0 { + if err := json.Unmarshal(configJSON, &cfg); err != nil { + return nil, fmt.Errorf("meilisearch: invalid config: %w", err) + } + } + if cfg.Host == "" { + cfg.Host = "http://localhost:7700" + } + // Normalize: strip trailing slash. + cfg.Host = strings.TrimRight(cfg.Host, "/") + + idx := &Index{ + name: name, + config: cfg, + schema: schema, + client: &http.Client{}, + } + + // Step 1: Create the index. + createBody := map[string]string{ + "uid": name, + "primaryKey": reservedDocIDField, + } + taskUID, err := idx.doTaskRequest(ctx, http.MethodPost, "/indexes", createBody) + if err != nil { + return nil, fmt.Errorf("meilisearch: create index %q: %w", name, err) + } + if err := idx.waitForTask(ctx, taskUID); err != nil { + return nil, fmt.Errorf("meilisearch: create index %q wait: %w", name, err) + } + + // Step 2: Configure filterable and sortable attributes from the schema. + filterable, sortable := deriveAttributes(schema) + if len(filterable) > 0 || len(sortable) > 0 { + settings := map[string]any{} + if len(filterable) > 0 { + settings["filterableAttributes"] = filterable + } + if len(sortable) > 0 { + settings["sortableAttributes"] = sortable + } + taskUID, err = idx.doTaskRequest(ctx, http.MethodPatch, "/indexes/"+name+"/settings", settings) + if err != nil { + return nil, fmt.Errorf("meilisearch: configure settings for %q: %w", name, err) + } + if err := idx.waitForTask(ctx, taskUID); err != nil { + return nil, fmt.Errorf("meilisearch: configure settings for %q wait: %w", name, err) + } + } + + return idx, nil +} + +// deriveAttributes computes filterable and sortable attribute lists from an IndexConfig. +// The reserved metadata fields are always included as filterable. 
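+// For example, the schema used by this package's tests (name, category and
+// price filterable+sortable, inStock filterable) yields
+// filterable = [_typeName, category, inStock, name, price] and
+// sortable = [category, name, price].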
+func deriveAttributes(schema searchindex.IndexConfig) (filterable, sortable []string) { + filterableSet := map[string]struct{}{ + reservedTypeNameField: {}, + } + sortableSet := map[string]struct{}{} + + for _, fc := range schema.Fields { + if fc.Filterable || fc.Autocomplete { + filterableSet[fc.Name] = struct{}{} + } + if fc.Sortable { + sortableSet[fc.Name] = struct{}{} + } + } + + filterable = make([]string, 0, len(filterableSet)) + for k := range filterableSet { + filterable = append(filterable, k) + } + sort.Strings(filterable) + + sortable = make([]string, 0, len(sortableSet)) + for k := range sortableSet { + sortable = append(sortable, k) + } + sort.Strings(sortable) + + return filterable, sortable +} + +// Index implements searchindex.Index backed by a Meilisearch server. +type Index struct { + name string + config Config + schema searchindex.IndexConfig + client *http.Client +} + +// documentID computes a deterministic string ID from a DocumentIdentity. +// Meilisearch only allows alphanumeric characters, hyphens (-), and underscores (_). +// Format: TypeName_key1-val1_key2-val2 (keys sorted alphabetically). +func documentID(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return sanitizeMeiliID(id.TypeName) + } + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(sanitizeMeiliID(id.TypeName)) + b.WriteByte('_') + for i, k := range keys { + if i > 0 { + b.WriteByte('_') + } + b.WriteString(sanitizeMeiliID(k)) + b.WriteByte('-') + b.WriteString(sanitizeMeiliID(fmt.Sprintf("%v", id.KeyFields[k]))) + } + return b.String() +} + +// sanitizeMeiliID replaces characters not allowed in Meilisearch document IDs. +func sanitizeMeiliID(s string) string { + var b strings.Builder + for _, r := range s { + if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' { + b.WriteRune(r) + } + } + return b.String() +} + +// buildDoc converts an EntityDocument into a flat map suitable for Meilisearch indexing. +// It includes all Fields, the _docId primary key, and internal metadata fields. +func buildDoc(doc searchindex.EntityDocument) (map[string]any, error) { + m := make(map[string]any, len(doc.Fields)+3) + for k, v := range doc.Fields { + m[k] = v + } + m[reservedDocIDField] = documentID(doc.Identity) + m[reservedTypeNameField] = doc.Identity.TypeName + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return nil, fmt.Errorf("meilisearch: failed to marshal key fields: %w", err) + } + m[reservedKeyFieldsField] = string(keyFieldsJSON) + return m, nil +} + +// dateFieldSet returns the set of field names that are DATE or DATETIME type. +func (idx *Index) dateFieldSet() map[string]bool { + m := make(map[string]bool) + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeDate || fc.Type == searchindex.FieldTypeDateTime { + m[fc.Name] = true + } + } + return m +} + +// dateToUnix parses an ISO 8601 date or datetime string and returns the unix timestamp. +func dateToUnix(s string) (int64, error) { + for _, layout := range []string{ + time.RFC3339Nano, + time.RFC3339, + "2006-01-02T15:04:05", + time.DateOnly, + } { + if t, err := time.Parse(layout, s); err == nil { + return t.Unix(), nil + } + } + return 0, fmt.Errorf("meilisearch: cannot parse date %q", s) +} + +// convertDateFieldsInDoc converts ISO date strings to unix timestamps for date fields. 
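+// For example, a hypothetical DATE field {"createdAt": "2024-01-02T15:04:05Z"}
+// becomes {"createdAt": 1704207845}.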
+func convertDateFieldsInDoc(doc map[string]any, dateFields map[string]bool) error { + for name := range dateFields { + v, ok := doc[name] + if !ok { + continue + } + s, ok := v.(string) + if !ok { + continue + } + ts, err := dateToUnix(s) + if err != nil { + return err + } + doc[name] = ts + } + return nil +} + +// convertDateFilters walks a filter tree and converts string date values to +// unix timestamps for fields that are DATE or DATETIME type. +func convertDateFilters(f *searchindex.Filter, dateFields map[string]bool) { + if f == nil { + return + } + for _, child := range f.And { + convertDateFilters(child, dateFields) + } + for _, child := range f.Or { + convertDateFilters(child, dateFields) + } + if f.Not != nil { + convertDateFilters(f.Not, dateFields) + } + if f.Term != nil && dateFields[f.Term.Field] { + if s, ok := f.Term.Value.(string); ok { + if ts, err := dateToUnix(s); err == nil { + f.Term.Value = ts + } + } + } + if f.Range != nil && dateFields[f.Range.Field] { + convertRangeValue := func(v any) any { + if s, ok := v.(string); ok { + if ts, err := dateToUnix(s); err == nil { + return ts + } + } + return v + } + if f.Range.GT != nil { + f.Range.GT = convertRangeValue(f.Range.GT) + } + if f.Range.GTE != nil { + f.Range.GTE = convertRangeValue(f.Range.GTE) + } + if f.Range.LT != nil { + f.Range.LT = convertRangeValue(f.Range.LT) + } + if f.Range.LTE != nil { + f.Range.LTE = convertRangeValue(f.Range.LTE) + } + } +} + +// IndexDocument indexes a single document. +func (idx *Index) IndexDocument(ctx context.Context, doc searchindex.EntityDocument) error { + return idx.IndexDocuments(ctx, []searchindex.EntityDocument{doc}) +} + +// IndexDocuments indexes a batch of documents. +func (idx *Index) IndexDocuments(ctx context.Context, docs []searchindex.EntityDocument) error { + dateFields := idx.dateFieldSet() + msDocs := make([]map[string]any, 0, len(docs)) + for _, doc := range docs { + m, err := buildDoc(doc) + if err != nil { + return err + } + if len(dateFields) > 0 { + if err := convertDateFieldsInDoc(m, dateFields); err != nil { + return err + } + } + msDocs = append(msDocs, m) + } + + taskUID, err := idx.doTaskRequest(ctx, http.MethodPost, "/indexes/"+idx.name+"/documents", msDocs) + if err != nil { + return fmt.Errorf("meilisearch: index documents: %w", err) + } + return idx.waitForTask(ctx, taskUID) +} + +// DeleteDocument deletes a single document by identity. +func (idx *Index) DeleteDocument(ctx context.Context, id searchindex.DocumentIdentity) error { + return idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{id}) +} + +// DeleteDocuments deletes a batch of documents by identity. +func (idx *Index) DeleteDocuments(ctx context.Context, ids []searchindex.DocumentIdentity) error { + docIDs := make([]string, 0, len(ids)) + for _, id := range ids { + docIDs = append(docIDs, documentID(id)) + } + + taskUID, err := idx.doTaskRequest(ctx, http.MethodPost, "/indexes/"+idx.name+"/documents/delete-batch", docIDs) + if err != nil { + return fmt.Errorf("meilisearch: delete documents: %w", err) + } + return idx.waitForTask(ctx, taskUID) +} + +// Search performs a search query and returns results. +func (idx *Index) Search(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + body := map[string]any{} + + // Text query. + if req.TextQuery != "" { + body["q"] = req.TextQuery + } else { + body["q"] = "" + } + + // Build filter string. 
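+	// For example, TypeName "Product" combined with a term filter on category
+	// produces: _typeName = "Product" AND category = "Footwear"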
+ filterParts := []string{} + if req.TypeName != "" { + filterParts = append(filterParts, fmt.Sprintf("%s = %q", reservedTypeNameField, req.TypeName)) + } + if req.Filter != nil { + dateFields := idx.dateFieldSet() + if len(dateFields) > 0 { + convertDateFilters(req.Filter, dateFields) + } + filterStr, err := translateFilter(req.Filter) + if err != nil { + return nil, err + } + if filterStr != "" { + filterParts = append(filterParts, filterStr) + } + } + if len(filterParts) > 0 { + body["filter"] = strings.Join(filterParts, " AND ") + } + + // Sort. + if len(req.Sort) > 0 { + sortArr := make([]string, 0, len(req.Sort)) + for _, sf := range req.Sort { + dir := "desc" + if sf.Ascending { + dir = "asc" + } + sortArr = append(sortArr, sf.Field+":"+dir) + } + body["sort"] = sortArr + } + + // Limit and offset. + limit := req.Limit + if limit <= 0 { + limit = 10 + } + body["limit"] = limit + if req.Offset > 0 { + body["offset"] = req.Offset + } + + // Facets. + if len(req.Facets) > 0 { + facetFields := make([]string, 0, len(req.Facets)) + for _, fr := range req.Facets { + facetFields = append(facetFields, fr.Field) + } + body["facets"] = facetFields + } + + // Text field restriction: Meilisearch supports attributesToSearchOn. + // Per-field weights are not supported at query time by Meilisearch. + if len(req.TextFields) > 0 { + names := make([]string, len(req.TextFields)) + for i, tf := range req.TextFields { + names[i] = tf.Name + } + body["attributesToSearchOn"] = names + } + + respBody, err := idx.doRequest(ctx, http.MethodPost, "/indexes/"+idx.name+"/search", body) + if err != nil { + return nil, fmt.Errorf("meilisearch: search: %w", err) + } + + var msResult meiliSearchResponse + if err := json.Unmarshal(respBody, &msResult); err != nil { + return nil, fmt.Errorf("meilisearch: unmarshal search response: %w", err) + } + + hits := make([]searchindex.SearchHit, 0, len(msResult.Hits)) + for _, hitRaw := range msResult.Hits { + var hitMap map[string]any + if err := json.Unmarshal(hitRaw, &hitMap); err != nil { + return nil, fmt.Errorf("meilisearch: unmarshal hit: %w", err) + } + sh, err := convertHit(hitMap) + if err != nil { + return nil, err + } + hits = append(hits, sh) + } + + facets := convertFacets(msResult.FacetDistribution) + + return &searchindex.SearchResult{ + Hits: hits, + TotalCount: msResult.EstimatedTotalHits, + Facets: facets, + }, nil +} + +// Autocomplete returns terms matching the given prefix using a search query. +// Searches for documents containing terms that match the prefix, then extracts +// unique matching tokens from the field values. +func (idx *Index) Autocomplete(ctx context.Context, req searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) { + limit := req.Limit + if limit <= 0 { + limit = 10 + } + + prefix := strings.ToLower(req.Prefix) + + // Search for documents matching the prefix. + body := map[string]any{ + "q": prefix, + "limit": 100, + "attributesToSearchOn": []string{req.Field}, + "attributesToRetrieve": []string{req.Field}, + } + + path := fmt.Sprintf("/indexes/%s/search", url.PathEscape(idx.name)) + respBody, err := idx.doRequest(ctx, "POST", path, body) + if err != nil { + return nil, fmt.Errorf("meilisearch: autocomplete search failed: %w", err) + } + + var result struct { + Hits []map[string]any `json:"hits"` + } + if err := json.Unmarshal(respBody, &result); err != nil { + return nil, fmt.Errorf("meilisearch: unmarshal autocomplete response: %w", err) + } + + // Extract unique terms from the field values that match the prefix. 
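+	// For example, prefix "sho" over names "Running Shoes" and
+	// "Basketball Shoes" yields the single term "shoes" with count 2.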
+ termCounts := make(map[string]int) + for _, hit := range result.Hits { + val, ok := hit[req.Field] + if !ok { + continue + } + text, ok := val.(string) + if !ok { + continue + } + for _, token := range tokenize(text) { + if strings.HasPrefix(token, prefix) { + termCounts[token]++ + } + } + } + + terms := make([]searchindex.AutocompleteTerm, 0, len(termCounts)) + for term, count := range termCounts { + terms = append(terms, searchindex.AutocompleteTerm{Term: term, Count: count}) + } + sort.Slice(terms, func(i, j int) bool { + if terms[i].Count != terms[j].Count { + return terms[i].Count > terms[j].Count + } + return terms[i].Term < terms[j].Term + }) + if len(terms) > limit { + terms = terms[:limit] + } + + return &searchindex.AutocompleteResult{Terms: terms}, nil +} + +// tokenize splits text into lowercase tokens, mimicking standard text analysis. +func tokenize(text string) []string { + var tokens []string + var current strings.Builder + for _, r := range strings.ToLower(text) { + if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') { + current.WriteRune(r) + } else if current.Len() > 0 { + tokens = append(tokens, current.String()) + current.Reset() + } + } + if current.Len() > 0 { + tokens = append(tokens, current.String()) + } + return tokens +} + +// Close releases resources. For the HTTP-based Meilisearch client, this is a no-op. +func (idx *Index) Close() error { + return nil +} + +// ---------- HTTP helpers ---------- + +// doRequest performs an HTTP request and returns the response body bytes. +func (idx *Index) doRequest(ctx context.Context, method, path string, body any) ([]byte, error) { + var bodyReader io.Reader + if body != nil { + data, err := json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("meilisearch: marshal request body: %w", err) + } + bodyReader = bytes.NewReader(data) + } + + url := idx.config.Host + path + httpReq, err := http.NewRequestWithContext(ctx, method, url, bodyReader) + if err != nil { + return nil, fmt.Errorf("meilisearch: create request: %w", err) + } + httpReq.Header.Set("Content-Type", "application/json") + if idx.config.APIKey != "" { + httpReq.Header.Set("Authorization", "Bearer "+idx.config.APIKey) + } + + resp, err := idx.client.Do(httpReq) + if err != nil { + return nil, fmt.Errorf("meilisearch: do request %s %s: %w", method, path, err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("meilisearch: read response body: %w", err) + } + + if resp.StatusCode >= 400 { + return nil, fmt.Errorf("meilisearch: %s %s returned %d: %s", method, path, resp.StatusCode, string(respBody)) + } + + return respBody, nil +} + +// taskResponse represents a Meilisearch async task response. +type taskResponse struct { + TaskUID int `json:"taskUid"` + Status string `json:"status"` +} + +// taskStatusResponse represents a Meilisearch task status response from GET /tasks/{uid}. +type taskStatusResponse struct { + UID int `json:"uid"` + Status string `json:"status"` + Error *struct { + Message string `json:"message"` + Code string `json:"code"` + Type string `json:"type"` + } `json:"error,omitempty"` +} + +// doTaskRequest performs an HTTP request that returns a task UID. 
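+// Meilisearch applies writes (index creation, settings, document updates,
+// deletes) asynchronously; callers pass the returned UID to waitForTask to
+// block until the task succeeds or fails.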
+func (idx *Index) doTaskRequest(ctx context.Context, method, path string, body any) (int, error) { + respBody, err := idx.doRequest(ctx, method, path, body) + if err != nil { + return 0, err + } + + var task taskResponse + if err := json.Unmarshal(respBody, &task); err != nil { + return 0, fmt.Errorf("meilisearch: unmarshal task response: %w (body: %s)", err, string(respBody)) + } + return task.TaskUID, nil +} + +// waitForTask polls GET /tasks/{taskUid} until status is "succeeded" or "failed". +func (idx *Index) waitForTask(ctx context.Context, taskUID int) error { + path := "/tasks/" + strconv.Itoa(taskUID) + deadline := time.Now().Add(taskPollTimeout) + + for { + if time.Now().After(deadline) { + return fmt.Errorf("meilisearch: task %d timed out after %v", taskUID, taskPollTimeout) + } + + respBody, err := idx.doRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return fmt.Errorf("meilisearch: poll task %d: %w", taskUID, err) + } + + var status taskStatusResponse + if err := json.Unmarshal(respBody, &status); err != nil { + return fmt.Errorf("meilisearch: unmarshal task status: %w", err) + } + + switch status.Status { + case "succeeded": + return nil + case "failed": + errMsg := "unknown error" + if status.Error != nil { + errMsg = status.Error.Message + } + return fmt.Errorf("meilisearch: task %d failed: %s", taskUID, errMsg) + case "enqueued", "processing": + // Still running; wait and retry. + default: + return fmt.Errorf("meilisearch: task %d unexpected status: %s", taskUID, status.Status) + } + + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(taskPollInterval): + } + } +} + +// ---------- Search response types ---------- + +// meiliSearchResponse represents the Meilisearch search response. +type meiliSearchResponse struct { + Hits []json.RawMessage `json:"hits"` + EstimatedTotalHits int `json:"estimatedTotalHits"` + FacetDistribution map[string]map[string]int `json:"facetDistribution,omitempty"` +} + +// ---------- Hit conversion ---------- + +// convertHit transforms a Meilisearch search hit map into a searchindex.SearchHit. +func convertHit(hitMap map[string]any) (searchindex.SearchHit, error) { + identity, err := extractIdentity(hitMap) + if err != nil { + return searchindex.SearchHit{}, err + } + + // Build representation, excluding internal fields. + representation := make(map[string]any, len(hitMap)) + for k, v := range hitMap { + if k == reservedDocIDField || k == reservedTypeNameField || k == reservedKeyFieldsField { + continue + } + representation[k] = v + } + + // Add __typename. + representation["__typename"] = identity.TypeName + // Merge key fields into representation. + for k, v := range identity.KeyFields { + representation[k] = v + } + + return searchindex.SearchHit{ + Identity: identity, + Representation: representation, + }, nil +} + +// extractIdentity reconstructs a DocumentIdentity from stored Meilisearch fields. 
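+// For example, a hit carrying _typeName "Product" and _keyFieldsJSON
+// `{"id":"1"}` yields DocumentIdentity{TypeName: "Product",
+// KeyFields: map[string]any{"id": "1"}}.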
+func extractIdentity(fields map[string]any) (searchindex.DocumentIdentity, error) { + typeName, _ := fields[reservedTypeNameField].(string) + keyFieldsRaw, _ := fields[reservedKeyFieldsField].(string) + + var keyFields map[string]any + if keyFieldsRaw != "" { + if err := json.Unmarshal([]byte(keyFieldsRaw), &keyFields); err != nil { + return searchindex.DocumentIdentity{}, fmt.Errorf("meilisearch: failed to unmarshal key fields: %w", err) + } + } + if keyFields == nil { + keyFields = make(map[string]any) + } + + return searchindex.DocumentIdentity{ + TypeName: typeName, + KeyFields: keyFields, + }, nil +} + +// ---------- Facet conversion ---------- + +// convertFacets transforms Meilisearch facetDistribution into the searchindex format. +func convertFacets(facetDist map[string]map[string]int) map[string]searchindex.FacetResult { + if len(facetDist) == 0 { + return nil + } + facets := make(map[string]searchindex.FacetResult, len(facetDist)) + for field, counts := range facetDist { + values := make([]searchindex.FacetValue, 0, len(counts)) + for val, count := range counts { + values = append(values, searchindex.FacetValue{ + Value: val, + Count: count, + }) + } + // Sort by count descending for deterministic output. + sort.Slice(values, func(i, j int) bool { + if values[i].Count != values[j].Count { + return values[i].Count > values[j].Count + } + return values[i].Value < values[j].Value + }) + facets[field] = searchindex.FacetResult{Values: values} + } + return facets +} + +// ---------- Filter translation ---------- + +// translateFilter recursively converts a searchindex.Filter tree into a Meilisearch +// filter string. +func translateFilter(f *searchindex.Filter) (string, error) { + if f == nil { + return "", nil + } + + // AND + if len(f.And) > 0 { + parts := make([]string, 0, len(f.And)) + for _, child := range f.And { + s, err := translateFilter(child) + if err != nil { + return "", err + } + if s != "" { + parts = append(parts, "("+s+")") + } + } + if len(parts) == 0 { + return "", nil + } + return strings.Join(parts, " AND "), nil + } + + // OR + if len(f.Or) > 0 { + parts := make([]string, 0, len(f.Or)) + for _, child := range f.Or { + s, err := translateFilter(child) + if err != nil { + return "", err + } + if s != "" { + parts = append(parts, "("+s+")") + } + } + if len(parts) == 0 { + return "", nil + } + return strings.Join(parts, " OR "), nil + } + + // NOT + if f.Not != nil { + inner, err := translateFilter(f.Not) + if err != nil { + return "", err + } + if inner == "" { + return "", nil + } + return "NOT (" + inner + ")", nil + } + + // Term + if f.Term != nil { + return translateTermFilter(f.Term), nil + } + + // Terms (IN) + if f.Terms != nil { + return translateTermsFilter(f.Terms), nil + } + + // Range + if f.Range != nil { + return translateRangeFilter(f.Range) + } + + // Prefix: Meilisearch does not natively support prefix filters on filterable + // attributes. As a best-effort approximation we cannot do a true prefix match + // with the filter syntax, so we return an error indicating this limitation. + // Callers should use TextQuery for prefix-style matching. + if f.Prefix != nil { + // Meilisearch does not support prefix filters in the filter parameter. + // As a workaround, we return an unsupported error. + return "", fmt.Errorf("meilisearch: prefix filter is not supported in Meilisearch filter syntax") + } + + // Exists: Meilisearch supports "field EXISTS". 
+ if f.Exists != nil { + return f.Exists.Field + " EXISTS", nil + } + + return "", nil +} + +// translateTermFilter converts a TermFilter to a Meilisearch filter expression. +func translateTermFilter(tf *searchindex.TermFilter) string { + return tf.Field + " = " + formatFilterValue(tf.Value) +} + +// translateTermsFilter converts a TermsFilter (IN) to a Meilisearch filter expression. +func translateTermsFilter(tf *searchindex.TermsFilter) string { + if len(tf.Values) == 0 { + return "" + } + vals := make([]string, 0, len(tf.Values)) + for _, v := range tf.Values { + vals = append(vals, formatFilterValue(v)) + } + return tf.Field + " IN [" + strings.Join(vals, ", ") + "]" +} + +// translateRangeFilter converts a RangeFilter to a Meilisearch filter expression. +func translateRangeFilter(rf *searchindex.RangeFilter) (string, error) { + var parts []string + + if rf.GTE != nil { + v, err := formatNumericValue(rf.GTE) + if err != nil { + return "", fmt.Errorf("meilisearch: range GTE: %w", err) + } + parts = append(parts, rf.Field+" >= "+v) + } else if rf.HasGT && rf.GT != nil { + v, err := formatNumericValue(rf.GT) + if err != nil { + return "", fmt.Errorf("meilisearch: range GT: %w", err) + } + parts = append(parts, rf.Field+" > "+v) + } + + if rf.LTE != nil { + v, err := formatNumericValue(rf.LTE) + if err != nil { + return "", fmt.Errorf("meilisearch: range LTE: %w", err) + } + parts = append(parts, rf.Field+" <= "+v) + } else if rf.HasLT && rf.LT != nil { + v, err := formatNumericValue(rf.LT) + if err != nil { + return "", fmt.Errorf("meilisearch: range LT: %w", err) + } + parts = append(parts, rf.Field+" < "+v) + } + + if len(parts) == 0 { + return "", nil + } + return strings.Join(parts, " AND "), nil +} + +// formatFilterValue formats a value for use in a Meilisearch filter expression. +// Strings are quoted; numbers and bools are unquoted. +func formatFilterValue(v any) string { + switch val := v.(type) { + case string: + // Escape double quotes inside the string. + escaped := strings.ReplaceAll(val, `\`, `\\`) + escaped = strings.ReplaceAll(escaped, `"`, `\"`) + return `"` + escaped + `"` + case float64: + return strconv.FormatFloat(val, 'f', -1, 64) + case float32: + return strconv.FormatFloat(float64(val), 'f', -1, 32) + case int: + return strconv.Itoa(val) + case int64: + return strconv.FormatInt(val, 10) + case int32: + return strconv.FormatInt(int64(val), 10) + case bool: + if val { + return "true" + } + return "false" + case json.Number: + return val.String() + default: + return fmt.Sprintf("%q", fmt.Sprintf("%v", v)) + } +} + +// formatNumericValue converts a numeric value to its string representation for +// range filter expressions. 
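+// For example, float64(10.0) formats as "10" and int64(42) as "42";
+// non-numeric values return an error.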
+func formatNumericValue(v any) (string, error) { + switch n := v.(type) { + case float64: + return strconv.FormatFloat(n, 'f', -1, 64), nil + case float32: + return strconv.FormatFloat(float64(n), 'f', -1, 32), nil + case int: + return strconv.Itoa(n), nil + case int64: + return strconv.FormatInt(n, 10), nil + case int32: + return strconv.FormatInt(int64(n), 10), nil + case json.Number: + return n.String(), nil + default: + return "", fmt.Errorf("cannot convert %T to numeric", v) + } +} diff --git a/v2/pkg/searchindex/meilisearch/meilisearch_test.go b/v2/pkg/searchindex/meilisearch/meilisearch_test.go new file mode 100644 index 0000000000..4e968a8bf4 --- /dev/null +++ b/v2/pkg/searchindex/meilisearch/meilisearch_test.go @@ -0,0 +1,475 @@ +//go:build integration + +package meilisearch + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +const testMasterKey = "test-master-key" + +// startMeilisearch starts a Meilisearch container and returns the host URL and a cleanup function. +func startMeilisearch(t *testing.T) string { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "getmeili/meilisearch:v1.6", + ExposedPorts: []string{"7700/tcp"}, + Env: map[string]string{ + "MEILI_MASTER_KEY": testMasterKey, + }, + WaitingFor: wait.ForHTTP("/health").WithPort("7700/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + require.NoError(t, err, "failed to start Meilisearch container") + t.Cleanup(func() { + _ = container.Terminate(ctx) + }) + + host, err := container.Host(ctx) + require.NoError(t, err) + port, err := container.MappedPort(ctx, "7700") + require.NoError(t, err) + + return fmt.Sprintf("http://%s:%s", host, port.Port()) +} + +func newTestIndex(t *testing.T, meiliHost string) searchindex.Index { + t.Helper() + factory := NewFactory() + schema := searchindex.IndexConfig{ + Name: "test", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + }, + } + + cfg := Config{ + Host: meiliHost, + APIKey: testMasterKey, + } + cfgJSON, err := json.Marshal(cfg) + require.NoError(t, err) + + // Use a unique index name per test to avoid collisions. 
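+	// A Unix-nanosecond suffix contains only digits, which Meilisearch accepts
+	// in index UIDs alongside letters, hyphens and underscores.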
+ indexName := fmt.Sprintf("test_%d", time.Now().UnixNano()) + idx, err := factory.CreateIndex(context.Background(), indexName, schema, cfgJSON) + require.NoError(t, err, "CreateIndex") + t.Cleanup(func() { _ = idx.Close() }) + return idx +} + +func populateTestData(t *testing.T, idx searchindex.Index) { + t.Helper() + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + }, + } + err := idx.IndexDocuments(context.Background(), docs) + require.NoError(t, err, "IndexDocuments") +} + +func TestMeilisearchLifecycle(t *testing.T) { + meiliHost := startMeilisearch(t) + + t.Run("full lifecycle", func(t *testing.T) { + idx := newTestIndex(t, meiliHost) + populateTestData(t, idx) + + t.Run("text search", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + Limit: 10, + }) + require.NoError(t, err) + assert.GreaterOrEqual(t, result.TotalCount, 2, "expected at least 2 hits for 'shoes'") + }) + + t.Run("text search with field restriction", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + TextFields: []searchindex.TextFieldWeight{{Name: "name"}}, + Limit: 10, + }) + require.NoError(t, err) + assert.GreaterOrEqual(t, result.TotalCount, 2, "expected at least 2 hits for 'shoes' in name") + }) + + t.Run("term filter on keyword field", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 3, result.TotalCount, "expected 3 hits for category=Footwear") + }) + + t.Run("boolean filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 1, result.TotalCount, "expected 1 hit for inStock=false") + }) + + t.Run("numeric range filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: 30.0, + LTE: 100.0, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 2, result.TotalCount, "expected 2 hits for price 30-100") + }) + + t.Run("AND filter", func(t *testing.T) { + result, err := 
idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 3, result.TotalCount, "expected 3 hits for Footwear AND inStock") + }) + + t.Run("OR filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, result.TotalCount, "expected 4 hits for Footwear OR Accessories") + }) + + t.Run("NOT filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 1, result.TotalCount, "expected 1 hit for NOT Footwear") + }) + + t.Run("terms filter (IN)", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"Footwear", "Accessories"}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, result.TotalCount, "expected 4 hits for category IN [Footwear, Accessories]") + }) + + t.Run("sorting ascending", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 10, + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(result.Hits), 4) + // First hit should be cheapest (Wool Socks at 12.99). + assert.Equal(t, "Wool Socks", result.Hits[0].Representation["name"]) + }) + + t.Run("sorting descending", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: false}}, + Limit: 10, + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(result.Hits), 4) + // First hit should be most expensive (Basketball Shoes at 129.99). 
+ assert.Equal(t, "Basketball Shoes", result.Hits[0].Representation["name"]) + }) + + t.Run("pagination", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 2, + Offset: 2, + }) + require.NoError(t, err) + assert.Equal(t, 2, len(result.Hits), "expected 2 hits with offset") + }) + + t.Run("facets", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Facets: []searchindex.FacetRequest{{Field: "category", Size: 10}}, + Limit: 10, + }) + require.NoError(t, err) + facet, ok := result.Facets["category"] + require.True(t, ok, "expected category facet") + assert.GreaterOrEqual(t, len(facet.Values), 2, "expected at least 2 facet values") + }) + + t.Run("search hit identity", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "running shoes", + Limit: 1, + }) + require.NoError(t, err) + require.NotEmpty(t, result.Hits, "expected at least 1 hit") + hit := result.Hits[0] + assert.Equal(t, "Product", hit.Identity.TypeName) + assert.Equal(t, "Product", hit.Representation["__typename"]) + }) + + t.Run("type name filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TypeName: "Product", + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, result.TotalCount, "expected 4 products") + }) + + t.Run("prefix filter is unsupported", func(t *testing.T) { + _, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Prefix: &searchindex.PrefixFilter{Field: "category", Value: "Foot"}, + }, + Limit: 10, + }) + require.Error(t, err, "prefix filter should return an error in Meilisearch") + assert.Contains(t, err.Error(), "prefix filter is not supported") + }) + + t.Run("exists filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Exists: &searchindex.ExistsFilter{Field: "category"}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, result.TotalCount, "expected 4 hits where category exists") + }) + + t.Run("upsert overwrites", func(t *testing.T) { + // Re-index product id="1" with an updated name. + err := idx.IndexDocument(context.Background(), searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Trail Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }) + require.NoError(t, err, "upsert IndexDocument") + + // Total count should still be 4 (upsert, not insert). + allResult, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + require.NoError(t, err) + assert.Equal(t, 4, allResult.TotalCount, "expected 4 documents after upsert (not 5)") + + // Searching for "trail" should find the updated document. 
+ trailResult, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "trail", + Limit: 10, + }) + require.NoError(t, err) + require.NotEmpty(t, trailResult.Hits, "expected at least 1 hit for 'trail'") + + found := false + for _, hit := range trailResult.Hits { + if hit.Representation["name"] == "Trail Running Shoes" { + found = true + break + } + } + assert.True(t, found, "expected to find 'Trail Running Shoes' in search results") + }) + }) + + t.Run("delete documents", func(t *testing.T) { + idx := newTestIndex(t, meiliHost) + populateTestData(t, idx) + + // Delete Running Shoes. + err := idx.DeleteDocument(context.Background(), searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }) + require.NoError(t, err, "DeleteDocument") + + // Should now have 3 documents. + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + require.NoError(t, err) + assert.Equal(t, 3, result.TotalCount, "expected 3 documents after delete") + }) + + t.Run("batch delete documents", func(t *testing.T) { + idx := newTestIndex(t, meiliHost) + populateTestData(t, idx) + + // Delete two documents. + err := idx.DeleteDocuments(context.Background(), []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + }) + require.NoError(t, err, "DeleteDocuments") + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + require.NoError(t, err) + assert.Equal(t, 2, result.TotalCount, "expected 2 documents after batch delete") + }) + + t.Run("index single document", func(t *testing.T) { + idx := newTestIndex(t, meiliHost) + + err := idx.IndexDocument(context.Background(), searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "100"}}, + Fields: map[string]any{"name": "Single Item", "category": "Test", "price": 9.99, "inStock": true}, + }) + require.NoError(t, err, "IndexDocument") + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "Single Item", + Limit: 10, + }) + require.NoError(t, err) + assert.GreaterOrEqual(t, result.TotalCount, 1, "expected at least 1 hit") + }) +} + +func TestDocumentID(t *testing.T) { + id := documentID(searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123", "sku": "ABC"}, + }) + // Keys should be sorted alphabetically, using Meilisearch-safe characters. 
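+	// For example, {TypeName: "Product", KeyFields: {"id": "123", "sku": "ABC"}}
+	// yields "Product_id-123_sku-ABC": segments joined with '_' and each key
+	// joined to its value with '-', since Meilisearch document IDs may only
+	// contain alphanumerics, hyphens, and underscores.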
+ expected := "Product_id-123_sku-ABC" + assert.Equal(t, expected, id) +} + +func TestFilterTranslation(t *testing.T) { + tests := []struct { + name string + filter *searchindex.Filter + expected string + }{ + { + name: "term string", + filter: &searchindex.Filter{Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + expected: `category = "Footwear"`, + }, + { + name: "term numeric", + filter: &searchindex.Filter{Term: &searchindex.TermFilter{Field: "price", Value: 42.5}}, + expected: `price = 42.5`, + }, + { + name: "term bool", + filter: &searchindex.Filter{Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + expected: `inStock = true`, + }, + { + name: "terms IN", + filter: &searchindex.Filter{Terms: &searchindex.TermsFilter{ + Field: "category", Values: []any{"Footwear", "Accessories"}, + }}, + expected: `category IN ["Footwear", "Accessories"]`, + }, + { + name: "range GTE and LTE", + filter: &searchindex.Filter{Range: &searchindex.RangeFilter{ + Field: "price", GTE: 10.0, LTE: 100.0, + }}, + expected: `price >= 10 AND price <= 100`, + }, + { + name: "exists", + filter: &searchindex.Filter{Exists: &searchindex.ExistsFilter{Field: "category"}}, + expected: `category EXISTS`, + }, + { + name: "AND", + filter: &searchindex.Filter{And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }}, + expected: `(category = "Footwear") AND (inStock = true)`, + }, + { + name: "OR", + filter: &searchindex.Filter{Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + }}, + expected: `(category = "Footwear") OR (category = "Accessories")`, + }, + { + name: "NOT", + filter: &searchindex.Filter{Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }}, + expected: `NOT (category = "Footwear")`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := translateFilter(tt.filter) + require.NoError(t, err) + assert.Equal(t, tt.expected, result) + }) + } +} diff --git a/v2/pkg/searchindex/pgvector/pgvector.go b/v2/pkg/searchindex/pgvector/pgvector.go new file mode 100644 index 0000000000..35d215a7e2 --- /dev/null +++ b/v2/pkg/searchindex/pgvector/pgvector.go @@ -0,0 +1,1388 @@ +// Package pgvector implements the searchindex.Index and searchindex.IndexFactory +// interfaces for PostgreSQL with pgvector. +// +// It uses only database/sql from the standard library -- no external pgvector +// SDK or driver is imported. The caller is responsible for registering a +// PostgreSQL driver (e.g. "github.com/lib/pq") and providing an open *sql.DB. +// +// Supports: vector search (pgvector <=> operator), full-text (tsvector/tsquery), +// CTE-based hybrid search with Reciprocal Rank Fusion (RRF). +// Filter translation: searchindex.Filter -> SQL WHERE clauses with parameterized queries. +package pgvector + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "sort" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Compile-time interface conformance checks. +var ( + _ searchindex.Index = (*Index)(nil) + _ searchindex.IndexFactory = (*Factory)(nil) +) + +// defaultTablePrefix is used when Config.TablePrefix is empty. 
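+// For example, an index named "products" is stored in a table called
+// "searchindex_products".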
+const defaultTablePrefix = "searchindex_" + +// defaultRRFConstant is the k parameter in Reciprocal Rank Fusion: 1/(k+rank). +const defaultRRFConstant = 60 + +// Config holds pgvector-specific configuration. +type Config struct { + TablePrefix string `json:"table_prefix,omitempty"` +} + +// Factory implements searchindex.IndexFactory for pgvector. +// It holds a reference to a *sql.DB that must already be connected with a +// PostgreSQL driver registered by the caller. +type Factory struct { + DB *sql.DB +} + +// NewFactory returns a new pgvector IndexFactory backed by the given database +// connection. The caller is responsible for importing a PostgreSQL driver +// (e.g. _ "github.com/lib/pq") and opening the *sql.DB. +func NewFactory(db *sql.DB) *Factory { + return &Factory{DB: db} +} + +// CreateIndex creates a new index backed by a PostgreSQL table. It executes +// DDL statements to create the pgvector extension (if not present), the table, +// and appropriate indexes (GIN for tsvector, HNSW for vector columns, B-tree +// for filterable scalar columns). +func (f *Factory) CreateIndex(ctx context.Context, name string, schema searchindex.IndexConfig, configJSON []byte) (searchindex.Index, error) { + var cfg Config + if len(configJSON) > 0 { + if err := json.Unmarshal(configJSON, &cfg); err != nil { + return nil, fmt.Errorf("pgvector: invalid config: %w", err) + } + } + + prefix := cfg.TablePrefix + if prefix == "" { + prefix = defaultTablePrefix + } + + tableName := prefix + sanitizeIdentifier(name) + + idx := &Index{ + db: f.DB, + name: name, + tableName: tableName, + schema: schema, + prefix: prefix, + } + + // Classify fields by type for later use. + idx.classifyFields() + + if err := idx.createTable(ctx); err != nil { + return nil, err + } + + return idx, nil +} + +// Index implements searchindex.Index backed by a PostgreSQL table with pgvector. +type Index struct { + db *sql.DB + name string + tableName string + schema searchindex.IndexConfig + prefix string + + // Cached field classifications. + textFields []string // field names of type TEXT + vectorFields map[string]int // field name -> dimensions + allFieldNames []string // all user-defined field names in schema order + fieldTypes map[string]fieldDef // field name -> definition +} + +// fieldDef stores the type information for a single field. +type fieldDef struct { + config searchindex.FieldConfig + colSQL string // SQL column type +} + +// classifyFields pre-computes field classifications from the schema. +func (idx *Index) classifyFields() { + idx.vectorFields = make(map[string]int) + idx.fieldTypes = make(map[string]fieldDef, len(idx.schema.Fields)) + + for _, fc := range idx.schema.Fields { + colSQL := fieldColumnType(fc) + idx.fieldTypes[fc.Name] = fieldDef{config: fc, colSQL: colSQL} + idx.allFieldNames = append(idx.allFieldNames, fc.Name) + + switch fc.Type { + case searchindex.FieldTypeText: + idx.textFields = append(idx.textFields, fc.Name) + case searchindex.FieldTypeVector: + idx.vectorFields[fc.Name] = fc.Dimensions + } + } +} + +// fieldColumnType returns the SQL column type string for a FieldConfig. 
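+// For example, a NUMERIC field maps to DOUBLE PRECISION, and a VECTOR field
+// with Dimensions: 3 maps to vector(3).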
+func fieldColumnType(fc searchindex.FieldConfig) string { + switch fc.Type { + case searchindex.FieldTypeText: + return "TEXT" + case searchindex.FieldTypeKeyword: + return "TEXT" + case searchindex.FieldTypeNumeric: + return "DOUBLE PRECISION" + case searchindex.FieldTypeBool: + return "BOOLEAN" + case searchindex.FieldTypeVector: + return fmt.Sprintf("vector(%d)", fc.Dimensions) + case searchindex.FieldTypeGeo: + // Store geo as JSONB for now; PostGIS support can be added later. + return "JSONB" + case searchindex.FieldTypeDate: + return "DATE" + case searchindex.FieldTypeDateTime: + return "TIMESTAMPTZ" + default: + return "TEXT" + } +} + +// createTable executes DDL to create the extension, table, and indexes. +func (idx *Index) createTable(ctx context.Context) error { + // Enable pgvector extension. + if _, err := idx.db.ExecContext(ctx, "CREATE EXTENSION IF NOT EXISTS vector"); err != nil { + return fmt.Errorf("pgvector: create extension: %w", err) + } + + // Build CREATE TABLE statement. + var sb strings.Builder + sb.WriteString(fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (\n", quoteIdent(idx.tableName))) + sb.WriteString(" doc_id TEXT PRIMARY KEY,\n") + sb.WriteString(" type_name TEXT NOT NULL,\n") + sb.WriteString(" key_fields_json TEXT NOT NULL") + + for _, fc := range idx.schema.Fields { + def := idx.fieldTypes[fc.Name] + sb.WriteString(fmt.Sprintf(",\n %s %s", quoteIdent(fc.Name), def.colSQL)) + } + + // Add tsvector column for full-text search if there are text fields. + if len(idx.textFields) > 0 { + sb.WriteString(",\n tsv tsvector") + } + + sb.WriteString("\n)") + + if _, err := idx.db.ExecContext(ctx, sb.String()); err != nil { + return fmt.Errorf("pgvector: create table: %w", err) + } + + // Create indexes. + if err := idx.createIndexes(ctx); err != nil { + return err + } + + return nil +} + +// createIndexes creates supporting indexes on the table. +func (idx *Index) createIndexes(ctx context.Context) error { + // GIN index on tsvector column for full-text search. + if len(idx.textFields) > 0 { + ginSQL := fmt.Sprintf( + "CREATE INDEX IF NOT EXISTS %s ON %s USING GIN (tsv)", + quoteIdent(idx.tableName+"_tsv_idx"), + quoteIdent(idx.tableName), + ) + if _, err := idx.db.ExecContext(ctx, ginSQL); err != nil { + return fmt.Errorf("pgvector: create GIN index: %w", err) + } + } + + // HNSW indexes on vector columns. + for fieldName := range idx.vectorFields { + hnswSQL := fmt.Sprintf( + "CREATE INDEX IF NOT EXISTS %s ON %s USING hnsw (%s vector_cosine_ops)", + quoteIdent(idx.tableName+"_"+fieldName+"_hnsw_idx"), + quoteIdent(idx.tableName), + quoteIdent(fieldName), + ) + if _, err := idx.db.ExecContext(ctx, hnswSQL); err != nil { + return fmt.Errorf("pgvector: create HNSW index on %s: %w", fieldName, err) + } + } + + // B-tree indexes on filterable/sortable scalar columns. + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeVector { + continue + } + if fc.Filterable || fc.Sortable { + btreeSQL := fmt.Sprintf( + "CREATE INDEX IF NOT EXISTS %s ON %s (%s)", + quoteIdent(idx.tableName+"_"+fc.Name+"_idx"), + quoteIdent(idx.tableName), + quoteIdent(fc.Name), + ) + if _, err := idx.db.ExecContext(ctx, btreeSQL); err != nil { + return fmt.Errorf("pgvector: create B-tree index on %s: %w", fc.Name, err) + } + } + } + + // B-tree index on type_name for type filtering. 
+ typeIdx := fmt.Sprintf( + "CREATE INDEX IF NOT EXISTS %s ON %s (type_name)", + quoteIdent(idx.tableName+"_type_name_idx"), + quoteIdent(idx.tableName), + ) + if _, err := idx.db.ExecContext(ctx, typeIdx); err != nil { + return fmt.Errorf("pgvector: create type_name index: %w", err) + } + + return nil +} + +// documentID computes a deterministic string ID from a DocumentIdentity. +// Format: TypeName:key1=val1,key2=val2,... (keys sorted alphabetically). +// This matches the convention used by the Bleve backend. +func documentID(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return id.TypeName + } + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(id.TypeName) + b.WriteByte(':') + for i, k := range keys { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(k) + b.WriteByte('=') + fmt.Fprintf(&b, "%v", id.KeyFields[k]) + } + return b.String() +} + +// IndexDocument indexes a single document. +func (idx *Index) IndexDocument(ctx context.Context, doc searchindex.EntityDocument) error { + return idx.IndexDocuments(ctx, []searchindex.EntityDocument{doc}) +} + +// IndexDocuments indexes a batch of documents using upserts. +func (idx *Index) IndexDocuments(ctx context.Context, docs []searchindex.EntityDocument) error { + if len(docs) == 0 { + return nil + } + + tx, err := idx.db.BeginTx(ctx, nil) + if err != nil { + return fmt.Errorf("pgvector: begin tx: %w", err) + } + defer tx.Rollback() //nolint:errcheck + + for _, doc := range docs { + if err := idx.upsertDocument(ctx, tx, doc); err != nil { + return err + } + } + + if err := tx.Commit(); err != nil { + return fmt.Errorf("pgvector: commit: %w", err) + } + return nil +} + +// upsertDocument performs INSERT ... ON CONFLICT DO UPDATE for a single document. +func (idx *Index) upsertDocument(ctx context.Context, tx *sql.Tx, doc searchindex.EntityDocument) error { + docID := documentID(doc.Identity) + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return fmt.Errorf("pgvector: marshal key fields: %w", err) + } + + // Build column list and value placeholders. + columns := []string{"doc_id", "type_name", "key_fields_json"} + args := []any{docID, doc.Identity.TypeName, string(keyFieldsJSON)} + paramIdx := 4 // next parameter index + + for _, fieldName := range idx.allFieldNames { + columns = append(columns, quoteIdent(fieldName)) + + def := idx.fieldTypes[fieldName] + if def.config.Type == searchindex.FieldTypeVector { + // Vector field: format as pgvector string. + if vec, ok := doc.Vectors[fieldName]; ok { + args = append(args, formatVector(vec)) + } else { + args = append(args, nil) + } + } else { + // Scalar field. + if val, ok := doc.Fields[fieldName]; ok { + args = append(args, val) + } else { + args = append(args, nil) + } + } + paramIdx++ + } + + // Add tsvector column if there are text fields. + hasTSV := len(idx.textFields) > 0 + if hasTSV { + columns = append(columns, "tsv") + // Build tsvector from text fields. + tsvParts := make([]string, 0, len(idx.textFields)) + for _, tf := range idx.textFields { + if val, ok := doc.Fields[tf]; ok { + tsvParts = append(tsvParts, fmt.Sprintf("%v", val)) + } + } + tsvText := strings.Join(tsvParts, " ") + args = append(args, tsvText) + } + + // Build placeholders. + placeholders := make([]string, len(args)) + for i := range args { + if i < 3 { + // First 3 are plain columns. 
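+ // (doc_id, type_name, key_fields_json)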
+ placeholders[i] = fmt.Sprintf("$%d", i+1) + } else if hasTSV && i == len(args)-1 { + // Last arg is the tsvector text; wrap in to_tsvector. + placeholders[i] = fmt.Sprintf("to_tsvector('english', $%d)", i+1) + } else { + // Check if this is a vector field. + fieldIdx := i - 3 // index into allFieldNames + if fieldIdx >= 0 && fieldIdx < len(idx.allFieldNames) { + def := idx.fieldTypes[idx.allFieldNames[fieldIdx]] + if def.config.Type == searchindex.FieldTypeVector { + placeholders[i] = fmt.Sprintf("$%d::vector", i+1) + } else { + placeholders[i] = fmt.Sprintf("$%d", i+1) + } + } else { + placeholders[i] = fmt.Sprintf("$%d", i+1) + } + } + } + + // Build UPDATE SET clause (exclude doc_id). + setClauses := make([]string, 0, len(columns)-1) + for i := 1; i < len(columns); i++ { + setClauses = append(setClauses, fmt.Sprintf("%s = EXCLUDED.%s", columns[i], columns[i])) + } + + query := fmt.Sprintf( + "INSERT INTO %s (%s) VALUES (%s) ON CONFLICT (doc_id) DO UPDATE SET %s", + quoteIdent(idx.tableName), + strings.Join(columns, ", "), + strings.Join(placeholders, ", "), + strings.Join(setClauses, ", "), + ) + + if _, err := tx.ExecContext(ctx, query, args...); err != nil { + return fmt.Errorf("pgvector: upsert document %q: %w", docID, err) + } + return nil +} + +// formatVector formats a float32 slice as a pgvector literal string, e.g. "[0.1,0.2,0.3]". +func formatVector(vec []float32) string { + if len(vec) == 0 { + return "[]" + } + var sb strings.Builder + sb.WriteByte('[') + for i, v := range vec { + if i > 0 { + sb.WriteByte(',') + } + fmt.Fprintf(&sb, "%g", v) + } + sb.WriteByte(']') + return sb.String() +} + +// DeleteDocument deletes a single document by identity. +func (idx *Index) DeleteDocument(ctx context.Context, id searchindex.DocumentIdentity) error { + return idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{id}) +} + +// DeleteDocuments deletes a batch of documents by identity. +func (idx *Index) DeleteDocuments(ctx context.Context, ids []searchindex.DocumentIdentity) error { + if len(ids) == 0 { + return nil + } + + if len(ids) == 1 { + docID := documentID(ids[0]) + query := fmt.Sprintf("DELETE FROM %s WHERE doc_id = $1", quoteIdent(idx.tableName)) + if _, err := idx.db.ExecContext(ctx, query, docID); err != nil { + return fmt.Errorf("pgvector: delete document %q: %w", docID, err) + } + return nil + } + + // Batch delete using ANY. + docIDs := make([]string, len(ids)) + for i, id := range ids { + docIDs[i] = documentID(id) + } + + // Build an array literal for use with ANY. + query := fmt.Sprintf("DELETE FROM %s WHERE doc_id = ANY($1::text[])", quoteIdent(idx.tableName)) + arrayLiteral := formatTextArray(docIDs) + if _, err := idx.db.ExecContext(ctx, query, arrayLiteral); err != nil { + return fmt.Errorf("pgvector: batch delete: %w", err) + } + return nil +} + +// formatTextArray formats a string slice as a PostgreSQL text array literal. +func formatTextArray(vals []string) string { + var sb strings.Builder + sb.WriteByte('{') + for i, v := range vals { + if i > 0 { + sb.WriteByte(',') + } + sb.WriteByte('"') + // Escape backslashes and double quotes. + for _, c := range v { + if c == '\\' || c == '"' { + sb.WriteByte('\\') + } + sb.WriteRune(c) + } + sb.WriteByte('"') + } + sb.WriteByte('}') + return sb.String() +} + +// Search performs a search query and returns results. 
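+// Depending on which of TextQuery and Vector are set, the request is routed
+// to full-text (or filter-only) search, vector search, or CTE-based hybrid
+// search with Reciprocal Rank Fusion.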
+func (idx *Index) Search(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + hasText := req.TextQuery != "" + hasVector := len(req.Vector) > 0 + + switch { + case hasText && hasVector: + return idx.hybridSearch(ctx, req) + case hasVector: + return idx.vectorSearch(ctx, req) + default: + // Text-only or filter-only search. + return idx.textSearch(ctx, req) + } +} + +// textSearch handles text-only and filter-only queries. +func (idx *Index) textSearch(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + var ( + qb queryBuilder + selCols []string + orderBy []string + hasText = req.TextQuery != "" + ) + + // SELECT clause. + selCols = append(selCols, "doc_id", "type_name", "key_fields_json") + for _, fn := range idx.allFieldNames { + def := idx.fieldTypes[fn] + if def.config.Type == searchindex.FieldTypeVector { + continue // Don't select vector columns in text results. + } + selCols = append(selCols, quoteIdent(fn)) + } + + if hasText { + tsvQuery := idx.buildTSVQuery(req) + p := qb.addParam(req.TextQuery) + selCols = append(selCols, fmt.Sprintf("ts_rank(tsv, %s) AS score", tsvQuery)) + qb.addWhere(fmt.Sprintf("tsv @@ %s", tsvQuery)) + _ = p // param already added + if len(req.Sort) == 0 { + orderBy = append(orderBy, "score DESC") + } + } else { + selCols = append(selCols, "0::float AS score") + } + + // Type filter. + if req.TypeName != "" { + p := qb.addParam(req.TypeName) + qb.addWhere(fmt.Sprintf("type_name = %s", p)) + } + + // Structured filter. + if req.Filter != nil { + filterSQL, err := idx.translateFilter(req.Filter, &qb) + if err != nil { + return nil, err + } + if filterSQL != "" { + qb.addWhere(filterSQL) + } + } + + // Sorting. + isCursorMode := len(req.SearchAfter) > 0 || len(req.SearchBefore) > 0 + isBackward := len(req.SearchBefore) > 0 + + for _, sf := range req.Sort { + dir := "ASC" + if !sf.Ascending { + dir = "DESC" + } + if isBackward { + if dir == "ASC" { + dir = "DESC" + } else { + dir = "ASC" + } + } + orderBy = append(orderBy, fmt.Sprintf("%s %s", quoteIdent(sf.Field), dir)) + } + + // Cursor-based keyset WHERE clause. + if isCursorMode && len(req.Sort) > 0 { + cursorVals := req.SearchAfter + if isBackward { + cursorVals = req.SearchBefore + } + if len(cursorVals) > 0 { + op := ">" + if isBackward { + op = "<" + } + p := qb.addParam(cursorVals[0]) + qb.addWhere(fmt.Sprintf("%s %s %s", quoteIdent(req.Sort[0].Field), op, p)) + } + } + + // Build the main query. + mainQuery := fmt.Sprintf("SELECT %s FROM %s", strings.Join(selCols, ", "), quoteIdent(idx.tableName)) + if len(qb.wheres) > 0 { + mainQuery += " WHERE " + strings.Join(qb.wheres, " AND ") + } + if len(orderBy) > 0 { + mainQuery += " ORDER BY " + strings.Join(orderBy, ", ") + } + + // Count query. + countQuery := fmt.Sprintf("SELECT COUNT(*) FROM %s", quoteIdent(idx.tableName)) + if len(qb.wheres) > 0 { + countQuery += " WHERE " + strings.Join(qb.wheres, " AND ") + } + + // Pagination. + limit := effectiveLimit(req.Limit) + mainQuery += fmt.Sprintf(" LIMIT %d", limit) + if !isCursorMode && req.Offset > 0 { + mainQuery += fmt.Sprintf(" OFFSET %d", req.Offset) + } + + // Execute count query. + var totalCount int + if err := idx.db.QueryRowContext(ctx, countQuery, qb.params...).Scan(&totalCount); err != nil { + return nil, fmt.Errorf("pgvector: count query: %w", err) + } + + // Execute main query. + rows, err := idx.db.QueryContext(ctx, mainQuery, qb.params...) 
+ if err != nil {
+ return nil, fmt.Errorf("pgvector: search query: %w", err)
+ }
+ defer rows.Close()
+
+ hits, err := idx.scanHits(rows)
+ if err != nil {
+ return nil, err
+ }
+
+ // Populate SortValues from sort fields for cursor-based pagination.
+ if len(req.Sort) > 0 {
+ for i := range hits {
+ sortVals := make([]string, 0, len(req.Sort))
+ for _, sf := range req.Sort {
+ if v, ok := hits[i].Representation[sf.Field]; ok {
+ sortVals = append(sortVals, fmt.Sprintf("%v", v))
+ }
+ }
+ hits[i].SortValues = sortVals
+ }
+ }
+
+ result := &searchindex.SearchResult{
+ Hits: hits,
+ TotalCount: totalCount,
+ }
+
+ // Facets.
+ if len(req.Facets) > 0 {
+ facets, err := idx.executeFacets(ctx, req, &qb)
+ if err != nil {
+ return nil, err
+ }
+ result.Facets = facets
+ }
+
+ return result, nil
+}
+
+// buildTSVQuery returns the tsquery expression for the text query. The text
+// itself has already been registered as parameter $1 by the caller. Per-field
+// weighting via req.TextFields is not applied yet: all text fields feed the
+// single combined tsv column.
+func (idx *Index) buildTSVQuery(req searchindex.SearchRequest) string {
+ return "plainto_tsquery('english', $1)"
+}
+
+// vectorSearch handles vector-only queries using the <=> distance operator.
+func (idx *Index) vectorSearch(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) {
+ vectorField := req.VectorField
+ if vectorField == "" {
+ // Use first vector field if not specified.
+ for fn := range idx.vectorFields {
+ vectorField = fn
+ break
+ }
+ }
+ if vectorField == "" {
+ return nil, fmt.Errorf("pgvector: no vector field available for vector search")
+ }
+
+ var (
+ qb queryBuilder
+ selCols []string
+ orderBy []string
+ )
+
+ // Format vector as a SQL literal rather than a parameter placeholder.
+ // pgvector values must be inlined because lib/pq cannot determine the
+ // type of a $N placeholder used with the <=> operator, and count queries
+ // that don't reference the vector column would receive extra parameters.
+ vecLiteral := fmt.Sprintf("'%s'::vector", formatVector(req.Vector))
+
+ selCols = append(selCols, "doc_id", "type_name", "key_fields_json")
+ for _, fn := range idx.allFieldNames {
+ def := idx.fieldTypes[fn]
+ if def.config.Type == searchindex.FieldTypeVector {
+ continue
+ }
+ selCols = append(selCols, quoteIdent(fn))
+ }
+ selCols = append(selCols, fmt.Sprintf("%s <=> %s AS distance", quoteIdent(vectorField), vecLiteral))
+
+ // Type filter.
+ if req.TypeName != "" {
+ p := qb.addParam(req.TypeName)
+ qb.addWhere(fmt.Sprintf("type_name = %s", p))
+ }
+
+ // Structured filter.
+ if req.Filter != nil {
+ filterSQL, err := idx.translateFilter(req.Filter, &qb)
+ if err != nil {
+ return nil, err
+ }
+ if filterSQL != "" {
+ qb.addWhere(filterSQL)
+ }
+ }
+
+ // For vector search, default order is by distance ASC.
+ if len(req.Sort) == 0 {
+ orderBy = append(orderBy, "distance ASC")
+ } else {
+ for _, sf := range req.Sort {
+ dir := "ASC"
+ if !sf.Ascending {
+ dir = "DESC"
+ }
+ orderBy = append(orderBy, fmt.Sprintf("%s %s", quoteIdent(sf.Field), dir))
+ }
+ }
+
+ mainQuery := fmt.Sprintf("SELECT %s FROM %s", strings.Join(selCols, ", "), quoteIdent(idx.tableName))
+ if len(qb.wheres) > 0 {
+ mainQuery += " WHERE " + strings.Join(qb.wheres, " AND ")
+ }
+ mainQuery += " ORDER BY " + strings.Join(orderBy, ", ")
+
+ // Count query.
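+ // As in textSearch, the count query omits LIMIT/OFFSET so that TotalCount
+ // reflects every matching row rather than just the current page.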
+ countQuery := fmt.Sprintf("SELECT COUNT(*) FROM %s", quoteIdent(idx.tableName)) + if len(qb.wheres) > 0 { + countQuery += " WHERE " + strings.Join(qb.wheres, " AND ") + } + + limit := effectiveLimit(req.Limit) + mainQuery += fmt.Sprintf(" LIMIT %d", limit) + if req.Offset > 0 { + mainQuery += fmt.Sprintf(" OFFSET %d", req.Offset) + } + + var totalCount int + if err := idx.db.QueryRowContext(ctx, countQuery, qb.params...).Scan(&totalCount); err != nil { + return nil, fmt.Errorf("pgvector: vector count query: %w", err) + } + + rows, err := idx.db.QueryContext(ctx, mainQuery, qb.params...) + if err != nil { + return nil, fmt.Errorf("pgvector: vector search query: %w", err) + } + defer rows.Close() + + hits, err := idx.scanVectorHits(rows) + if err != nil { + return nil, err + } + + result := &searchindex.SearchResult{ + Hits: hits, + TotalCount: totalCount, + } + + if len(req.Facets) > 0 { + facets, err := idx.executeFacets(ctx, req, &qb) + if err != nil { + return nil, err + } + result.Facets = facets + } + + return result, nil +} + +// hybridSearch performs CTE-based Reciprocal Rank Fusion combining text and vector results. +func (idx *Index) hybridSearch(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + vectorField := req.VectorField + if vectorField == "" { + for fn := range idx.vectorFields { + vectorField = fn + break + } + } + if vectorField == "" { + return nil, fmt.Errorf("pgvector: no vector field available for hybrid search") + } + + var qb queryBuilder + limit := effectiveLimit(req.Limit) + k := defaultRRFConstant + + // Parameter for text query. + textParam := qb.addParam(req.TextQuery) + // Format vector as a SQL literal rather than a parameter placeholder. + // pgvector values must be inlined because lib/pq cannot determine the + // type of a $N placeholder used with the <=> operator, and count queries + // that don't reference the vector column would receive extra parameters. + vecLiteral := fmt.Sprintf("'%s'::vector", formatVector(req.Vector)) + + // Build filter WHERE clause (shared by both CTEs). + var filterWhere string + if req.TypeName != "" { + p := qb.addParam(req.TypeName) + filterWhere = fmt.Sprintf("type_name = %s", p) + } + if req.Filter != nil { + filterSQL, err := idx.translateFilter(req.Filter, &qb) + if err != nil { + return nil, err + } + if filterSQL != "" { + if filterWhere != "" { + filterWhere += " AND " + filterSQL + } else { + filterWhere = filterSQL + } + } + } + + textWhere := fmt.Sprintf("tsv @@ plainto_tsquery('english', %s)", textParam) + if filterWhere != "" { + textWhere += " AND " + filterWhere + } + + vecWhere := filterWhere + + // Build the CTE-based RRF query. + // text_results: ranked by ts_rank descending. + // vec_results: ranked by distance ascending (closest first). + // Combined using RRF: score = 1/(k+rank). + // + // We use ROW_NUMBER() for ranking within each CTE. + rrfLimit := limit * 3 // Fetch more candidates from each CTE for better fusion. 
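+ // (floored at 100 below). With k=60, a document ranked 1st by text and
+ // 3rd by vector scores 1/(60+1) + 1/(60+3) ≈ 0.0323 under RRF.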
+ if rrfLimit < 100 { + rrfLimit = 100 + } + + var sb strings.Builder + sb.WriteString("WITH text_results AS (\n") + sb.WriteString(fmt.Sprintf(" SELECT doc_id, ROW_NUMBER() OVER (ORDER BY ts_rank(tsv, plainto_tsquery('english', %s)) DESC) AS rank\n", textParam)) + sb.WriteString(fmt.Sprintf(" FROM %s\n", quoteIdent(idx.tableName))) + sb.WriteString(fmt.Sprintf(" WHERE %s\n", textWhere)) + sb.WriteString(fmt.Sprintf(" LIMIT %d\n", rrfLimit)) + sb.WriteString("),\nvec_results AS (\n") + sb.WriteString(fmt.Sprintf(" SELECT doc_id, ROW_NUMBER() OVER (ORDER BY %s <=> %s ASC) AS rank\n", quoteIdent(vectorField), vecLiteral)) + sb.WriteString(fmt.Sprintf(" FROM %s\n", quoteIdent(idx.tableName))) + if vecWhere != "" { + sb.WriteString(fmt.Sprintf(" WHERE %s\n", vecWhere)) + } + sb.WriteString(fmt.Sprintf(" LIMIT %d\n", rrfLimit)) + sb.WriteString("),\ncombined AS (\n") + sb.WriteString(" SELECT COALESCE(t.doc_id, v.doc_id) AS doc_id,\n") + sb.WriteString(fmt.Sprintf(" COALESCE(1.0/(%d + t.rank), 0) + COALESCE(1.0/(%d + v.rank), 0) AS rrf_score\n", k, k)) + sb.WriteString(" FROM text_results t\n") + sb.WriteString(" FULL OUTER JOIN vec_results v ON t.doc_id = v.doc_id\n") + sb.WriteString(")\n") + + // Select combined results joined back to the main table. + selCols := []string{"m.doc_id", "m.type_name", "m.key_fields_json"} + for _, fn := range idx.allFieldNames { + def := idx.fieldTypes[fn] + if def.config.Type == searchindex.FieldTypeVector { + continue + } + selCols = append(selCols, "m."+quoteIdent(fn)) + } + selCols = append(selCols, "c.rrf_score AS score") + + sb.WriteString(fmt.Sprintf("SELECT %s\n", strings.Join(selCols, ", "))) + sb.WriteString(fmt.Sprintf("FROM combined c JOIN %s m ON c.doc_id = m.doc_id\n", quoteIdent(idx.tableName))) + + // Sorting. + if len(req.Sort) > 0 { + sortClauses := make([]string, 0, len(req.Sort)) + for _, sf := range req.Sort { + dir := "ASC" + if !sf.Ascending { + dir = "DESC" + } + sortClauses = append(sortClauses, fmt.Sprintf("m.%s %s", quoteIdent(sf.Field), dir)) + } + sb.WriteString("ORDER BY " + strings.Join(sortClauses, ", ") + "\n") + } else { + sb.WriteString("ORDER BY c.rrf_score DESC\n") + } + + sb.WriteString(fmt.Sprintf("LIMIT %d", limit)) + if req.Offset > 0 { + sb.WriteString(fmt.Sprintf(" OFFSET %d", req.Offset)) + } + + mainQuery := sb.String() + + // Count query for hybrid: count the combined CTE. + var countSB strings.Builder + countSB.WriteString("WITH text_results AS (\n") + countSB.WriteString(fmt.Sprintf(" SELECT doc_id FROM %s WHERE %s LIMIT %d\n", quoteIdent(idx.tableName), textWhere, rrfLimit)) + countSB.WriteString("),\nvec_results AS (\n") + countSB.WriteString(fmt.Sprintf(" SELECT doc_id FROM %s", quoteIdent(idx.tableName))) + if vecWhere != "" { + countSB.WriteString(fmt.Sprintf(" WHERE %s", vecWhere)) + } + countSB.WriteString(fmt.Sprintf(" LIMIT %d\n", rrfLimit)) + countSB.WriteString(")\n") + countSB.WriteString("SELECT COUNT(DISTINCT doc_id) FROM (\n") + countSB.WriteString(" SELECT doc_id FROM text_results\n") + countSB.WriteString(" UNION\n") + countSB.WriteString(" SELECT doc_id FROM vec_results\n") + countSB.WriteString(") AS all_docs") + + var totalCount int + if err := idx.db.QueryRowContext(ctx, countSB.String(), qb.params...).Scan(&totalCount); err != nil { + return nil, fmt.Errorf("pgvector: hybrid count query: %w", err) + } + + rows, err := idx.db.QueryContext(ctx, mainQuery, qb.params...) 
+ if err != nil { + return nil, fmt.Errorf("pgvector: hybrid search query: %w", err) + } + defer rows.Close() + + hits, err := idx.scanHits(rows) + if err != nil { + return nil, err + } + + result := &searchindex.SearchResult{ + Hits: hits, + TotalCount: totalCount, + } + + if len(req.Facets) > 0 { + facets, err := idx.executeFacets(ctx, req, &qb) + if err != nil { + return nil, err + } + result.Facets = facets + } + + return result, nil +} + +// scanHits scans rows from a text or hybrid search query into SearchHit slices. +// Expected columns: doc_id, type_name, key_fields_json, [scalar fields...], score. +func (idx *Index) scanHits(rows *sql.Rows) ([]searchindex.SearchHit, error) { + var hits []searchindex.SearchHit + + scalarFields := idx.scalarFieldNames() + + for rows.Next() { + var ( + docID string + typeName string + keyFieldsRaw string + score float64 + ) + + scanDest := make([]any, 0, 3+len(scalarFields)+1) + scanDest = append(scanDest, &docID, &typeName, &keyFieldsRaw) + + fieldPtrs := make([]*sql.NullString, len(scalarFields)) + for i := range scalarFields { + fieldPtrs[i] = &sql.NullString{} + scanDest = append(scanDest, fieldPtrs[i]) + } + scanDest = append(scanDest, &score) + + if err := rows.Scan(scanDest...); err != nil { + return nil, fmt.Errorf("pgvector: scan hit: %w", err) + } + + identity, err := parseIdentity(typeName, keyFieldsRaw) + if err != nil { + return nil, err + } + + representation := make(map[string]any, len(scalarFields)+2) + representation["__typename"] = typeName + for k, v := range identity.KeyFields { + representation[k] = v + } + + for i, fn := range scalarFields { + if fieldPtrs[i].Valid { + representation[fn] = idx.parseFieldValue(fn, fieldPtrs[i].String) + } + } + + hits = append(hits, searchindex.SearchHit{ + Identity: identity, + Score: score, + Representation: representation, + }) + } + + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("pgvector: rows iteration: %w", err) + } + + if hits == nil { + hits = []searchindex.SearchHit{} + } + + return hits, nil +} + +// scanVectorHits scans rows from a vector search query into SearchHit slices. +// Expected columns: doc_id, type_name, key_fields_json, [scalar fields...], distance. +func (idx *Index) scanVectorHits(rows *sql.Rows) ([]searchindex.SearchHit, error) { + var hits []searchindex.SearchHit + + scalarFields := idx.scalarFieldNames() + + for rows.Next() { + var ( + docID string + typeName string + keyFieldsRaw string + distance float64 + ) + + scanDest := make([]any, 0, 3+len(scalarFields)+1) + scanDest = append(scanDest, &docID, &typeName, &keyFieldsRaw) + + fieldPtrs := make([]*sql.NullString, len(scalarFields)) + for i := range scalarFields { + fieldPtrs[i] = &sql.NullString{} + scanDest = append(scanDest, fieldPtrs[i]) + } + scanDest = append(scanDest, &distance) + + if err := rows.Scan(scanDest...); err != nil { + return nil, fmt.Errorf("pgvector: scan vector hit: %w", err) + } + + identity, err := parseIdentity(typeName, keyFieldsRaw) + if err != nil { + return nil, err + } + + representation := make(map[string]any, len(scalarFields)+2) + representation["__typename"] = typeName + for k, v := range identity.KeyFields { + representation[k] = v + } + + for i, fn := range scalarFields { + if fieldPtrs[i].Valid { + representation[fn] = idx.parseFieldValue(fn, fieldPtrs[i].String) + } + } + + hits = append(hits, searchindex.SearchHit{ + Identity: identity, + Score: 1.0 / (1.0 + distance), // Convert distance to similarity score. 
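+ // Cosine distance from <=> lies in [0, 2], so this score lies in [1/3, 1].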
+ Distance: distance, + Representation: representation, + }) + } + + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("pgvector: vector rows iteration: %w", err) + } + + if hits == nil { + hits = []searchindex.SearchHit{} + } + + return hits, nil +} + +// scalarFieldNames returns the names of all non-vector fields in schema order. +func (idx *Index) scalarFieldNames() []string { + var names []string + for _, fn := range idx.allFieldNames { + if idx.fieldTypes[fn].config.Type != searchindex.FieldTypeVector { + names = append(names, fn) + } + } + return names +} + +// parseFieldValue converts a string value from the database to the appropriate Go type +// based on the field's type definition. +func (idx *Index) parseFieldValue(fieldName string, raw string) any { + def, ok := idx.fieldTypes[fieldName] + if !ok { + return raw + } + + switch def.config.Type { + case searchindex.FieldTypeNumeric: + var f float64 + if _, err := fmt.Sscanf(raw, "%f", &f); err == nil { + return f + } + return raw + case searchindex.FieldTypeBool: + switch raw { + case "true", "t", "1": + return true + case "false", "f", "0": + return false + } + return raw + default: + return raw + } +} + +// parseIdentity reconstructs a DocumentIdentity from stored fields. +func parseIdentity(typeName, keyFieldsRaw string) (searchindex.DocumentIdentity, error) { + var keyFields map[string]any + if keyFieldsRaw != "" { + if err := json.Unmarshal([]byte(keyFieldsRaw), &keyFields); err != nil { + return searchindex.DocumentIdentity{}, fmt.Errorf("pgvector: unmarshal key fields: %w", err) + } + } + if keyFields == nil { + keyFields = make(map[string]any) + } + + return searchindex.DocumentIdentity{ + TypeName: typeName, + KeyFields: keyFields, + }, nil +} + +// executeFacets runs facet aggregation queries and returns the results. +func (idx *Index) executeFacets(ctx context.Context, req searchindex.SearchRequest, baseQB *queryBuilder) (map[string]searchindex.FacetResult, error) { + facets := make(map[string]searchindex.FacetResult, len(req.Facets)) + + for _, fr := range req.Facets { + size := fr.Size + if size <= 0 { + size = 10 + } + + facetQuery := fmt.Sprintf( + "SELECT %s AS val, COUNT(*) AS cnt FROM %s", + quoteIdent(fr.Field), + quoteIdent(idx.tableName), + ) + if len(baseQB.wheres) > 0 { + facetQuery += " WHERE " + strings.Join(baseQB.wheres, " AND ") + } + facetQuery += fmt.Sprintf( + " GROUP BY %s ORDER BY cnt DESC LIMIT %d", + quoteIdent(fr.Field), + size, + ) + + rows, err := idx.db.QueryContext(ctx, facetQuery, baseQB.params...) + if err != nil { + return nil, fmt.Errorf("pgvector: facet query for %s: %w", fr.Field, err) + } + + var values []searchindex.FacetValue + for rows.Next() { + var ( + val sql.NullString + cnt int + ) + if err := rows.Scan(&val, &cnt); err != nil { + rows.Close() + return nil, fmt.Errorf("pgvector: scan facet: %w", err) + } + if val.Valid { + values = append(values, searchindex.FacetValue{ + Value: val.String, + Count: cnt, + }) + } + } + rows.Close() + + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("pgvector: facet rows: %w", err) + } + + facets[fr.Field] = searchindex.FacetResult{Values: values} + } + + return facets, nil +} + +// translateFilter recursively converts a searchindex.Filter tree to SQL WHERE +// clause fragments using parameterized queries. 
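+// For example, Filter{Term: &TermFilter{Field: "category", Value: "Footwear"}}
+// yields `"category" = $1` with "Footwear" appended to qb's parameter list.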
+func (idx *Index) translateFilter(f *searchindex.Filter, qb *queryBuilder) (string, error) { + if f == nil { + return "", nil + } + + // AND + if len(f.And) > 0 { + parts := make([]string, 0, len(f.And)) + for _, child := range f.And { + s, err := idx.translateFilter(child, qb) + if err != nil { + return "", err + } + if s != "" { + parts = append(parts, s) + } + } + if len(parts) == 0 { + return "", nil + } + return "(" + strings.Join(parts, " AND ") + ")", nil + } + + // OR + if len(f.Or) > 0 { + parts := make([]string, 0, len(f.Or)) + for _, child := range f.Or { + s, err := idx.translateFilter(child, qb) + if err != nil { + return "", err + } + if s != "" { + parts = append(parts, s) + } + } + if len(parts) == 0 { + return "", nil + } + return "(" + strings.Join(parts, " OR ") + ")", nil + } + + // NOT + if f.Not != nil { + s, err := idx.translateFilter(f.Not, qb) + if err != nil { + return "", err + } + if s == "" { + return "", nil + } + return fmt.Sprintf("NOT (%s)", s), nil + } + + // Term + if f.Term != nil { + p := qb.addParam(f.Term.Value) + return fmt.Sprintf("%s = %s", quoteIdent(f.Term.Field), p), nil + } + + // Terms (IN) + if f.Terms != nil { + if len(f.Terms.Values) == 0 { + return "", nil + } + placeholders := make([]string, len(f.Terms.Values)) + for i, v := range f.Terms.Values { + placeholders[i] = qb.addParam(v) + } + return fmt.Sprintf("%s IN (%s)", quoteIdent(f.Terms.Field), strings.Join(placeholders, ", ")), nil + } + + // Range + if f.Range != nil { + return idx.translateRangeFilter(f.Range, qb) + } + + // Prefix: escape LIKE wildcards in the user value before appending %. + if f.Prefix != nil { + escaped := strings.NewReplacer("%", "\\%", "_", "\\_").Replace(f.Prefix.Value) + p := qb.addParam(escaped + "%") + return fmt.Sprintf("%s LIKE %s", quoteIdent(f.Prefix.Field), p), nil + } + + // Exists + if f.Exists != nil { + return fmt.Sprintf("%s IS NOT NULL", quoteIdent(f.Exists.Field)), nil + } + + return "", nil +} + +// translateRangeFilter converts a RangeFilter to SQL predicates. +func (idx *Index) translateRangeFilter(rf *searchindex.RangeFilter, qb *queryBuilder) (string, error) { + var parts []string + + if rf.GTE != nil { + p := qb.addParam(rf.GTE) + parts = append(parts, fmt.Sprintf("%s >= %s", quoteIdent(rf.Field), p)) + } else if rf.HasGT && rf.GT != nil { + p := qb.addParam(rf.GT) + parts = append(parts, fmt.Sprintf("%s > %s", quoteIdent(rf.Field), p)) + } + + if rf.LTE != nil { + p := qb.addParam(rf.LTE) + parts = append(parts, fmt.Sprintf("%s <= %s", quoteIdent(rf.Field), p)) + } else if rf.HasLT && rf.LT != nil { + p := qb.addParam(rf.LT) + parts = append(parts, fmt.Sprintf("%s < %s", quoteIdent(rf.Field), p)) + } + + if len(parts) == 0 { + return "", nil + } + return "(" + strings.Join(parts, " AND ") + ")", nil +} + +// Autocomplete returns terms from the index matching the given prefix. +// It splits text field values into words and returns distinct words matching the prefix. 
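+// For example, with Prefix "run", a "name" column containing "Running Shoes"
+// contributes the lowercased term "running".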
+func (idx *Index) Autocomplete(ctx context.Context, req searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) {
+ limit := req.Limit
+ if limit <= 0 {
+ limit = 10
+ }
+
+ prefix := strings.ToLower(req.Prefix)
+
+ // DISTINCT over (doc_id, word) dedupes repeated words within a single
+ // document, so cnt counts how many documents contain each word.
+ query := fmt.Sprintf(
+ `SELECT word, COUNT(*) AS cnt FROM (
+ SELECT DISTINCT doc_id, unnest(regexp_split_to_array(LOWER(%s), '\s+')) AS word
+ FROM %s
+ ) sub WHERE word LIKE $1 GROUP BY word ORDER BY cnt DESC, word ASC LIMIT $2`,
+ quoteIdent(req.Field), quoteIdent(idx.tableName))
+
+ rows, err := idx.db.QueryContext(ctx, query, prefix+"%", limit)
+ if err != nil {
+ return nil, fmt.Errorf("pgvector: autocomplete query: %w", err)
+ }
+ defer rows.Close()
+
+ var terms []searchindex.AutocompleteTerm
+ for rows.Next() {
+ var t searchindex.AutocompleteTerm
+ if err := rows.Scan(&t.Term, &t.Count); err != nil {
+ return nil, fmt.Errorf("pgvector: scan autocomplete row: %w", err)
+ }
+ terms = append(terms, t)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, fmt.Errorf("pgvector: autocomplete rows: %w", err)
+ }
+
+ return &searchindex.AutocompleteResult{Terms: terms}, nil
+}
+
+// Close releases resources held by the index. The underlying *sql.DB is NOT
+// closed since it is owned by the caller.
+func (idx *Index) Close() error {
+ return nil
+}
+
+// ---------- Helpers ----------
+
+// queryBuilder tracks parameterized query state.
+type queryBuilder struct {
+ params []any
+ wheres []string
+}
+
+// addParam adds a parameter and returns its placeholder string ($N).
+func (qb *queryBuilder) addParam(val any) string {
+ qb.params = append(qb.params, val)
+ return fmt.Sprintf("$%d", len(qb.params))
+}
+
+// addWhere adds a WHERE clause fragment.
+func (qb *queryBuilder) addWhere(clause string) {
+ qb.wheres = append(qb.wheres, clause)
+}
+
+// effectiveLimit returns a sensible default if limit is zero or negative.
+func effectiveLimit(limit int) int {
+ if limit <= 0 {
+ return 10
+ }
+ return limit
+}
+
+// quoteIdent wraps a SQL identifier in double quotes to prevent injection
+// and handle reserved words. Internal double quotes are doubled per SQL standard.
+func quoteIdent(s string) string {
+ return `"` + strings.ReplaceAll(s, `"`, `""`) + `"`
+}
+
+// sanitizeIdentifier removes characters not suitable for use in a PostgreSQL
+// identifier. Only letters, digits, and underscores are kept.
+func sanitizeIdentifier(s string) string {
+ var sb strings.Builder
+ for _, c := range s {
+ if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' {
+ sb.WriteRune(c)
+ }
+ }
+ return sb.String()
+}
diff --git a/v2/pkg/searchindex/pgvector/pgvector_test.go b/v2/pkg/searchindex/pgvector/pgvector_test.go
new file mode 100644
index 0000000000..1ef8d093d0
--- /dev/null
+++ b/v2/pkg/searchindex/pgvector/pgvector_test.go
@@ -0,0 +1,665 @@
+//go:build integration
+
+package pgvector
+
+import (
+ "context"
+ "database/sql"
+ "fmt"
+ "testing"
+ "time"
+
+ _ "github.com/lib/pq"
+ "github.com/testcontainers/testcontainers-go"
+ "github.com/testcontainers/testcontainers-go/wait"
+
+ "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+)
+
+// startPgvectorContainer starts a PostgreSQL container with the pgvector
+// extension and returns a connected *sql.DB; container termination and
+// connection close are registered via t.Cleanup.
+func startPgvectorContainer(t *testing.T) *sql.DB { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "pgvector/pgvector:pg16", + ExposedPorts: []string{"5432/tcp"}, + Env: map[string]string{ + "POSTGRES_USER": "test", + "POSTGRES_PASSWORD": "test", + "POSTGRES_DB": "testdb", + }, + WaitingFor: wait.ForListeningPort("5432/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start pgvector container: %v", err) + } + t.Cleanup(func() { container.Terminate(ctx) }) + + host, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + + port, err := container.MappedPort(ctx, "5432") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + dsn := fmt.Sprintf("postgres://test:test@%s:%s/testdb?sslmode=disable", host, port.Port()) + + db, err := sql.Open("postgres", dsn) + if err != nil { + t.Fatalf("failed to open database: %v", err) + } + t.Cleanup(func() { db.Close() }) + + // Wait for the database to be ready. + for i := 0; i < 30; i++ { + if err := db.PingContext(ctx); err == nil { + break + } + time.Sleep(500 * time.Millisecond) + } + if err := db.PingContext(ctx); err != nil { + t.Fatalf("database not ready: %v", err) + } + + return db +} + +func newTestIndex(t *testing.T, db *sql.DB) searchindex.Index { + t.Helper() + factory := NewFactory(db) + schema := searchindex.IndexConfig{ + Name: "test", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + }, + } + idx, err := factory.CreateIndex(context.Background(), "test_products", schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func newTestIndexWithVectors(t *testing.T, db *sql.DB) searchindex.Index { + t.Helper() + factory := NewFactory(db) + schema := searchindex.IndexConfig{ + Name: "test_vec", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "embedding", Type: searchindex.FieldTypeVector, Dimensions: 3}, + }, + } + idx, err := factory.CreateIndex(context.Background(), "test_vectors", schema, nil) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func populateTestData(t *testing.T, idx searchindex.Index) { + t.Helper() + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top 
basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + }, + } + if err := idx.IndexDocuments(context.Background(), docs); err != nil { + t.Fatalf("IndexDocuments: %v", err) + } +} + +func populateVectorData(t *testing.T, idx searchindex.Index) { + t.Helper() + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "category": "Footwear", "price": 89.99}, + Vectors: map[string][]float32{"embedding": {0.1, 0.2, 0.3}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "category": "Footwear", "price": 129.99}, + Vectors: map[string][]float32{"embedding": {0.15, 0.25, 0.35}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "category": "Accessories", "price": 35.00}, + Vectors: map[string][]float32{"embedding": {0.9, 0.8, 0.7}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "category": "Footwear", "price": 12.99}, + Vectors: map[string][]float32{"embedding": {0.12, 0.22, 0.32}}, + }, + } + if err := idx.IndexDocuments(context.Background(), docs); err != nil { + t.Fatalf("IndexDocuments: %v", err) + } +} + +func TestFullLifecycle(t *testing.T) { + db := startPgvectorContainer(t) + idx := newTestIndex(t, db) + populateTestData(t, idx) + + ctx := context.Background() + + t.Run("text search", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "shoes", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 2 { + t.Errorf("expected at least 2 hits for 'shoes', got %d", result.TotalCount) + } + }) + + t.Run("term filter on keyword field", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 hits for category=Footwear, got %d", result.TotalCount) + } + }) + + t.Run("boolean filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Errorf("expected 1 hit for inStock=false, got %d", result.TotalCount) + } + }) + + t.Run("numeric range filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: 30.0, + LTE: 100.0, 
+ }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 2 { + t.Errorf("expected 2 hits for price 30-100, got %d", result.TotalCount) + } + }) + + t.Run("prefix filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Prefix: &searchindex.PrefixFilter{Field: "category", Value: "Foot"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 hits for category prefix 'Foot', got %d", result.TotalCount) + } + }) + + t.Run("AND filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 hits for Footwear AND inStock, got %d", result.TotalCount) + } + }) + + t.Run("OR filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for Footwear OR Accessories, got %d", result.TotalCount) + } + }) + + t.Run("NOT filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Errorf("expected 1 hit for NOT Footwear, got %d", result.TotalCount) + } + }) + + t.Run("sorting by price ascending", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 4 { + t.Fatalf("expected 4 hits, got %d", result.TotalCount) + } + if len(result.Hits) < 4 { + t.Fatalf("expected 4 hits in results, got %d", len(result.Hits)) + } + // First hit should be cheapest (Wool Socks at 12.99). + if result.Hits[0].Representation["name"] != "Wool Socks" { + t.Errorf("expected first hit to be Wool Socks (cheapest), got %v", result.Hits[0].Representation["name"]) + } + }) + + t.Run("pagination", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 2, + Offset: 2, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 2 { + t.Errorf("expected 2 hits with offset, got %d", len(result.Hits)) + } + // TotalCount should still reflect the full count. 
+ if result.TotalCount != 4 { + t.Errorf("expected TotalCount=4, got %d", result.TotalCount) + } + }) + + t.Run("facets", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Facets: []searchindex.FacetRequest{{Field: "category", Size: 10}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + facet, ok := result.Facets["category"] + if !ok { + t.Fatal("expected category facet") + } + if len(facet.Values) < 2 { + t.Errorf("expected at least 2 facet values, got %d", len(facet.Values)) + } + }) + + t.Run("type name filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TypeName: "Product", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for TypeName=Product, got %d", result.TotalCount) + } + }) + + t.Run("search hit identity", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "running shoes", + Limit: 1, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) == 0 { + t.Fatal("expected at least 1 hit") + } + hit := result.Hits[0] + if hit.Identity.TypeName != "Product" { + t.Errorf("TypeName = %q, want %q", hit.Identity.TypeName, "Product") + } + if hit.Representation["__typename"] != "Product" { + t.Errorf("__typename = %v, want %q", hit.Representation["__typename"], "Product") + } + }) + + t.Run("text search with field restriction", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "shoes", + TextFields: []searchindex.TextFieldWeight{{Name: "name"}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 2 { + t.Errorf("expected at least 2 hits for 'shoes' with field restriction, got %d", result.TotalCount) + } + }) + + t.Run("terms filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"Footwear", "Accessories"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits for category IN (Footwear, Accessories), got %d", result.TotalCount) + } + }) + + t.Run("upsert overwrites existing document", func(t *testing.T) { + // Update Running Shoes price. + doc := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 79.99, "inStock": true}, + } + if err := idx.IndexDocument(ctx, doc); err != nil { + t.Fatalf("IndexDocument (upsert): %v", err) + } + + // Total should still be 4. 
+ result, err := idx.Search(ctx, searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits after upsert, got %d", result.TotalCount) + } + }) +} + +func TestDeleteDocument(t *testing.T) { + db := startPgvectorContainer(t) + idx := newTestIndex(t, db) + populateTestData(t, idx) + + ctx := context.Background() + + err := idx.DeleteDocument(ctx, searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }) + if err != nil { + t.Fatalf("DeleteDocument: %v", err) + } + + result, err := idx.Search(ctx, searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 documents after delete, got %d", result.TotalCount) + } +} + +func TestDeleteDocuments(t *testing.T) { + db := startPgvectorContainer(t) + idx := newTestIndex(t, db) + populateTestData(t, idx) + + ctx := context.Background() + + err := idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + }) + if err != nil { + t.Fatalf("DeleteDocuments: %v", err) + } + + result, err := idx.Search(ctx, searchindex.SearchRequest{Limit: 10}) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 2 { + t.Errorf("expected 2 documents after batch delete, got %d", result.TotalCount) + } +} + +func TestVectorSearch(t *testing.T) { + db := startPgvectorContainer(t) + idx := newTestIndexWithVectors(t, db) + populateVectorData(t, idx) + + ctx := context.Background() + + t.Run("nearest neighbor search", func(t *testing.T) { + // Query vector close to Running Shoes [0.1, 0.2, 0.3]. + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3}, + VectorField: "embedding", + Limit: 4, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 4 { + t.Errorf("expected 4 hits, got %d", result.TotalCount) + } + if len(result.Hits) < 1 { + t.Fatal("expected at least 1 hit") + } + // The closest vector should be Running Shoes (exact match). 
+ if result.Hits[0].Identity.TypeName != "Product" { + t.Errorf("expected Product, got %s", result.Hits[0].Identity.TypeName) + } + if result.Hits[0].Distance < 0 { + t.Errorf("expected non-negative distance, got %f", result.Hits[0].Distance) + } + }) + + t.Run("vector search with filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3}, + VectorField: "embedding", + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 3 { + t.Errorf("expected 3 hits for Footwear, got %d", result.TotalCount) + } + }) +} + +func TestHybridSearch(t *testing.T) { + db := startPgvectorContainer(t) + idx := newTestIndexWithVectors(t, db) + populateVectorData(t, idx) + + ctx := context.Background() + + t.Run("text and vector combined", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "shoes", + Vector: []float32{0.1, 0.2, 0.3}, + VectorField: "embedding", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount < 1 { + t.Errorf("expected at least 1 hit for hybrid search, got %d", result.TotalCount) + } + }) +} + +func TestDocumentID(t *testing.T) { + id := documentID(searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123", "sku": "ABC"}, + }) + expected := "Product:id=123,sku=ABC" + if id != expected { + t.Errorf("documentID = %q, want %q", id, expected) + } +} + +func TestDocumentIDNoKeys(t *testing.T) { + id := documentID(searchindex.DocumentIdentity{ + TypeName: "Singleton", + }) + if id != "Singleton" { + t.Errorf("documentID = %q, want %q", id, "Singleton") + } +} + +func TestFormatVector(t *testing.T) { + got := formatVector([]float32{0.1, 0.2, 0.3}) + expected := "[0.1,0.2,0.3]" + if got != expected { + t.Errorf("formatVector = %q, want %q", got, expected) + } +} + +func TestIndexSingleDocument(t *testing.T) { + db := startPgvectorContainer(t) + idx := newTestIndex(t, db) + ctx := context.Background() + + doc := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "42"}, + }, + Fields: map[string]any{ + "name": "Hiking Boots", + "description": "Durable boots for mountain trails", + "category": "Footwear", + "price": 149.99, + "inStock": true, + }, + } + + if err := idx.IndexDocument(ctx, doc); err != nil { + t.Fatalf("IndexDocument: %v", err) + } + + result, err := idx.Search(ctx, searchindex.SearchRequest{ + TextQuery: "hiking", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if result.TotalCount != 1 { + t.Fatalf("expected 1 hit for 'hiking', got %d", result.TotalCount) + } + if len(result.Hits) != 1 { + t.Fatalf("expected 1 hit in results, got %d", len(result.Hits)) + } + hit := result.Hits[0] + if hit.Identity.TypeName != "Product" { + t.Errorf("TypeName = %q, want %q", hit.Identity.TypeName, "Product") + } + if hit.Representation["name"] != "Hiking Boots" { + t.Errorf("name = %v, want %q", hit.Representation["name"], "Hiking Boots") + } + if hit.Representation["category"] != "Footwear" { + t.Errorf("category = %v, want %q", hit.Representation["category"], "Footwear") + } +} + +func TestSanitizeIdentifier(t *testing.T) { + tests := []struct { + input string + want string + }{ + {"simple", "simple"}, + {"with spaces", "withspaces"}, + {"with-dashes", "withdashes"}, + 
{"with_underscores", "with_underscores"}, + {"MiXeD123", "MiXeD123"}, + {"drop;table", "droptable"}, + } + for _, tt := range tests { + got := sanitizeIdentifier(tt.input) + if got != tt.want { + t.Errorf("sanitizeIdentifier(%q) = %q, want %q", tt.input, got, tt.want) + } + } +} diff --git a/v2/pkg/searchindex/qdrant/qdrant.go b/v2/pkg/searchindex/qdrant/qdrant.go new file mode 100644 index 0000000000..964ed83099 --- /dev/null +++ b/v2/pkg/searchindex/qdrant/qdrant.go @@ -0,0 +1,849 @@ +// Package qdrant implements the searchindex.Index interface for Qdrant. +// +// Priority: P2 +// Supports: vector-native search, prefetch + fusion hybrid. +// Filter translation: searchindex.Filter -> Qdrant must/should/must_not clauses. +// +// This implementation uses only net/http + encoding/json (no external SDK). +// It communicates with Qdrant's REST API. +package qdrant + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "hash/fnv" + "io" + "net/http" + "sort" + "strings" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Compile-time interface checks. +var ( + _ searchindex.Index = (*Index)(nil) + _ searchindex.IndexFactory = (*Factory)(nil) +) + +// reservedTypeNameField is the payload field used to store the entity type name. +const reservedTypeNameField = "_typeName" + +// reservedKeyFieldsField stores the JSON-encoded key fields map so we can +// reconstruct DocumentIdentity from search results. +const reservedKeyFieldsField = "_keyFieldsJSON" + +// Config holds Qdrant-specific configuration. +type Config struct { + Host string `json:"host"` + Port int `json:"port,omitempty"` + APIKey string `json:"api_key,omitempty"` + UseTLS bool `json:"use_tls,omitempty"` +} + +// baseURL returns the Qdrant REST API base URL derived from the config. +func (c *Config) baseURL() string { + scheme := "http" + if c.UseTLS { + scheme = "https" + } + return fmt.Sprintf("%s://%s:%d", scheme, c.Host, c.Port) +} + +// Factory implements searchindex.IndexFactory for Qdrant. +type Factory struct{} + +// NewFactory returns a new Qdrant IndexFactory. +func NewFactory() *Factory { + return &Factory{} +} + +// CreateIndex creates a new Qdrant collection with the given name and schema, +// then returns an Index that can be used for CRUD and search operations. +func (f *Factory) CreateIndex(ctx context.Context, name string, schema searchindex.IndexConfig, configJSON []byte) (searchindex.Index, error) { + var cfg Config + if len(configJSON) > 0 { + if err := json.Unmarshal(configJSON, &cfg); err != nil { + return nil, fmt.Errorf("qdrant: invalid config: %w", err) + } + } + if cfg.Host == "" { + cfg.Host = "localhost" + } + if cfg.Port == 0 { + cfg.Port = 6333 + } + + idx := &Index{ + name: name, + config: cfg, + schema: schema, + client: &http.Client{}, + } + + if err := idx.createCollection(ctx); err != nil { + return nil, err + } + + if err := idx.createPayloadIndexes(ctx); err != nil { + return nil, err + } + + return idx, nil +} + +// Index implements searchindex.Index for Qdrant. +type Index struct { + name string + config Config + schema searchindex.IndexConfig + client *http.Client +} + +// createCollection creates (or recreates) a Qdrant collection via PUT /collections/{name}. +func (idx *Index) createCollection(ctx context.Context) error { + // Determine vector config from schema. 
+ var vectorSize int + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeVector && fc.Dimensions > 0 { + vectorSize = fc.Dimensions + break + } + } + if vectorSize == 0 { + // No vector fields defined; use a small dummy vector so the collection can be created. + vectorSize = 4 + } + + body := map[string]any{ + "vectors": map[string]any{ + "size": vectorSize, + "distance": "Cosine", + }, + } + + _, err := idx.doRequest(ctx, http.MethodPut, fmt.Sprintf("/collections/%s", idx.name), body) + if err != nil { + return fmt.Errorf("qdrant: create collection %q: %w", idx.name, err) + } + return nil +} + +// createPayloadIndexes creates payload indexes for filterable and sortable fields. +func (idx *Index) createPayloadIndexes(ctx context.Context) error { + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeVector { + continue + } + if !fc.Filterable && !fc.Sortable { + continue + } + fieldSchema := qdrantFieldSchema(fc.Type) + if fieldSchema == "" { + continue + } + body := map[string]any{ + "field_name": fc.Name, + "field_schema": fieldSchema, + } + _, err := idx.doRequest(ctx, http.MethodPut, fmt.Sprintf("/collections/%s/index", idx.name), body) + if err != nil { + return fmt.Errorf("qdrant: create payload index for field %q: %w", fc.Name, err) + } + } + + // Also create indexes for metadata fields. + for _, meta := range []struct { + name string + schema string + }{ + {reservedTypeNameField, "keyword"}, + {reservedKeyFieldsField, "keyword"}, + } { + body := map[string]any{ + "field_name": meta.name, + "field_schema": meta.schema, + } + _, err := idx.doRequest(ctx, http.MethodPut, fmt.Sprintf("/collections/%s/index", idx.name), body) + if err != nil { + return fmt.Errorf("qdrant: create payload index for field %q: %w", meta.name, err) + } + } + + return nil +} + +// qdrantFieldSchema maps a searchindex.FieldType to the Qdrant payload field_schema string. +func qdrantFieldSchema(ft searchindex.FieldType) string { + switch ft { + case searchindex.FieldTypeText: + return "text" + case searchindex.FieldTypeKeyword: + return "keyword" + case searchindex.FieldTypeNumeric: + return "float" + case searchindex.FieldTypeBool: + return "bool" + case searchindex.FieldTypeGeo: + return "geo" + case searchindex.FieldTypeDate, searchindex.FieldTypeDateTime: + return "datetime" + default: + return "" + } +} + +// IndexDocument indexes a single document. +func (idx *Index) IndexDocument(ctx context.Context, doc searchindex.EntityDocument) error { + return idx.IndexDocuments(ctx, []searchindex.EntityDocument{doc}) +} + +// IndexDocuments indexes a batch of documents via PUT /collections/{name}/points. +func (idx *Index) IndexDocuments(ctx context.Context, docs []searchindex.EntityDocument) error { + points := make([]map[string]any, 0, len(docs)) + for _, doc := range docs { + point, err := idx.buildPoint(doc) + if err != nil { + return err + } + points = append(points, point) + } + + body := map[string]any{ + "points": points, + } + + _, err := idx.doRequest(ctx, http.MethodPut, fmt.Sprintf("/collections/%s/points?wait=true", idx.name), body) + if err != nil { + return fmt.Errorf("qdrant: index documents: %w", err) + } + return nil +} + +// buildPoint converts an EntityDocument into a Qdrant point. +func (idx *Index) buildPoint(doc searchindex.EntityDocument) (map[string]any, error) { + pointID := documentIDHash(doc.Identity) + + // Build payload from all fields plus metadata. 
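+	// The reserved fields are what later let extractIdentity rebuild a full
+	// DocumentIdentity from a bare search hit.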
+ payload := make(map[string]any, len(doc.Fields)+2) + for k, v := range doc.Fields { + payload[k] = v + } + payload[reservedTypeNameField] = doc.Identity.TypeName + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return nil, fmt.Errorf("qdrant: failed to marshal key fields: %w", err) + } + payload[reservedKeyFieldsField] = string(keyFieldsJSON) + + // Extract vector: use the first vector field from the document's Vectors map. + var vector []float32 + if len(doc.Vectors) > 0 { + // Pick the first vector. If the schema specifies a vector field, prefer that. + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeVector { + if v, ok := doc.Vectors[fc.Name]; ok { + vector = v + break + } + } + } + // If not found via schema, just pick the first one. + if vector == nil { + for _, v := range doc.Vectors { + vector = v + break + } + } + } + + // If no vector is provided, use a zero vector matching the collection's vector size. + if vector == nil { + size := idx.vectorSize() + vector = make([]float32, size) + } + + point := map[string]any{ + "id": pointID, + "vector": vector, + "payload": payload, + } + return point, nil +} + +// vectorSize returns the configured vector dimension size from the schema. +func (idx *Index) vectorSize() int { + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeVector && fc.Dimensions > 0 { + return fc.Dimensions + } + } + return 4 // dummy size matching createCollection default +} + +// documentIDHash computes a deterministic uint64 hash from a DocumentIdentity +// using FNV-1a. The identity is serialized as TypeName:key1=val1,key2=val2,... +// with keys sorted alphabetically. +func documentIDHash(id searchindex.DocumentIdentity) uint64 { + s := documentIDString(id) + h := fnv.New64a() + _, _ = h.Write([]byte(s)) + return h.Sum64() +} + +// documentIDString computes a deterministic string ID from a DocumentIdentity. +func documentIDString(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return id.TypeName + } + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(id.TypeName) + b.WriteByte(':') + for i, k := range keys { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(k) + b.WriteByte('=') + fmt.Fprintf(&b, "%v", id.KeyFields[k]) + } + return b.String() +} + +// DeleteDocument deletes a single document by identity. +func (idx *Index) DeleteDocument(ctx context.Context, id searchindex.DocumentIdentity) error { + return idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{id}) +} + +// DeleteDocuments deletes a batch of documents by identity via +// POST /collections/{name}/points/delete. +func (idx *Index) DeleteDocuments(ctx context.Context, ids []searchindex.DocumentIdentity) error { + pointIDs := make([]uint64, 0, len(ids)) + for _, id := range ids { + pointIDs = append(pointIDs, documentIDHash(id)) + } + + body := map[string]any{ + "points": pointIDs, + } + + _, err := idx.doRequest(ctx, http.MethodPost, fmt.Sprintf("/collections/%s/points/delete?wait=true", idx.name), body) + if err != nil { + return fmt.Errorf("qdrant: delete documents: %w", err) + } + return nil +} + +// Search performs a search query and returns results. +func (idx *Index) Search(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + // Build the Qdrant filter (may be nil). 
+ filter := idx.buildFilter(req) + + limit := req.Limit + if limit <= 0 { + limit = 10 + } + + // Determine which search mode to use. + if len(req.Vector) > 0 { + return idx.vectorSearch(ctx, req.Vector, filter, req.Sort, limit, req.Offset) + } + + // No vector provided: use scroll to retrieve with filter. + return idx.scrollSearch(ctx, filter, req.Sort, limit, req.Offset) +} + +// vectorSearch performs a vector search via POST /collections/{name}/points/search. +func (idx *Index) vectorSearch(ctx context.Context, vector []float32, filter map[string]any, sortFields []searchindex.SortField, limit, offset int) (*searchindex.SearchResult, error) { + body := map[string]any{ + "vector": vector, + "limit": limit, + "with_payload": true, + } + if offset > 0 { + body["offset"] = offset + } + if filter != nil { + body["filter"] = filter + } + + respBody, err := idx.doRequest(ctx, http.MethodPost, fmt.Sprintf("/collections/%s/points/search", idx.name), body) + if err != nil { + return nil, fmt.Errorf("qdrant: vector search: %w", err) + } + + var resp struct { + Result []struct { + ID json.RawMessage `json:"id"` + Score float64 `json:"score"` + Payload map[string]interface{} `json:"payload"` + } `json:"result"` + } + if err := json.Unmarshal(respBody, &resp); err != nil { + return nil, fmt.Errorf("qdrant: decode search response: %w", err) + } + + hits := make([]searchindex.SearchHit, 0, len(resp.Result)) + for _, r := range resp.Result { + hit, err := convertPayloadToHit(r.Payload, r.Score) + if err != nil { + return nil, err + } + hits = append(hits, hit) + } + + // Apply payload-based sorting if sort fields are specified. + if len(sortFields) > 0 { + sortHits(hits, sortFields) + } + + return &searchindex.SearchResult{ + Hits: hits, + TotalCount: len(hits), + }, nil +} + +// scrollSearch performs a filtered retrieval using POST /collections/{name}/points/scroll. +func (idx *Index) scrollSearch(ctx context.Context, filter map[string]any, sortFields []searchindex.SortField, limit, offset int) (*searchindex.SearchResult, error) { + body := map[string]any{ + "limit": limit + offset, // fetch enough to handle offset + "with_payload": true, + } + if filter != nil { + body["filter"] = filter + } + + // If sort is requested and Qdrant supports order_by (v1.7+), add it. + if len(sortFields) > 0 { + sf := sortFields[0] // Qdrant scroll supports single order_by + direction := "asc" + if !sf.Ascending { + direction = "desc" + } + body["order_by"] = map[string]any{ + "key": sf.Field, + "direction": direction, + } + } + + respBody, err := idx.doRequest(ctx, http.MethodPost, fmt.Sprintf("/collections/%s/points/scroll", idx.name), body) + if err != nil { + return nil, fmt.Errorf("qdrant: scroll search: %w", err) + } + + var resp struct { + Result struct { + Points []struct { + ID json.RawMessage `json:"id"` + Payload map[string]interface{} `json:"payload"` + } `json:"points"` + } `json:"result"` + } + if err := json.Unmarshal(respBody, &resp); err != nil { + return nil, fmt.Errorf("qdrant: decode scroll response: %w", err) + } + + allPoints := resp.Result.Points + + // Apply offset. + if offset > 0 && offset < len(allPoints) { + allPoints = allPoints[offset:] + } else if offset >= len(allPoints) { + allPoints = nil + } + + // Apply limit. 
+ if len(allPoints) > limit { + allPoints = allPoints[:limit] + } + + hits := make([]searchindex.SearchHit, 0, len(allPoints)) + for _, p := range allPoints { + hit, err := convertPayloadToHit(p.Payload, 0) + if err != nil { + return nil, err + } + hits = append(hits, hit) + } + + // If we couldn't use order_by (multiple sort fields), do client-side sort. + if len(sortFields) > 1 { + sortHits(hits, sortFields) + } + + return &searchindex.SearchResult{ + Hits: hits, + TotalCount: len(hits), + }, nil +} + +// buildFilter constructs a Qdrant filter object from the SearchRequest. +func (idx *Index) buildFilter(req searchindex.SearchRequest) map[string]any { + var conditions []map[string]any + + // TypeName filter. + if req.TypeName != "" { + conditions = append(conditions, map[string]any{ + "key": reservedTypeNameField, + "match": map[string]any{ + "value": req.TypeName, + }, + }) + } + + // Structured filter. + if req.Filter != nil { + filterCond := translateFilter(req.Filter) + if filterCond != nil { + conditions = append(conditions, filterCond) + } + } + + switch len(conditions) { + case 0: + return nil + case 1: + // If the single condition is already a compound filter (has must/should/must_not), + // return it directly. Otherwise wrap it in must. + if _, hasMust := conditions[0]["must"]; hasMust { + return conditions[0] + } + if _, hasShould := conditions[0]["should"]; hasShould { + return conditions[0] + } + if _, hasMustNot := conditions[0]["must_not"]; hasMustNot { + return conditions[0] + } + return map[string]any{ + "must": conditions, + } + default: + return map[string]any{ + "must": conditions, + } + } +} + +// translateFilter recursively converts a searchindex.Filter tree to a Qdrant +// filter condition. +func translateFilter(f *searchindex.Filter) map[string]any { + if f == nil { + return nil + } + + // AND + if len(f.And) > 0 { + children := make([]map[string]any, 0, len(f.And)) + for _, child := range f.And { + c := translateFilter(child) + if c != nil { + children = append(children, c) + } + } + if len(children) == 0 { + return nil + } + return map[string]any{ + "must": children, + } + } + + // OR + if len(f.Or) > 0 { + children := make([]map[string]any, 0, len(f.Or)) + for _, child := range f.Or { + c := translateFilter(child) + if c != nil { + children = append(children, c) + } + } + if len(children) == 0 { + return nil + } + return map[string]any{ + "should": children, + } + } + + // NOT + if f.Not != nil { + inner := translateFilter(f.Not) + if inner == nil { + return nil + } + return map[string]any{ + "must_not": []map[string]any{inner}, + } + } + + // Term + if f.Term != nil { + return map[string]any{ + "key": f.Term.Field, + "match": map[string]any{ + "value": f.Term.Value, + }, + } + } + + // Terms (IN) + if f.Terms != nil { + return map[string]any{ + "key": f.Terms.Field, + "match": map[string]any{ + "any": f.Terms.Values, + }, + } + } + + // Range + if f.Range != nil { + rangeMap := make(map[string]any) + if f.Range.GTE != nil { + rangeMap["gte"] = toFloat(f.Range.GTE) + } + if f.Range.HasGT && f.Range.GT != nil { + rangeMap["gt"] = toFloat(f.Range.GT) + } + if f.Range.LTE != nil { + rangeMap["lte"] = toFloat(f.Range.LTE) + } + if f.Range.HasLT && f.Range.LT != nil { + rangeMap["lt"] = toFloat(f.Range.LT) + } + return map[string]any{ + "key": f.Range.Field, + "range": rangeMap, + } + } + + // Prefix + if f.Prefix != nil { + return map[string]any{ + "key": f.Prefix.Field, + "match": map[string]any{ + "text": f.Prefix.Value, + }, + } + } + + // Exists: Qdrant has no 
direct "exists" condition, so we negate "is_empty". + if f.Exists != nil { + return map[string]any{ + "must_not": []map[string]any{ + { + "is_empty": map[string]any{ + "key": f.Exists.Field, + }, + }, + }, + } + } + + return nil +} + +// toFloat converts an any value to float64 for range filters, returning the +// original value if conversion is not straightforward (Qdrant accepts numbers directly). +func toFloat(v any) any { + switch n := v.(type) { + case float64: + return n + case float32: + return float64(n) + case int: + return float64(n) + case int64: + return float64(n) + case int32: + return float64(n) + case json.Number: + f, err := n.Float64() + if err != nil { + return v + } + return f + default: + return v + } +} + +// convertPayloadToHit converts a Qdrant payload map into a searchindex.SearchHit. +func convertPayloadToHit(payload map[string]interface{}, score float64) (searchindex.SearchHit, error) { + identity, err := extractIdentity(payload) + if err != nil { + return searchindex.SearchHit{}, err + } + + // Build representation from payload, excluding internal fields. + representation := make(map[string]any, len(payload)) + for k, v := range payload { + if k == reservedTypeNameField || k == reservedKeyFieldsField { + continue + } + representation[k] = v + } + + // Add __typename. + representation["__typename"] = identity.TypeName + // Merge key fields into representation. + for k, v := range identity.KeyFields { + representation[k] = v + } + + return searchindex.SearchHit{ + Identity: identity, + Score: score, + Distance: score, // Qdrant returns similarity score which can serve as distance metric + Representation: representation, + }, nil +} + +// extractIdentity reconstructs a DocumentIdentity from a Qdrant payload. +func extractIdentity(payload map[string]interface{}) (searchindex.DocumentIdentity, error) { + typeName, _ := payload[reservedTypeNameField].(string) + keyFieldsRaw, _ := payload[reservedKeyFieldsField].(string) + + var keyFields map[string]any + if keyFieldsRaw != "" { + if err := json.Unmarshal([]byte(keyFieldsRaw), &keyFields); err != nil { + return searchindex.DocumentIdentity{}, fmt.Errorf("qdrant: failed to unmarshal key fields: %w", err) + } + } + if keyFields == nil { + keyFields = make(map[string]any) + } + + return searchindex.DocumentIdentity{ + TypeName: typeName, + KeyFields: keyFields, + }, nil +} + +// sortHits sorts search hits by the given sort fields (client-side). +func sortHits(hits []searchindex.SearchHit, sortFields []searchindex.SortField) { + sort.SliceStable(hits, func(i, j int) bool { + for _, sf := range sortFields { + vi := hits[i].Representation[sf.Field] + vj := hits[j].Representation[sf.Field] + cmp := compareValues(vi, vj) + if cmp == 0 { + continue + } + if sf.Ascending { + return cmp < 0 + } + return cmp > 0 + } + return false + }) +} + +// compareValues compares two arbitrary values for sorting purposes. +// Returns -1, 0, or 1. +func compareValues(a, b any) int { + fa, aOK := toFloat64(a) + fb, bOK := toFloat64(b) + if aOK && bOK { + switch { + case fa < fb: + return -1 + case fa > fb: + return 1 + default: + return 0 + } + } + + sa := fmt.Sprintf("%v", a) + sb := fmt.Sprintf("%v", b) + switch { + case sa < sb: + return -1 + case sa > sb: + return 1 + default: + return 0 + } +} + +// toFloat64 attempts to convert a value to float64. 
+func toFloat64(v any) (float64, bool) { + switch n := v.(type) { + case float64: + return n, true + case float32: + return float64(n), true + case int: + return float64(n), true + case int64: + return float64(n), true + case int32: + return float64(n), true + case json.Number: + f, err := n.Float64() + return f, err == nil + default: + return 0, false + } +} + +// Autocomplete is not supported by Qdrant — it has no term dictionary API. +func (idx *Index) Autocomplete(_ context.Context, _ searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) { + return nil, fmt.Errorf("qdrant: autocomplete is not supported") +} + +// Close releases resources held by the index. For the HTTP-based Qdrant client, +// there is nothing to release. +func (idx *Index) Close() error { + return nil +} + +// doRequest performs an HTTP request to the Qdrant REST API. +func (idx *Index) doRequest(ctx context.Context, method, path string, body any) ([]byte, error) { + url := idx.config.baseURL() + path + + var bodyReader io.Reader + if body != nil { + bodyBytes, err := json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("qdrant: marshal request body: %w", err) + } + bodyReader = bytes.NewReader(bodyBytes) + } + + req, err := http.NewRequestWithContext(ctx, method, url, bodyReader) + if err != nil { + return nil, fmt.Errorf("qdrant: create request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + if idx.config.APIKey != "" { + req.Header.Set("api-key", idx.config.APIKey) + } + + resp, err := idx.client.Do(req) + if err != nil { + return nil, fmt.Errorf("qdrant: request %s %s: %w", method, path, err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("qdrant: read response body: %w", err) + } + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, fmt.Errorf("qdrant: %s %s returned status %d: %s", method, path, resp.StatusCode, string(respBody)) + } + + return respBody, nil +} diff --git a/v2/pkg/searchindex/qdrant/qdrant_test.go b/v2/pkg/searchindex/qdrant/qdrant_test.go new file mode 100644 index 0000000000..265716e9a0 --- /dev/null +++ b/v2/pkg/searchindex/qdrant/qdrant_test.go @@ -0,0 +1,619 @@ +//go:build integration + +package qdrant + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func startQdrant(t *testing.T) (host string, port int) { + t.Helper() + ctx := context.Background() + + req := testcontainers.ContainerRequest{ + Image: "qdrant/qdrant:v1.12.5", + ExposedPorts: []string{"6333/tcp"}, + WaitingFor: wait.ForHTTP("/healthz").WithPort("6333/tcp").WithStartupTimeout(60 * time.Second), + } + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + }) + if err != nil { + t.Fatalf("failed to start qdrant container: %v", err) + } + t.Cleanup(func() { + if err := container.Terminate(ctx); err != nil { + t.Logf("failed to terminate qdrant container: %v", err) + } + }) + + mappedHost, err := container.Host(ctx) + if err != nil { + t.Fatalf("failed to get container host: %v", err) + } + mappedPort, err := container.MappedPort(ctx, "6333") + if err != nil { + t.Fatalf("failed to get mapped port: %v", err) + } + + return mappedHost, mappedPort.Int() +} + +func newTestIndex(t *testing.T, host string, port int, name 
string, fields []searchindex.FieldConfig) searchindex.Index { + t.Helper() + factory := NewFactory() + schema := searchindex.IndexConfig{ + Name: name, + Fields: fields, + } + cfg, _ := json.Marshal(Config{Host: host, Port: port}) + idx, err := factory.CreateIndex(context.Background(), name, schema, cfg) + if err != nil { + t.Fatalf("CreateIndex: %v", err) + } + t.Cleanup(func() { idx.Close() }) + return idx +} + +func TestFullLifecycle(t *testing.T) { + host, port := startQdrant(t) + ctx := context.Background() + + fields := []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + {Name: "embedding", Type: searchindex.FieldTypeVector, Dimensions: 4}, + } + + idx := newTestIndex(t, host, port, "test_products", fields) + + // Index documents + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + Vectors: map[string][]float32{"embedding": {0.1, 0.2, 0.3, 0.4}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + Vectors: map[string][]float32{"embedding": {0.15, 0.25, 0.35, 0.45}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + Vectors: map[string][]float32{"embedding": {0.9, 0.8, 0.7, 0.6}}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + Vectors: map[string][]float32{"embedding": {0.2, 0.3, 0.4, 0.5}}, + }, + } + + if err := idx.IndexDocuments(ctx, docs); err != nil { + t.Fatalf("IndexDocuments: %v", err) + } + + // Wait briefly for indexing to complete. 
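+	// (The upserts above were sent with wait=true; the extra pause is belt and
+	// braces for the containerized integration environment.)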
+ time.Sleep(1 * time.Second) + + t.Run("vector search", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3, 0.4}, + VectorField: "embedding", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 4 { + t.Errorf("expected 4 hits, got %d", len(result.Hits)) + } + // The closest vector should be Running Shoes (exact match) + if len(result.Hits) > 0 { + t.Logf("top hit: %v (score: %f)", result.Hits[0].Representation["name"], result.Hits[0].Score) + } + }) + + t.Run("vector search with type filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3, 0.4}, + TypeName: "Product", + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 4 { + t.Errorf("expected 4 hits, got %d", len(result.Hits)) + } + for _, hit := range result.Hits { + if hit.Identity.TypeName != "Product" { + t.Errorf("expected TypeName=Product, got %q", hit.Identity.TypeName) + } + } + }) + + t.Run("vector search with term filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3, 0.4}, + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 3 { + t.Errorf("expected 3 hits for category=Footwear, got %d", len(result.Hits)) + } + }) + + t.Run("scroll search with no vector", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 4 { + t.Errorf("expected 4 hits, got %d", len(result.Hits)) + } + }) + + t.Run("term filter on keyword", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 1 { + t.Errorf("expected 1 hit for category=Accessories, got %d", len(result.Hits)) + } + if len(result.Hits) > 0 { + if result.Hits[0].Representation["name"] != "Leather Belt" { + t.Errorf("expected Leather Belt, got %v", result.Hits[0].Representation["name"]) + } + } + }) + + t.Run("boolean filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 1 { + t.Errorf("expected 1 hit for inStock=false, got %d", len(result.Hits)) + } + }) + + t.Run("numeric range filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: 30.0, + LTE: 100.0, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 2 { + t.Errorf("expected 2 hits for price 30-100, got %d", len(result.Hits)) + for _, h := range result.Hits { + t.Logf(" hit: %v price=%v", h.Representation["name"], h.Representation["price"]) + } + } + }) + + t.Run("AND filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: 
"category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 3 { + t.Errorf("expected 3 hits for Footwear AND inStock, got %d", len(result.Hits)) + } + }) + + t.Run("OR filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 4 { + t.Errorf("expected 4 hits for Footwear OR Accessories, got %d", len(result.Hits)) + } + }) + + t.Run("NOT filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 1 { + t.Errorf("expected 1 hit for NOT Footwear, got %d", len(result.Hits)) + } + }) + + t.Run("terms filter", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3, 0.4}, + Filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"Footwear", "Accessories"}, + }, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 4 { + t.Errorf("expected 4 hits for terms filter, got %d", len(result.Hits)) + } + }) + + t.Run("search hit identity", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3, 0.4}, + Limit: 1, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) == 0 { + t.Fatal("expected at least 1 hit") + } + hit := result.Hits[0] + if hit.Identity.TypeName != "Product" { + t.Errorf("TypeName = %q, want %q", hit.Identity.TypeName, "Product") + } + if hit.Representation["__typename"] != "Product" { + t.Errorf("__typename = %v, want %q", hit.Representation["__typename"], "Product") + } + }) + + t.Run("prefix filter on category", func(t *testing.T) { + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Prefix: &searchindex.PrefixFilter{Field: "category", Value: "Foot"}, + }, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 3 { + t.Errorf("expected 3 hits for category prefix 'Foot', got %d", len(result.Hits)) + for _, h := range result.Hits { + t.Logf(" hit: %v category=%v", h.Representation["name"], h.Representation["category"]) + } + } + for _, hit := range result.Hits { + cat, _ := hit.Representation["category"].(string) + if cat != "Footwear" { + t.Errorf("expected category=Footwear, got %q", cat) + } + } + }) + + t.Run("index single document", func(t *testing.T) { + singleDoc := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "5"}}, + Fields: map[string]any{"name": "Canvas Sneakers", "description": "Casual canvas shoes", "category": "Footwear", "price": 49.99, "inStock": true}, + Vectors: map[string][]float32{"embedding": {0.12, 0.22, 0.32, 0.42}}, + } + if err := idx.IndexDocument(ctx, singleDoc); err != nil { + t.Fatalf("IndexDocument: %v", err) + } + time.Sleep(500 * 
time.Millisecond) + + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.12, 0.22, 0.32, 0.42}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 5 { + t.Errorf("expected 5 hits after indexing single doc, got %d", len(result.Hits)) + } + found := false + for _, hit := range result.Hits { + if hit.Representation["name"] == "Canvas Sneakers" { + found = true + break + } + } + if !found { + t.Errorf("expected to find Canvas Sneakers in search results") + } + }) + + t.Run("upsert overwrites existing document", func(t *testing.T) { + // Re-index id=5 with different data but same identity. + updatedDoc := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "5"}}, + Fields: map[string]any{"name": "Updated Sneakers", "description": "Updated description", "category": "Footwear", "price": 59.99, "inStock": false}, + Vectors: map[string][]float32{"embedding": {0.12, 0.22, 0.32, 0.42}}, + } + if err := idx.IndexDocuments(ctx, []searchindex.EntityDocument{updatedDoc}); err != nil { + t.Fatalf("IndexDocuments (upsert): %v", err) + } + time.Sleep(500 * time.Millisecond) + + // Total count should still be 5 (no duplicate created). + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 5 { + t.Errorf("expected 5 hits after upsert (no duplicate), got %d", len(result.Hits)) + } + + // Verify the document was actually updated. + result, err = idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.12, 0.22, 0.32, 0.42}, + Limit: 1, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) == 0 { + t.Fatal("expected at least 1 hit") + } + top := result.Hits[0] + if top.Representation["name"] != "Updated Sneakers" { + t.Errorf("expected name='Updated Sneakers' after upsert, got %v", top.Representation["name"]) + } + if top.Representation["price"] != 59.99 { + t.Errorf("expected price=59.99 after upsert, got %v", top.Representation["price"]) + } + }) + + t.Run("pagination with offset", func(t *testing.T) { + // Fetch all results to know the full set. + allResult, err := idx.Search(ctx, searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search (all): %v", err) + } + totalCount := len(allResult.Hits) + if totalCount < 3 { + t.Fatalf("expected at least 3 documents for pagination test, got %d", totalCount) + } + + // Fetch page 1: limit=2, offset=0. + page1, err := idx.Search(ctx, searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 2, + Offset: 0, + }) + if err != nil { + t.Fatalf("Search (page1): %v", err) + } + if len(page1.Hits) != 2 { + t.Errorf("expected 2 hits on page1, got %d", len(page1.Hits)) + } + + // Fetch page 2: limit=2, offset=2. + page2, err := idx.Search(ctx, searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 2, + Offset: 2, + }) + if err != nil { + t.Fatalf("Search (page2): %v", err) + } + if len(page2.Hits) != 2 { + t.Errorf("expected 2 hits on page2, got %d", len(page2.Hits)) + } + + // Verify no overlap between page1 and page2. 
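+		// The seeded prices are distinct, so the ascending sort is
+		// deterministic and the two pages must be disjoint.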
+ if len(page1.Hits) >= 2 && len(page2.Hits) >= 1 { + page1Names := map[string]bool{} + for _, h := range page1.Hits { + name, _ := h.Representation["name"].(string) + page1Names[name] = true + } + for _, h := range page2.Hits { + name, _ := h.Representation["name"].(string) + if page1Names[name] { + t.Errorf("page2 hit %q also appeared in page1 (overlap)", name) + } + } + } + + // Offset beyond total should return no results. + empty, err := idx.Search(ctx, searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 10, + Offset: 100, + }) + if err != nil { + t.Fatalf("Search (offset beyond total): %v", err) + } + if len(empty.Hits) != 0 { + t.Errorf("expected 0 hits for offset beyond total, got %d", len(empty.Hits)) + } + }) + + // Clean up the extra document before the delete tests proceed. + if err := idx.DeleteDocument(ctx, searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "5"}, + }); err != nil { + t.Fatalf("cleanup delete id=5: %v", err) + } + time.Sleep(500 * time.Millisecond) + + t.Run("delete single document", func(t *testing.T) { + err := idx.DeleteDocument(ctx, searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }) + if err != nil { + t.Fatalf("DeleteDocument: %v", err) + } + time.Sleep(500 * time.Millisecond) + + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3, 0.4}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search after delete: %v", err) + } + if len(result.Hits) != 3 { + t.Errorf("expected 3 hits after deleting one doc, got %d", len(result.Hits)) + } + }) + + t.Run("delete multiple documents", func(t *testing.T) { + err := idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + }) + if err != nil { + t.Fatalf("DeleteDocuments: %v", err) + } + time.Sleep(500 * time.Millisecond) + + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Vector: []float32{0.1, 0.2, 0.3, 0.4}, + Limit: 10, + }) + if err != nil { + t.Fatalf("Search after batch delete: %v", err) + } + if len(result.Hits) != 1 { + t.Errorf("expected 1 hit after batch delete, got %d", len(result.Hits)) + } + }) +} + +func TestDocumentIDHashDeterministic(t *testing.T) { + id := searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123", "sku": "ABC"}, + } + + h1 := documentIDHash(id) + h2 := documentIDHash(id) + if h1 != h2 { + t.Errorf("documentIDHash not deterministic: %d != %d", h1, h2) + } + + // Same fields in different insertion order should produce the same hash. 
+ id2 := searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"sku": "ABC", "id": "123"}, + } + h3 := documentIDHash(id2) + if h1 != h3 { + t.Errorf("documentIDHash not stable across key order: %d != %d", h1, h3) + } +} + +func TestDocumentIDString(t *testing.T) { + id := searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123", "sku": "ABC"}, + } + s := documentIDString(id) + expected := "Product:id=123,sku=ABC" + if s != expected { + t.Errorf("documentIDString = %q, want %q", s, expected) + } +} + +func TestNoVectorFields(t *testing.T) { + host, port := startQdrant(t) + ctx := context.Background() + + fields := []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true}, + } + + idx := newTestIndex(t, host, port, fmt.Sprintf("test_no_vector_%d", time.Now().UnixNano()), fields) + + // Should be able to index documents without vectors. + err := idx.IndexDocument(ctx, searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Article", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Test Article", "category": "News"}, + }) + if err != nil { + t.Fatalf("IndexDocument: %v", err) + } + + time.Sleep(500 * time.Millisecond) + + // Scroll search should work without vectors. + result, err := idx.Search(ctx, searchindex.SearchRequest{ + Limit: 10, + }) + if err != nil { + t.Fatalf("Search: %v", err) + } + if len(result.Hits) != 1 { + t.Errorf("expected 1 hit, got %d", len(result.Hits)) + } +} diff --git a/v2/pkg/searchindex/registry.go b/v2/pkg/searchindex/registry.go new file mode 100644 index 0000000000..9204684a22 --- /dev/null +++ b/v2/pkg/searchindex/registry.go @@ -0,0 +1,68 @@ +package searchindex + +import ( + "fmt" + "sync" +) + +// IndexFactoryRegistry maps backend names to IndexFactory implementations. +type IndexFactoryRegistry struct { + mu sync.RWMutex + factories map[string]IndexFactory +} + +// NewIndexFactoryRegistry creates a new empty registry. +func NewIndexFactoryRegistry() *IndexFactoryRegistry { + return &IndexFactoryRegistry{ + factories: make(map[string]IndexFactory), + } +} + +// Register adds an IndexFactory for the given backend name. +func (r *IndexFactoryRegistry) Register(backend string, factory IndexFactory) { + r.mu.Lock() + defer r.mu.Unlock() + r.factories[backend] = factory +} + +// Get returns the IndexFactory for the given backend name. +func (r *IndexFactoryRegistry) Get(backend string) (IndexFactory, error) { + r.mu.RLock() + defer r.mu.RUnlock() + f, ok := r.factories[backend] + if !ok { + return nil, fmt.Errorf("searchindex: unknown backend %q", backend) + } + return f, nil +} + +// EmbedderRegistry maps model names to Embedder instances. +type EmbedderRegistry struct { + mu sync.RWMutex + embedders map[string]Embedder +} + +// NewEmbedderRegistry creates a new empty embedder registry. +func NewEmbedderRegistry() *EmbedderRegistry { + return &EmbedderRegistry{ + embedders: make(map[string]Embedder), + } +} + +// Register adds an Embedder for the given model name. +func (r *EmbedderRegistry) Register(model string, embedder Embedder) { + r.mu.Lock() + defer r.mu.Unlock() + r.embedders[model] = embedder +} + +// Get returns the Embedder for the given model name. 
+func (r *EmbedderRegistry) Get(model string) (Embedder, error) {
+	r.mu.RLock()
+	defer r.mu.RUnlock()
+	e, ok := r.embedders[model]
+	if !ok {
+		return nil, fmt.Errorf("searchindex: unknown embedder model %q", model)
+	}
+	return e, nil
+}
diff --git a/v2/pkg/searchindex/registry_test.go b/v2/pkg/searchindex/registry_test.go
new file mode 100644
index 0000000000..6f44e2e296
--- /dev/null
+++ b/v2/pkg/searchindex/registry_test.go
@@ -0,0 +1,70 @@
+package searchindex
+
+import (
+	"context"
+	"testing"
+)
+
+// mockFactory is a do-nothing IndexFactory used only to exercise registry lookups.
+type mockFactory struct{}
+
+func (f *mockFactory) CreateIndex(_ context.Context, _ string, _ IndexConfig, _ []byte) (Index, error) {
+	return nil, nil
+}
+
+// mockEmbedder reports a fixed dimensionality and returns zero vectors.
+type mockEmbedder struct {
+	dims int
+}
+
+func (e *mockEmbedder) Embed(_ context.Context, texts []string) ([][]float32, error) {
+	result := make([][]float32, len(texts))
+	for i := range result {
+		result[i] = make([]float32, e.dims)
+	}
+	return result, nil
+}
+
+func (e *mockEmbedder) EmbedSingle(_ context.Context, _ string) ([]float32, error) {
+	return make([]float32, e.dims), nil
+}
+
+func (e *mockEmbedder) Dimensions() int { return e.dims }
+
+func TestIndexFactoryRegistry(t *testing.T) {
+	reg := NewIndexFactoryRegistry()
+
+	// Get non-existent
+	_, err := reg.Get("bleve")
+	if err == nil {
+		t.Fatal("expected error for non-existent backend")
+	}
+
+	// Register and get
+	reg.Register("bleve", &mockFactory{})
+	f, err := reg.Get("bleve")
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if f == nil {
+		t.Fatal("expected non-nil factory")
+	}
+}
+
+func TestEmbedderRegistry(t *testing.T) {
+	reg := NewEmbedderRegistry()
+
+	// Get non-existent
+	_, err := reg.Get("text-embedding-3-small")
+	if err == nil {
+		t.Fatal("expected error for non-existent model")
+	}
+
+	// Register and get
+	reg.Register("text-embedding-3-small", &mockEmbedder{dims: 1536})
+	e, err := reg.Get("text-embedding-3-small")
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if e.Dimensions() != 1536 {
+		t.Errorf("Dimensions() = %d, want 1536", e.Dimensions())
+	}
+}
diff --git a/v2/pkg/searchindex/template_transformer.go b/v2/pkg/searchindex/template_transformer.go
new file mode 100644
index 0000000000..a718d6cea0
--- /dev/null
+++ b/v2/pkg/searchindex/template_transformer.go
@@ -0,0 +1,69 @@
+package searchindex
+
+import (
+	"bytes"
+	"fmt"
+	"strings"
+	"text/template"
+)
+
+// TemplateTransformer is the default TextTransformer, driven by the @embedding template string.
+// Template uses Go text/template syntax: "{{.title}}. Topic: {{.topic}}. {{.body}}"
+type TemplateTransformer struct {
+	tmpl *template.Template
+}
+
+// NewTemplateTransformer creates a TemplateTransformer from a template string.
+// The template string uses the syntax "{{title}}. Topic: {{topic}}. {{body}}"
+// which is automatically converted to Go template syntax "{{.title}}. Topic: {{.topic}}. {{.body}}".
+func NewTemplateTransformer(templateStr string) (*TemplateTransformer, error) {
+	// Convert shorthand {{fieldName}} to Go template syntax {{.fieldName}}
+	goTemplate := convertToGoTemplate(templateStr)
+	tmpl, err := template.New("embedding").Parse(goTemplate)
+	if err != nil {
+		return nil, fmt.Errorf("searchindex: invalid embedding template: %w", err)
+	}
+	return &TemplateTransformer{tmpl: tmpl}, nil
+}
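+
+// A quick usage sketch (values are illustrative):
+//
+//	tr, _ := NewTemplateTransformer("{{name}}. {{description}}")
+//	text := tr.Transform(map[string]any{"name": "Shoe", "description": "Light"})
+//	// text == "Shoe. Light"
+
+// Transform applies the template to the entity fields and returns the
+// resulting string. Execution errors are swallowed and yield an empty string.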
+func (t *TemplateTransformer) Transform(fields map[string]any) string { + var buf bytes.Buffer + if err := t.tmpl.Execute(&buf, fields); err != nil { + return "" + } + return buf.String() +} + +// convertToGoTemplate converts shorthand {{fieldName}} to {{.fieldName}}. +// It handles the case where the user writes templates without the dot prefix. +func convertToGoTemplate(s string) string { + var result strings.Builder + i := 0 + for i < len(s) { + if i+1 < len(s) && s[i] == '{' && s[i+1] == '{' { + // Find the closing }} + end := strings.Index(s[i+2:], "}}") + if end == -1 { + result.WriteString(s[i:]) + break + } + content := strings.TrimSpace(s[i+2 : i+2+end]) + // Only add dot prefix if content doesn't already start with a dot or special character + if len(content) > 0 && content[0] != '.' && content[0] != '$' { + result.WriteString("{{.") + result.WriteString(content) + result.WriteString("}}") + } else { + result.WriteString("{{") + result.WriteString(content) + result.WriteString("}}") + } + i = i + 2 + end + 2 + } else { + result.WriteByte(s[i]) + i++ + } + } + return result.String() +} diff --git a/v2/pkg/searchindex/template_transformer_test.go b/v2/pkg/searchindex/template_transformer_test.go new file mode 100644 index 0000000000..b7373de2e1 --- /dev/null +++ b/v2/pkg/searchindex/template_transformer_test.go @@ -0,0 +1,85 @@ +package searchindex + +import "testing" + +func TestTemplateTransformer(t *testing.T) { + tests := []struct { + name string + template string + fields map[string]any + expected string + }{ + { + name: "simple template", + template: "{{title}}. {{body}}", + fields: map[string]any{"title": "Hello", "body": "World"}, + expected: "Hello. World", + }, + { + name: "template with topic", + template: "{{title}}. Topic: {{topic}}. {{body}}", + fields: map[string]any{"title": "Running Shoes", "topic": "Footwear", "body": "Great for jogging"}, + expected: "Running Shoes. Topic: Footwear. 
Great for jogging",
+		},
+		{
+			name:     "already has dot prefix",
+			template: "{{.title}} - {{.body}}",
+			fields:   map[string]any{"title": "Test", "body": "Content"},
+			expected: "Test - Content",
+		},
+		{
+			name:     "missing field produces no-value placeholder",
+			template: "{{title}}",
+			fields:   map[string]any{},
+			expected: "<no value>",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			transformer, err := NewTemplateTransformer(tt.template)
+			if err != nil {
+				t.Fatalf("NewTemplateTransformer(%q): %v", tt.template, err)
+			}
+			got := transformer.Transform(tt.fields)
+			if got != tt.expected {
+				t.Errorf("Transform() = %q, want %q", got, tt.expected)
+			}
+		})
+	}
+}
+
+func TestConvertToGoTemplate(t *testing.T) {
+	tests := []struct {
+		input    string
+		expected string
+	}{
+		{"{{title}}", "{{.title}}"},
+		{"{{.title}}", "{{.title}}"},
+		{"{{title}} {{body}}", "{{.title}} {{.body}}"},
+		{"no template", "no template"},
+		{"{{ title }}", "{{.title}}"},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.input, func(t *testing.T) {
+			got := convertToGoTemplate(tt.input)
+			if got != tt.expected {
+				t.Errorf("convertToGoTemplate(%q) = %q, want %q", tt.input, got, tt.expected)
+			}
+		})
+	}
+}
+
+func TestFuncTransformer(t *testing.T) {
+	ft := &FuncTransformer{
+		Fn: func(fields map[string]any) string {
+			return fields["a"].(string) + " " + fields["b"].(string)
+		},
+	}
+
+	got := ft.Transform(map[string]any{"a": "hello", "b": "world"})
+	if got != "hello world" {
+		t.Errorf("FuncTransformer.Transform() = %q, want %q", got, "hello world")
+	}
+}
diff --git a/v2/pkg/searchindex/typesense/typesense.go b/v2/pkg/searchindex/typesense/typesense.go
new file mode 100644
index 0000000000..a568f99e02
--- /dev/null
+++ b/v2/pkg/searchindex/typesense/typesense.go
@@ -0,0 +1,1101 @@
+// Package typesense implements the searchindex.Index interface for Typesense.
+//
+// It uses only the Go standard library (net/http + encoding/json) to talk to
+// the Typesense HTTP API. No external Typesense SDK is used.
+package typesense
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+)
+
+// Compile-time interface conformance checks.
+var (
+	_ searchindex.Index        = (*Index)(nil)
+	_ searchindex.IndexFactory = (*Factory)(nil)
+)
+
+// Internal field names stored alongside user data.
+const (
+	reservedDocIDField     = "_docId"
+	reservedTypeNameField  = "_typeName"
+	reservedKeyFieldsField = "_keyFieldsJSON"
+)
+
+// Config holds Typesense-specific configuration.
+type Config struct {
+	Host     string `json:"host"`
+	Port     int    `json:"port,omitempty"`
+	APIKey   string `json:"api_key"`
+	Protocol string `json:"protocol,omitempty"`
+}
+
+// Factory implements searchindex.IndexFactory for Typesense.
+type Factory struct{}
+
+// NewFactory returns a new Typesense IndexFactory.
+func NewFactory() *Factory {
+	return &Factory{}
+}
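+
+// A minimal wiring sketch (values are illustrative; assumes a reachable
+// Typesense server and an IndexConfig built elsewhere):
+//
+//	factory := typesense.NewFactory()
+//	cfgJSON, _ := json.Marshal(typesense.Config{Host: "localhost", Port: 8108, APIKey: "dev-key"})
+//	idx, err := factory.CreateIndex(ctx, "products", schema, cfgJSON)
+//	if err != nil {
+//		// handle error
+//	}
+//	defer idx.Close()
+
+// CreateIndex creates a new Typesense collection that mirrors the given schema
+// and returns an Index handle for it.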
+func (f *Factory) CreateIndex(ctx context.Context, name string, schema searchindex.IndexConfig, configJSON []byte) (searchindex.Index, error) { + var cfg Config + if len(configJSON) > 0 { + if err := json.Unmarshal(configJSON, &cfg); err != nil { + return nil, fmt.Errorf("typesense: invalid config: %w", err) + } + } + if cfg.Host == "" { + cfg.Host = "localhost" + } + if cfg.Port == 0 { + cfg.Port = 8108 + } + if cfg.Protocol == "" { + cfg.Protocol = "http" + } + + idx := &Index{ + name: name, + config: cfg, + schema: schema, + client: &http.Client{}, + } + + if err := idx.createCollection(ctx); err != nil { + return nil, err + } + + return idx, nil +} + +// Index implements searchindex.Index backed by a Typesense collection. +type Index struct { + name string + config Config + schema searchindex.IndexConfig + client *http.Client +} + +// --------------------------------------------------------------------------- +// Collection creation +// --------------------------------------------------------------------------- + +// typesenseField is the JSON representation of a Typesense collection field. +type typesenseField struct { + Name string `json:"name"` + Type string `json:"type"` + Facet bool `json:"facet,omitempty"` + Sort bool `json:"sort,omitempty"` + NumDim int `json:"num_dim,omitempty"` +} + +// typesenseSchema is the JSON body sent to POST /collections. +type typesenseSchema struct { + Name string `json:"name"` + Fields []typesenseField `json:"fields"` + DefaultSortingField string `json:"default_sorting_field,omitempty"` +} + +func (idx *Index) createCollection(ctx context.Context) error { + fields := make([]typesenseField, 0, len(idx.schema.Fields)+3) + + var defaultSortingField string + + for _, fc := range idx.schema.Fields { + tf, err := mapField(fc) + if err != nil { + return fmt.Errorf("typesense: field %q: %w", fc.Name, err) + } + if fc.Sortable { + tf.Sort = true + } + fields = append(fields, tf) + + // Pick the first numeric sortable field as default_sorting_field. + if defaultSortingField == "" && fc.Type == searchindex.FieldTypeNumeric && fc.Sortable { + defaultSortingField = fc.Name + } + } + + // Internal metadata fields. + fields = append(fields, + typesenseField{Name: reservedDocIDField, Type: "string"}, + typesenseField{Name: reservedTypeNameField, Type: "string", Facet: true}, + typesenseField{Name: reservedKeyFieldsField, Type: "string"}, + ) + + schema := typesenseSchema{ + Name: idx.name, + Fields: fields, + DefaultSortingField: defaultSortingField, + } + + body, err := json.Marshal(schema) + if err != nil { + return fmt.Errorf("typesense: marshal schema: %w", err) + } + + resp, err := idx.doRequest(ctx, http.MethodPost, "/collections", bytes.NewReader(body)) + if err != nil { + return fmt.Errorf("typesense: create collection: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode == http.StatusConflict { + // Collection already exists; treat as success. + _, _ = io.Copy(io.Discard, resp.Body) + return nil + } + + if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK { + return idx.readError(resp, "create collection") + } + _, _ = io.Copy(io.Discard, resp.Body) + return nil +} + +// mapField converts a searchindex.FieldConfig to a typesenseField. 
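+// For example: TEXT -> "string", KEYWORD -> "string" (facet), NUMERIC ->
+// "float", BOOL -> "bool", VECTOR -> "float[]" (with num_dim), GEO ->
+// "geopoint", and DATE/DATETIME -> "int64" (unix timestamp).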
+func mapField(fc searchindex.FieldConfig) (typesenseField, error) {
+	tf := typesenseField{Name: fc.Name}
+	switch fc.Type {
+	case searchindex.FieldTypeText:
+		tf.Type = "string"
+	case searchindex.FieldTypeKeyword:
+		tf.Type = "string"
+		tf.Facet = true
+	case searchindex.FieldTypeNumeric:
+		// Map NUMERIC to float; integer values are representable as floats.
+		tf.Type = "float"
+	case searchindex.FieldTypeBool:
+		tf.Type = "bool"
+	case searchindex.FieldTypeVector:
+		tf.Type = "float[]"
+		tf.NumDim = fc.Dimensions
+	case searchindex.FieldTypeGeo:
+		// Typesense supports geopoint natively.
+		tf.Type = "geopoint"
+	case searchindex.FieldTypeDate, searchindex.FieldTypeDateTime:
+		// Typesense stores dates as unix timestamps (int64).
+		tf.Type = "int64"
+	default:
+		return typesenseField{}, fmt.Errorf("unsupported field type %v", fc.Type)
+	}
+
+	if fc.Filterable || fc.Autocomplete {
+		// Non-vector fields are filterable in Typesense by default, and facet
+		// is strictly needed only for keyword-style fields. We still enable it
+		// for every filterable field so filter_by works uniformly, and
+		// facet-query based autocomplete requires it as well.
+		tf.Facet = true
+	}
+
+	return tf, nil
+}
+
+// ---------------------------------------------------------------------------
+// Indexing
+// ---------------------------------------------------------------------------
+
+// IndexDocument indexes a single document.
+func (idx *Index) IndexDocument(ctx context.Context, doc searchindex.EntityDocument) error {
+	return idx.IndexDocuments(ctx, []searchindex.EntityDocument{doc})
+}
+
+// IndexDocuments indexes a batch of documents using the JSONL import API.
+func (idx *Index) IndexDocuments(ctx context.Context, docs []searchindex.EntityDocument) error {
+	if len(docs) == 0 {
+		return nil
+	}
+
+	var buf bytes.Buffer
+	enc := json.NewEncoder(&buf)
+	enc.SetEscapeHTML(false)
+
+	dateFields := idx.dateFieldSet()
+
+	for _, doc := range docs {
+		flat, err := buildDocument(doc)
+		if err != nil {
+			return err
+		}
+		if len(dateFields) > 0 {
+			if err := convertDateFieldsInDoc(flat, dateFields); err != nil {
+				return err
+			}
+		}
+		if err := enc.Encode(flat); err != nil {
+			return fmt.Errorf("typesense: encode document: %w", err)
+		}
+	}
+
+	path := fmt.Sprintf("/collections/%s/documents/import?action=upsert", url.PathEscape(idx.name))
+	resp, err := idx.doRequest(ctx, http.MethodPost, path, &buf)
+	if err != nil {
+		return fmt.Errorf("typesense: import documents: %w", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		return idx.readError(resp, "import documents")
+	}
+
+	// The import endpoint returns one JSON object per line. Check for errors.
+	respBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return fmt.Errorf("typesense: read import response: %w", err)
+	}
+	lines := bytes.Split(bytes.TrimSpace(respBody), []byte("\n"))
+	for _, line := range lines {
+		if len(line) == 0 {
+			continue
+		}
+		var result struct {
+			Success bool   `json:"success"`
+			Error   string `json:"error"`
+		}
+		if err := json.Unmarshal(line, &result); err != nil {
+			return fmt.Errorf("typesense: parse import result line: %w", err)
+		}
+		if !result.Success {
+			return fmt.Errorf("typesense: import document failed: %s", result.Error)
+		}
+	}
+
+	return nil
+}
+
+// buildDocument creates a flat JSON-serialisable map from an EntityDocument.
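+// For example, a document with identity {TypeName: "Product", KeyFields:
+// {"id": "1"}} gains, alongside the user fields and vectors (illustrative):
+//
+//	"id":             "Product:id=1",
+//	"_docId":         "Product:id=1",
+//	"_typeName":      "Product",
+//	"_keyFieldsJSON": `{"id":"1"}`,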
+func buildDocument(doc searchindex.EntityDocument) (map[string]any, error) { + m := make(map[string]any, len(doc.Fields)+len(doc.Vectors)+3) + + for k, v := range doc.Fields { + m[k] = v + } + for k, v := range doc.Vectors { + m[k] = v + } + + docID := documentID(doc.Identity) + m["id"] = docID + m[reservedDocIDField] = docID + m[reservedTypeNameField] = doc.Identity.TypeName + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return nil, fmt.Errorf("typesense: marshal key fields: %w", err) + } + m[reservedKeyFieldsField] = string(keyFieldsJSON) + + return m, nil +} + +// dateToUnix parses an ISO 8601 date or datetime string and returns the unix timestamp. +// Supported formats: "2024-01-15", "2024-01-15T10:30:00Z", "2024-01-15T10:30:00.000Z", +// "2024-01-15T10:30:00+02:00". +func dateToUnix(s string) (int64, error) { + for _, layout := range []string{ + time.RFC3339Nano, + time.RFC3339, + "2006-01-02T15:04:05", + time.DateOnly, + } { + if t, err := time.Parse(layout, s); err == nil { + return t.Unix(), nil + } + } + return 0, fmt.Errorf("typesense: cannot parse date %q", s) +} + +// dateFieldSet returns the set of field names that are DATE or DATETIME type. +func (idx *Index) dateFieldSet() map[string]bool { + m := make(map[string]bool) + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeDate || fc.Type == searchindex.FieldTypeDateTime { + m[fc.Name] = true + } + } + return m +} + +// convertDateFieldsInDoc converts ISO date strings to unix timestamps for date fields. +func convertDateFieldsInDoc(doc map[string]any, dateFields map[string]bool) error { + for name := range dateFields { + v, ok := doc[name] + if !ok { + continue + } + s, ok := v.(string) + if !ok { + continue + } + ts, err := dateToUnix(s) + if err != nil { + return err + } + doc[name] = ts + } + return nil +} + +// convertDateFilters walks a filter tree and converts string date values to +// unix timestamps for fields that are DATE or DATETIME type. +func convertDateFilters(f *searchindex.Filter, dateFields map[string]bool) { + if f == nil { + return + } + for _, child := range f.And { + convertDateFilters(child, dateFields) + } + for _, child := range f.Or { + convertDateFilters(child, dateFields) + } + if f.Not != nil { + convertDateFilters(f.Not, dateFields) + } + if f.Term != nil && dateFields[f.Term.Field] { + if s, ok := f.Term.Value.(string); ok { + if ts, err := dateToUnix(s); err == nil { + f.Term.Value = ts + } + } + } + if f.Range != nil && dateFields[f.Range.Field] { + convertDateRangeValue := func(v any) any { + if s, ok := v.(string); ok { + if ts, err := dateToUnix(s); err == nil { + return ts + } + } + return v + } + if f.Range.GT != nil { + f.Range.GT = convertDateRangeValue(f.Range.GT) + } + if f.Range.GTE != nil { + f.Range.GTE = convertDateRangeValue(f.Range.GTE) + } + if f.Range.LT != nil { + f.Range.LT = convertDateRangeValue(f.Range.LT) + } + if f.Range.LTE != nil { + f.Range.LTE = convertDateRangeValue(f.Range.LTE) + } + } +} + +// documentID computes a deterministic string ID from a DocumentIdentity. +// Format: TypeName:key1=val1,key2=val2,... (keys sorted alphabetically). 
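+//
+// For example (illustrative values):
+//
+//	documentID(searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}})
+//	// => "Product:id=1"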
+func documentID(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return id.TypeName + } + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(id.TypeName) + b.WriteByte(':') + for i, k := range keys { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(k) + b.WriteByte('=') + fmt.Fprintf(&b, "%v", id.KeyFields[k]) + } + return b.String() +} + +// --------------------------------------------------------------------------- +// Deletion +// --------------------------------------------------------------------------- + +// DeleteDocument deletes a single document by identity. +func (idx *Index) DeleteDocument(ctx context.Context, id searchindex.DocumentIdentity) error { + return idx.DeleteDocuments(ctx, []searchindex.DocumentIdentity{id}) +} + +// DeleteDocuments deletes a batch of documents by identity. +func (idx *Index) DeleteDocuments(ctx context.Context, ids []searchindex.DocumentIdentity) error { + var firstErr error + for _, id := range ids { + docID := documentID(id) + path := fmt.Sprintf("/collections/%s/documents/%s", url.PathEscape(idx.name), url.PathEscape(docID)) + resp, err := idx.doRequest(ctx, http.MethodDelete, path, nil) + if err != nil { + if firstErr == nil { + firstErr = fmt.Errorf("typesense: delete document %q: %w", docID, err) + } + continue + } + _, _ = io.Copy(io.Discard, resp.Body) + resp.Body.Close() + if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNotFound { + if firstErr == nil { + firstErr = fmt.Errorf("typesense: delete document %q: HTTP %d", docID, resp.StatusCode) + } + } + } + return firstErr +} + +// --------------------------------------------------------------------------- +// Search +// --------------------------------------------------------------------------- + +// Search performs a search query against the Typesense collection. +func (idx *Index) Search(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + params := url.Values{} + + // Query text. + q := req.TextQuery + if q == "" { + q = "*" + } + params.Set("q", q) + + // query_by: text fields to search, with optional per-field weights. + var queryByNames []string + var queryByWeights []string + hasCustomWeights := false + + if len(req.TextFields) > 0 { + for _, tf := range req.TextFields { + queryByNames = append(queryByNames, tf.Name) + w := tf.Weight + if w == 0 { + w = 1 + } + if w != 1 { + hasCustomWeights = true + } + queryByWeights = append(queryByWeights, fmt.Sprintf("%d", int(w))) + } + } else if req.TextQuery != "" { + // Default to all text fields in the schema. + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeText { + queryByNames = append(queryByNames, fc.Name) + } + } + } + if len(queryByNames) == 0 { + // Must have at least one query_by field for Typesense. + // Fall back to all string-type fields. + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeText || fc.Type == searchindex.FieldTypeKeyword { + queryByNames = append(queryByNames, fc.Name) + } + } + } + if len(queryByNames) > 0 { + params.Set("query_by", strings.Join(queryByNames, ",")) + if hasCustomWeights { + params.Set("query_by_weights", strings.Join(queryByWeights, ",")) + } + } + + // Filters. 
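+	// The collected parts are joined into one filter_by expression, e.g.
+	// (illustrative): _typeName:=Product && (category:=Footwear && inStock:=true)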
+ filterParts := make([]string, 0, 2) + + if req.TypeName != "" { + filterParts = append(filterParts, fmt.Sprintf("%s:=%s", reservedTypeNameField, escapeFilterValue(req.TypeName))) + } + + if req.Filter != nil { + dateFields := idx.dateFieldSet() + if len(dateFields) > 0 { + convertDateFilters(req.Filter, dateFields) + } + fStr, err := translateFilter(req.Filter) + if err != nil { + return nil, err + } + if fStr != "" { + filterParts = append(filterParts, fStr) + } + } + + if len(filterParts) > 0 { + params.Set("filter_by", strings.Join(filterParts, " && ")) + } + + // Sorting. + if len(req.Sort) > 0 { + sortParts := make([]string, 0, len(req.Sort)) + for _, sf := range req.Sort { + dir := "desc" + if sf.Ascending { + dir = "asc" + } + sortParts = append(sortParts, sf.Field+":"+dir) + } + params.Set("sort_by", strings.Join(sortParts, ",")) + } + + // Pagination. + limit := req.Limit + if limit <= 0 { + limit = 10 + } + params.Set("per_page", strconv.Itoa(limit)) + + if req.Offset > 0 { + // Typesense uses 1-based pages. + page := (req.Offset / limit) + 1 + params.Set("page", strconv.Itoa(page)) + } + + // Fuzziness / typo tolerance. + if req.Fuzziness != nil { + params.Set("num_typos", strconv.Itoa(int(*req.Fuzziness))) + } + + // Facets. + if len(req.Facets) > 0 { + facetFields := make([]string, 0, len(req.Facets)) + for _, fr := range req.Facets { + facetFields = append(facetFields, fr.Field) + } + params.Set("facet_by", strings.Join(facetFields, ",")) + // Use the max facet Size from the requests. + maxSize := 0 + for _, fr := range req.Facets { + if fr.Size > maxSize { + maxSize = fr.Size + } + } + if maxSize > 0 { + params.Set("max_facet_values", strconv.Itoa(maxSize)) + } + } + + // Vector search. + if len(req.Vector) > 0 && req.VectorField != "" { + vecStrs := make([]string, 0, len(req.Vector)) + for _, v := range req.Vector { + vecStrs = append(vecStrs, strconv.FormatFloat(float64(v), 'f', -1, 32)) + } + k := limit + if k <= 0 { + k = 10 + } + vectorQuery := fmt.Sprintf("%s:([%s], k:%d)", req.VectorField, strings.Join(vecStrs, ", "), k) + params.Set("vector_query", vectorQuery) + } + + path := fmt.Sprintf("/collections/%s/documents/search?%s", url.PathEscape(idx.name), params.Encode()) + resp, err := idx.doRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return nil, fmt.Errorf("typesense: search: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, idx.readError(resp, "search") + } + + var tsResp typesenseSearchResponse + if err := json.NewDecoder(resp.Body).Decode(&tsResp); err != nil { + return nil, fmt.Errorf("typesense: decode search response: %w", err) + } + + return idx.convertSearchResponse(&tsResp) +} + +// typesenseSearchResponse mirrors the Typesense search result JSON. 
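+// A trimmed, illustrative payload:
+//
+//	{"found": 2,
+//	 "hits": [{"document": {...}, "text_match": 578730, "highlights": [...]}],
+//	 "facet_counts": [{"field_name": "category", "counts": [{"value": "Footwear", "count": 3}]}]}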
+type typesenseSearchResponse struct { + Found int `json:"found"` + Hits []typesenseHit `json:"hits"` + FacetCounts []typesenseFacetCount `json:"facet_counts"` +} + +type typesenseHit struct { + Document map[string]any `json:"document"` + TextMatch json.Number `json:"text_match"` + Highlights []typesenseHighlight `json:"highlights"` + VectorDistance *float64 `json:"vector_distance,omitempty"` +} + +type typesenseHighlight struct { + Field string `json:"field"` + Snippet string `json:"snippet"` + Snippets []string `json:"snippets"` +} + +type typesenseFacetCount struct { + FieldName string `json:"field_name"` + Counts []typesenseFacetVal `json:"counts"` +} + +type typesenseFacetVal struct { + Value string `json:"value"` + Count int `json:"count"` +} + +func (idx *Index) convertSearchResponse(tsResp *typesenseSearchResponse) (*searchindex.SearchResult, error) { + hits := make([]searchindex.SearchHit, 0, len(tsResp.Hits)) + for _, h := range tsResp.Hits { + hit, err := convertHit(&h) + if err != nil { + return nil, err + } + hits = append(hits, hit) + } + + facets := convertFacets(tsResp.FacetCounts) + + return &searchindex.SearchResult{ + Hits: hits, + TotalCount: tsResp.Found, + Facets: facets, + }, nil +} + +func convertHit(h *typesenseHit) (searchindex.SearchHit, error) { + identity, err := extractIdentity(h.Document) + if err != nil { + return searchindex.SearchHit{}, err + } + + // Build representation from the document, excluding internal fields. + representation := make(map[string]any, len(h.Document)) + for k, v := range h.Document { + if k == reservedDocIDField || k == reservedTypeNameField || k == reservedKeyFieldsField || k == "id" { + continue + } + representation[k] = v + } + representation["__typename"] = identity.TypeName + for k, v := range identity.KeyFields { + representation[k] = v + } + + // Score. + var score float64 + if h.TextMatch.String() != "" { + score, _ = h.TextMatch.Float64() + } + + // Distance for vector search. + var distance float64 + if h.VectorDistance != nil { + distance = *h.VectorDistance + } + + // Highlights. 
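+	// Typesense marks up matches with <mark> tags by default, so an entry
+	// looks roughly like {"name": ["<mark>Running</mark> Shoes"]}.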
+	var highlights map[string][]string
+	if len(h.Highlights) > 0 {
+		highlights = make(map[string][]string, len(h.Highlights))
+		for _, hl := range h.Highlights {
+			if len(hl.Snippets) > 0 {
+				highlights[hl.Field] = hl.Snippets
+			} else if hl.Snippet != "" {
+				highlights[hl.Field] = []string{hl.Snippet}
+			}
+		}
+	}
+
+	return searchindex.SearchHit{
+		Identity:       identity,
+		Score:          score,
+		Distance:       distance,
+		Highlights:     highlights,
+		Representation: representation,
+	}, nil
+}
+
+func extractIdentity(doc map[string]any) (searchindex.DocumentIdentity, error) {
+	typeName, _ := doc[reservedTypeNameField].(string)
+	keyFieldsRaw, _ := doc[reservedKeyFieldsField].(string)
+
+	var keyFields map[string]any
+	if keyFieldsRaw != "" {
+		if err := json.Unmarshal([]byte(keyFieldsRaw), &keyFields); err != nil {
+			return searchindex.DocumentIdentity{}, fmt.Errorf("typesense: unmarshal key fields: %w", err)
+		}
+	}
+	if keyFields == nil {
+		keyFields = make(map[string]any)
+	}
+
+	return searchindex.DocumentIdentity{
+		TypeName:  typeName,
+		KeyFields: keyFields,
+	}, nil
+}
+
+func convertFacets(tsFacets []typesenseFacetCount) map[string]searchindex.FacetResult {
+	if len(tsFacets) == 0 {
+		return nil
+	}
+	facets := make(map[string]searchindex.FacetResult, len(tsFacets))
+	for _, fc := range tsFacets {
+		values := make([]searchindex.FacetValue, 0, len(fc.Counts))
+		for _, cv := range fc.Counts {
+			values = append(values, searchindex.FacetValue{
+				Value: cv.Value,
+				Count: cv.Count,
+			})
+		}
+		facets[fc.FieldName] = searchindex.FacetResult{Values: values}
+	}
+	return facets
+}
+
+// ---------------------------------------------------------------------------
+// Filter translation
+// ---------------------------------------------------------------------------
+
+// translateFilter recursively converts a searchindex.Filter tree to a
+// Typesense filter_by string.
+func translateFilter(f *searchindex.Filter) (string, error) {
+	if f == nil {
+		return "", nil
+	}
+
+	// AND
+	if len(f.And) > 0 {
+		parts := make([]string, 0, len(f.And))
+		for _, child := range f.And {
+			s, err := translateFilter(child)
+			if err != nil {
+				return "", err
+			}
+			if s != "" {
+				parts = append(parts, s)
+			}
+		}
+		if len(parts) == 0 {
+			return "", nil
+		}
+		return "(" + strings.Join(parts, " && ") + ")", nil
+	}
+
+	// OR
+	if len(f.Or) > 0 {
+		parts := make([]string, 0, len(f.Or))
+		for _, child := range f.Or {
+			s, err := translateFilter(child)
+			if err != nil {
+				return "", err
+			}
+			if s != "" {
+				parts = append(parts, s)
+			}
+		}
+		if len(parts) == 0 {
+			return "", nil
+		}
+		return "(" + strings.Join(parts, " || ") + ")", nil
+	}
+
+	// NOT — Typesense uses :!= for negation at the field level.
+	if f.Not != nil {
+		return translateNotFilter(f.Not)
+	}
+
+	// Term — exact match.
+	if f.Term != nil {
+		return translateTermFilter(f.Term)
+	}
+
+	// Terms — IN operator.
+	if f.Terms != nil {
+		return translateTermsFilter(f.Terms)
+	}
+
+	// Range.
+	if f.Range != nil {
+		return translateRangeFilter(f.Range)
+	}
+
+	// Prefix.
+	if f.Prefix != nil {
+		// filter_by has no native prefix operator and cannot approximate one,
+		// so prefix filters are rejected; callers should use text search instead.
+		return "", fmt.Errorf("typesense: prefix filter is not supported in filter_by; use text search instead")
+	}
+
+	// Exists.
+	if f.Exists != nil {
+		// Approximate "exists" as field:!=`` (not equal to the empty string).
+		// Typesense rejects this expression at query time, so Search surfaces
+		// the server error; see the exists-filter integration test.
+		return fmt.Sprintf("%s:!=%s", f.Exists.Field, "``"), nil
+	}
+
+	return "", nil
+}
+
+func translateTermFilter(tf *searchindex.TermFilter) (string, error) {
+	switch v := tf.Value.(type) {
+	case string:
+		return fmt.Sprintf("%s:=%s", tf.Field, escapeFilterValue(v)), nil
+	case bool:
+		return fmt.Sprintf("%s:=%t", tf.Field, v), nil
+	case float64:
+		return fmt.Sprintf("%s:=%s", tf.Field, formatNumber(v)), nil
+	case float32:
+		return fmt.Sprintf("%s:=%s", tf.Field, formatNumber(float64(v))), nil
+	case int:
+		return fmt.Sprintf("%s:=%d", tf.Field, v), nil
+	case int64:
+		return fmt.Sprintf("%s:=%d", tf.Field, v), nil
+	case json.Number:
+		return fmt.Sprintf("%s:=%s", tf.Field, v.String()), nil
+	default:
+		return fmt.Sprintf("%s:=%s", tf.Field, escapeFilterValue(fmt.Sprintf("%v", v))), nil
+	}
+}
+
+// translateNotFilter negates a filter using Typesense's :!= operator.
+func translateNotFilter(inner *searchindex.Filter) (string, error) {
+	// Term: field:=value → field:!=value
+	if inner.Term != nil {
+		tf := inner.Term
+		switch v := tf.Value.(type) {
+		case string:
+			return fmt.Sprintf("%s:!=%s", tf.Field, escapeFilterValue(v)), nil
+		case bool:
+			return fmt.Sprintf("%s:!=%t", tf.Field, v), nil
+		case float64:
+			return fmt.Sprintf("%s:!=%s", tf.Field, formatNumber(v)), nil
+		case float32:
+			return fmt.Sprintf("%s:!=%s", tf.Field, formatNumber(float64(v))), nil
+		case int:
+			return fmt.Sprintf("%s:!=%d", tf.Field, v), nil
+		case int64:
+			return fmt.Sprintf("%s:!=%d", tf.Field, v), nil
+		case json.Number:
+			return fmt.Sprintf("%s:!=%s", tf.Field, v.String()), nil
+		default:
+			return fmt.Sprintf("%s:!=%s", tf.Field, escapeFilterValue(fmt.Sprintf("%v", v))), nil
+		}
+	}
+
+	// AND: NOT(a AND b) → NOT(a) || NOT(b) (De Morgan's)
+	if len(inner.And) > 0 {
+		parts := make([]string, 0, len(inner.And))
+		for _, child := range inner.And {
+			s, err := translateNotFilter(child)
+			if err != nil {
+				return "", err
+			}
+			parts = append(parts, s)
+		}
+		return "(" + strings.Join(parts, " || ") + ")", nil
+	}
+
+	// OR: NOT(a OR b) → NOT(a) && NOT(b) (De Morgan's)
+	if len(inner.Or) > 0 {
+		parts := make([]string, 0, len(inner.Or))
+		for _, child := range inner.Or {
+			s, err := translateNotFilter(child)
+			if err != nil {
+				return "", err
+			}
+			parts = append(parts, s)
+		}
+		return "(" + strings.Join(parts, " && ") + ")", nil
+	}
+
+	// Double negation: NOT(NOT(x)) → x
+	if inner.Not != nil {
+		return translateFilter(inner.Not)
+	}
+
+	return "", fmt.Errorf("typesense: NOT filter is not supported for this filter type")
+}
+
+func translateTermsFilter(tf *searchindex.TermsFilter) (string, error) {
+	if len(tf.Values) == 0 {
+		return "", nil
+	}
+	vals := make([]string, 0, len(tf.Values))
+	for _, v := range tf.Values {
+		vals = append(vals, formatFilterValue(v))
+	}
+	return fmt.Sprintf("%s:[%s]", tf.Field, strings.Join(vals, ", ")), nil
+}
+
+func translateRangeFilter(rf *searchindex.RangeFilter) (string, error) {
+	parts := make([]string, 0, 2)
+
+	if rf.GTE != nil {
+		v := formatFilterValue(rf.GTE)
+		parts = append(parts, fmt.Sprintf("%s:>=%s", rf.Field, v))
+	} else if rf.HasGT && rf.GT != nil {
+		v := formatFilterValue(rf.GT)
+		parts = append(parts, fmt.Sprintf("%s:>%s", rf.Field, v))
+	}
+
+	if rf.LTE != nil {
+		v := formatFilterValue(rf.LTE)
+		parts = append(parts, fmt.Sprintf("%s:<=%s", rf.Field, v))
+	} else if rf.HasLT && rf.LT != nil {
+		v := formatFilterValue(rf.LT)
+		parts = append(parts, fmt.Sprintf("%s:<%s", rf.Field, v))
+	}
+
+	if len(parts) == 0 {
+		return "", nil
+	}
+	return strings.Join(parts, " && "), nil
+}
+
+// formatFilterValue formats an arbitrary value for use in a Typesense filter_by string.
+func formatFilterValue(v any) string {
+	switch val := v.(type) {
+	case string:
+		return escapeFilterValue(val)
+	case bool:
+		if val {
+			return "true"
+		}
+		return "false"
+	case float64:
+		return formatNumber(val)
+	case float32:
+		return formatNumber(float64(val))
+	case int:
+		return strconv.Itoa(val)
+	case int64:
+		return strconv.FormatInt(val, 10)
+	case json.Number:
+		return val.String()
+	default:
+		return escapeFilterValue(fmt.Sprintf("%v", val))
+	}
+}
+
+// escapeFilterValue wraps a string value in backticks for Typesense filter_by syntax.
+func escapeFilterValue(s string) string {
+	// Typesense uses backtick quoting for values that contain special characters.
+	if strings.ContainsAny(s, " ,[]()&|:!=<>`") {
+		return "`" + strings.ReplaceAll(s, "`", "\\`") + "`"
+	}
+	return s
+}
+
+// formatNumber formats a float64, preferring integer representation when possible.
+func formatNumber(f float64) string {
+	if f == float64(int64(f)) {
+		return strconv.FormatInt(int64(f), 10)
+	}
+	return strconv.FormatFloat(f, 'f', -1, 64)
+}
+
+// ---------------------------------------------------------------------------
+// Autocomplete
+// ---------------------------------------------------------------------------
+
+// Autocomplete returns terms matching the given prefix using Typesense's facet query.
+func (idx *Index) Autocomplete(ctx context.Context, req searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) {
+	limit := req.Limit
+	if limit <= 0 {
+		limit = 10
+	}
+
+	params := url.Values{
+		"q":                {"*"},
+		"query_by":         {req.Field},
+		"facet_by":         {req.Field},
+		"facet_query":      {req.Field + ":" + strings.ToLower(req.Prefix)},
+		"per_page":         {"0"},
+		"max_facet_values": {strconv.Itoa(limit)},
+	}
+
+	path := fmt.Sprintf("/collections/%s/documents/search?%s", url.PathEscape(idx.name), params.Encode())
+	resp, err := idx.doRequest(ctx, "GET", path, nil)
+	if err != nil {
+		return nil, fmt.Errorf("typesense: autocomplete request: %w", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode >= 400 {
+		return nil, idx.readError(resp, "autocomplete")
+	}
+
+	respBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, fmt.Errorf("typesense: read autocomplete response: %w", err)
+	}
+
+	var result struct {
+		FacetCounts []struct {
+			Counts []struct {
+				Value string `json:"value"`
+				Count int    `json:"count"`
+			} `json:"counts"`
+		} `json:"facet_counts"`
+	}
+	if err := json.Unmarshal(respBody, &result); err != nil {
+		return nil, fmt.Errorf("typesense: unmarshal autocomplete response: %w", err)
+	}
+
+	var terms []searchindex.AutocompleteTerm
+	if len(result.FacetCounts) > 0 {
+		for _, c := range result.FacetCounts[0].Counts {
+			terms = append(terms, searchindex.AutocompleteTerm{Term: c.Value, Count: c.Count})
+		}
+	}
+
+	return &searchindex.AutocompleteResult{Terms: terms}, nil
+}
+
+// ---------------------------------------------------------------------------
+// Close
+// ---------------------------------------------------------------------------
+
+// Close releases resources held by the index. The underlying HTTP client is
+// shared and does not need explicit cleanup.
+func (idx *Index) Close() error {
+	return nil
+}
+
+// ---------------------------------------------------------------------------
+// HTTP helpers
+// ---------------------------------------------------------------------------
+
+func (idx *Index) baseURL() string {
+	return fmt.Sprintf("%s://%s:%d", idx.config.Protocol, idx.config.Host, idx.config.Port)
+}
+
+func (idx *Index) doRequest(ctx context.Context, method, path string, body io.Reader) (*http.Response, error) {
+	u := idx.baseURL() + path
+	req, err := http.NewRequestWithContext(ctx, method, u, body)
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Set("Content-Type", "application/json")
+	if idx.config.APIKey != "" {
+		req.Header.Set("X-TYPESENSE-API-KEY", idx.config.APIKey)
+	}
+	return idx.client.Do(req)
+}
+
+func (idx *Index) readError(resp *http.Response, action string) error {
+	bodyBytes, _ := io.ReadAll(resp.Body)
+	return fmt.Errorf("typesense: %s failed (HTTP %d): %s", action, resp.StatusCode, string(bodyBytes))
+}
diff --git a/v2/pkg/searchindex/typesense/typesense_test.go b/v2/pkg/searchindex/typesense/typesense_test.go
new file mode 100644
index 0000000000..0c10aae2b6
--- /dev/null
+++ b/v2/pkg/searchindex/typesense/typesense_test.go
@@ -0,0 +1,423 @@
+//go:build integration
+
+package typesense
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/testcontainers/testcontainers-go"
+	"github.com/testcontainers/testcontainers-go/wait"
+
+	"github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex"
+)
+
+const testAPIKey = "test-api-key"
+
+// startTypesense launches a Typesense container and returns its mapped host
+// and port; the container is terminated automatically via t.Cleanup.
+func startTypesense(t *testing.T) (host string, port int) {
+	t.Helper()
+	ctx := context.Background()
+
+	req := testcontainers.ContainerRequest{
+		Image:        "typesense/typesense:27.1",
+		ExposedPorts: []string{"8108/tcp"},
+		Env: map[string]string{
+			"TYPESENSE_API_KEY":  testAPIKey,
+			"TYPESENSE_DATA_DIR": "/data",
+		},
+		Tmpfs:      map[string]string{"/data": ""},
+		WaitingFor: wait.ForHTTP("/health").WithPort("8108/tcp").WithStartupTimeout(60 * time.Second),
+	}
+
+	container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
+		ContainerRequest: req,
+		Started:          true,
+	})
+	require.NoError(t, err)
+	t.Cleanup(func() { _ = container.Terminate(ctx) })
+
+	mappedPort, err := container.MappedPort(ctx, "8108")
+	require.NoError(t, err)
+
+	hostIP, err := container.Host(ctx)
+	require.NoError(t, err)
+
+	return hostIP, mappedPort.Int()
+}
+
+func newTestIndex(t *testing.T, host string, port int) searchindex.Index {
+	t.Helper()
+
+	factory := NewFactory()
+	schema := searchindex.IndexConfig{
+		Name: "test_products",
+		Fields: []searchindex.FieldConfig{
+			{Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: false},
+			{Name: "description", Type: searchindex.FieldTypeText},
+			{Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true},
+			{Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true},
+			{Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true},
+		},
+	}
+
+	cfgJSON, _ := json.Marshal(Config{
+		Host:     host,
+		Port:     port,
+		APIKey:   testAPIKey,
+		Protocol: "http",
+	})
+
+	// Use a unique collection name per test to avoid conflicts.
+ collectionName := fmt.Sprintf("test_%d", time.Now().UnixNano()) + + idx, err := factory.CreateIndex(context.Background(), collectionName, schema, cfgJSON) + require.NoError(t, err) + + t.Cleanup(func() { _ = idx.Close() }) + return idx +} + +func populateTestData(t *testing.T, idx searchindex.Index) { + t.Helper() + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + }, + } + err := idx.IndexDocuments(context.Background(), docs) + require.NoError(t, err) +} + +func TestTypesenseLifecycle(t *testing.T) { + host, port := startTypesense(t) + + t.Run("index and text search", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + Limit: 10, + }) + require.NoError(t, err) + assert.GreaterOrEqual(t, result.TotalCount, 2, "expected at least 2 hits for 'shoes'") + }) + + t.Run("text search with field restriction", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + TextFields: []searchindex.TextFieldWeight{{Name: "name"}}, + Limit: 10, + }) + require.NoError(t, err) + assert.GreaterOrEqual(t, result.TotalCount, 2) + }) + + t.Run("term filter on keyword", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 3, result.TotalCount) + }) + + t.Run("boolean filter", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 1, result.TotalCount) + }) + + t.Run("numeric range filter", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: 30.0, + LTE: 100.0, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 2, result.TotalCount) + }) + + t.Run("AND filter", 
func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 3, result.TotalCount) + }) + + t.Run("OR filter", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + {Range: &searchindex.RangeFilter{Field: "price", GTE: 100.0}}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 2, result.TotalCount) // Belt + Basketball Shoes + }) + + t.Run("sorting", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 10, + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(result.Hits), 4) + // First hit should be cheapest (Wool Socks at 12.99). + assert.Equal(t, "Wool Socks", result.Hits[0].Representation["name"]) + }) + + t.Run("pagination", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 2, + Offset: 2, + }) + require.NoError(t, err) + assert.Equal(t, 2, len(result.Hits)) + }) + + t.Run("facets", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Facets: []searchindex.FacetRequest{{Field: "category", Size: 10}}, + Limit: 10, + }) + require.NoError(t, err) + facet, ok := result.Facets["category"] + require.True(t, ok, "expected category facet") + assert.GreaterOrEqual(t, len(facet.Values), 2) + }) + + t.Run("identity roundtrip", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "running shoes", + Limit: 1, + }) + require.NoError(t, err) + require.NotEmpty(t, result.Hits) + + hit := result.Hits[0] + assert.Equal(t, "Product", hit.Identity.TypeName) + assert.Equal(t, "Product", hit.Representation["__typename"]) + }) + + t.Run("delete single document", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + err := idx.DeleteDocument(context.Background(), searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "1"}, + }) + require.NoError(t, err) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + require.NoError(t, err) + assert.Equal(t, 3, result.TotalCount) + }) + + t.Run("delete batch documents", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + err := idx.DeleteDocuments(context.Background(), []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + }) + require.NoError(t, err) + + result, err := 
idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + require.NoError(t, err) + assert.Equal(t, 2, result.TotalCount) + }) + + t.Run("upsert overwrites existing document", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + // Re-index product 1 with a new name. + err := idx.IndexDocument(context.Background(), searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Trail Running Shoes", "description": "Great for trail running", "category": "Footwear", "price": 99.99, "inStock": true}, + }) + require.NoError(t, err) + + // Total count should still be 4. + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + require.NoError(t, err) + assert.Equal(t, 4, result.TotalCount) + + // Search for the updated name. + result, err = idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "trail running", + Limit: 1, + }) + require.NoError(t, err) + require.NotEmpty(t, result.Hits) + assert.Contains(t, result.Hits[0].Representation["name"], "Trail") + }) + + t.Run("terms filter", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Terms: &searchindex.TermsFilter{ + Field: "category", + Values: []any{"Footwear", "Accessories"}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, result.TotalCount) // All products + }) + + t.Run("type name filter", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TypeName: "Product", + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, result.TotalCount) + }) + + t.Run("NOT filter", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 1, result.TotalCount, "expected 1 hit (only Accessories after excluding Footwear)") + }) + + t.Run("prefix filter returns error", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + _, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Prefix: &searchindex.PrefixFilter{Field: "category", Value: "Foot"}, + }, + Limit: 10, + }) + require.Error(t, err, "prefix filter should return an error because Typesense does not support it") + assert.Contains(t, err.Error(), "prefix filter is not supported") + }) + + t.Run("exists filter returns error", func(t *testing.T) { + idx := newTestIndex(t, host, port) + populateTestData(t, idx) + + // Typesense does not support exists filter with empty value check. 
+ _, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Exists: &searchindex.ExistsFilter{Field: "category"}, + }, + Limit: 10, + }) + require.Error(t, err, "exists filter should return an error in Typesense") + }) + + t.Run("single IndexDocument", func(t *testing.T) { + idx := newTestIndex(t, host, port) + + err := idx.IndexDocument(context.Background(), searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "99"}}, + Fields: map[string]any{"name": "Hiking Boots", "description": "Sturdy boots for mountain trails", "category": "Footwear", "price": 149.99, "inStock": true}, + }) + require.NoError(t, err) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "hiking boots", + Limit: 10, + }) + require.NoError(t, err) + require.Equal(t, 1, result.TotalCount, "expected exactly 1 hit for the single indexed document") + assert.Equal(t, "Hiking Boots", result.Hits[0].Representation["name"]) + assert.Equal(t, "Product", result.Hits[0].Identity.TypeName) + }) +} diff --git a/v2/pkg/searchindex/weaviate/weaviate.go b/v2/pkg/searchindex/weaviate/weaviate.go new file mode 100644 index 0000000000..8408c008ad --- /dev/null +++ b/v2/pkg/searchindex/weaviate/weaviate.go @@ -0,0 +1,1288 @@ +// Package weaviate implements the searchindex.Index interface for Weaviate. +// +// Priority: P1 +// Supports: vector-native + BM25 full-text, native hybrid search. +// Filter translation: searchindex.Filter -> Weaviate where clause with operators. +// +// Uses only net/http + encoding/json (no Weaviate SDK). +package weaviate + +import ( + "bytes" + "context" + "crypto/sha1" + "encoding/json" + "fmt" + "io" + "net/http" + "sort" + "strconv" + "strings" + "time" + "unicode" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +// Compile-time interface checks. +var ( + _ searchindex.Index = (*Index)(nil) + _ searchindex.IndexFactory = (*Factory)(nil) +) + +// reservedTypeNameField stores the entity type name for identity reconstruction. +const reservedTypeNameField = "_typeName" + +// reservedKeyFieldsField stores the JSON-encoded key fields map. +const reservedKeyFieldsField = "_keyFieldsJSON" + +// reservedDocIDField stores the deterministic document ID string. +const reservedDocIDField = "_docId" + +// Config holds Weaviate-specific configuration. +type Config struct { + Host string `json:"host"` + Scheme string `json:"scheme,omitempty"` + APIKey string `json:"api_key,omitempty"` +} + +// Index implements searchindex.Index for Weaviate. +type Index struct { + name string + className string + config Config + schema searchindex.IndexConfig + client *http.Client + baseURL string +} + +// Factory implements searchindex.IndexFactory for Weaviate. +type Factory struct{} + +// NewFactory returns a new Weaviate IndexFactory. +func NewFactory() *Factory { + return &Factory{} +} + +// CreateIndex creates a new Weaviate class with properties mapped from the IndexConfig. 
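+//
+// A minimal usage sketch (values are illustrative; Host and Scheme default to
+// "localhost:8080" and "http" when omitted):
+//
+//	idx, err := NewFactory().CreateIndex(ctx, "products", schema,
+//		[]byte(`{"host": "weaviate:8080", "scheme": "http"}`))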
+func (f *Factory) CreateIndex(ctx context.Context, name string, schema searchindex.IndexConfig, configJSON []byte) (searchindex.Index, error) {
+	var cfg Config
+	if len(configJSON) > 0 {
+		if err := json.Unmarshal(configJSON, &cfg); err != nil {
+			return nil, fmt.Errorf("weaviate: invalid config: %w", err)
+		}
+	}
+	if cfg.Host == "" {
+		cfg.Host = "localhost:8080"
+	}
+	if cfg.Scheme == "" {
+		cfg.Scheme = "http"
+	}
+
+	className := toClassName(name)
+	baseURL := cfg.Scheme + "://" + cfg.Host
+
+	idx := &Index{
+		name:      name,
+		className: className,
+		config:    cfg,
+		schema:    schema,
+		client:    &http.Client{},
+		baseURL:   baseURL,
+	}
+
+	// Build class definition.
+	classDef, err := idx.buildClassDefinition()
+	if err != nil {
+		return nil, fmt.Errorf("weaviate: build class definition: %w", err)
+	}
+
+	body, err := json.Marshal(classDef)
+	if err != nil {
+		return nil, fmt.Errorf("weaviate: marshal class definition: %w", err)
+	}
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, baseURL+"/v1/schema", bytes.NewReader(body))
+	if err != nil {
+		return nil, fmt.Errorf("weaviate: create schema request: %w", err)
+	}
+	req.Header.Set("Content-Type", "application/json")
+	idx.setAuthHeader(req)
+
+	resp, err := idx.client.Do(req)
+	if err != nil {
+		return nil, fmt.Errorf("weaviate: create class: %w", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated {
+		respBody, _ := io.ReadAll(resp.Body)
+		// If class already exists (422), treat as success.
+		if resp.StatusCode == http.StatusUnprocessableEntity && strings.Contains(string(respBody), "already exists") {
+			return idx, nil
+		}
+		return nil, fmt.Errorf("weaviate: create class failed (status %d): %s", resp.StatusCode, string(respBody))
+	}
+
+	return idx, nil
+}
+
+// buildClassDefinition builds the Weaviate class JSON body from IndexConfig.
+func (idx *Index) buildClassDefinition() (map[string]any, error) {
+	properties := []map[string]any{
+		{
+			"name":         reservedTypeNameField,
+			"dataType":     []string{"text"},
+			"tokenization": "field",
+		},
+		{
+			"name":     reservedKeyFieldsField,
+			"dataType": []string{"text"},
+		},
+		{
+			"name":         reservedDocIDField,
+			"dataType":     []string{"text"},
+			"tokenization": "field",
+		},
+	}
+
+	var vectorDimensions int
+
+	for _, fc := range idx.schema.Fields {
+		prop := fieldToProperty(fc)
+		if prop != nil {
+			properties = append(properties, prop)
+		}
+		if fc.Type == searchindex.FieldTypeVector && fc.Dimensions > 0 {
+			vectorDimensions = fc.Dimensions
+		}
+	}
+
+	classDef := map[string]any{
+		"class":      idx.className,
+		"properties": properties,
+		"vectorizer": "none",
+	}
+
+	if vectorDimensions > 0 {
+		classDef["vectorIndexConfig"] = map[string]any{
+			"distance": "cosine",
+		}
+	}
+
+	return classDef, nil
+}
+
+// fieldToProperty converts a FieldConfig to a Weaviate property definition.
+// Returns nil for vector fields: the class vectorizer is "none" and vectors
+// are supplied at the object level rather than as properties.
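+// For example: TEXT -> text ("word" tokenization), KEYWORD -> text ("field"
+// tokenization), NUMERIC -> number, BOOL -> boolean, DATE/DATETIME -> date.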
+func fieldToProperty(fc searchindex.FieldConfig) map[string]any {
+	switch fc.Type {
+	case searchindex.FieldTypeText:
+		return map[string]any{
+			"name":         fc.Name,
+			"dataType":     []string{"text"},
+			"tokenization": "word",
+		}
+	case searchindex.FieldTypeKeyword:
+		return map[string]any{
+			"name":         fc.Name,
+			"dataType":     []string{"text"},
+			"tokenization": "field",
+		}
+	case searchindex.FieldTypeNumeric:
+		return map[string]any{
+			"name":     fc.Name,
+			"dataType": []string{"number"},
+		}
+	case searchindex.FieldTypeBool:
+		return map[string]any{
+			"name":     fc.Name,
+			"dataType": []string{"boolean"},
+		}
+	case searchindex.FieldTypeVector:
+		// Vectors are provided at object level, not as a property.
+		return nil
+	case searchindex.FieldTypeGeo:
+		// Geo fields are not mapped by this backend implementation.
+		return nil
+	case searchindex.FieldTypeDate, searchindex.FieldTypeDateTime:
+		return map[string]any{
+			"name":     fc.Name,
+			"dataType": []string{"date"},
+		}
+	default:
+		return nil
+	}
+}
+
+// IndexDocument indexes a single document with upsert semantics.
+// It first tries POST (create). If the object already exists (422), it falls
+// back to PUT (update).
+func (idx *Index) IndexDocument(ctx context.Context, doc searchindex.EntityDocument) error {
+	obj, err := idx.buildObject(doc)
+	if err != nil {
+		return err
+	}
+
+	body, err := json.Marshal(obj)
+	if err != nil {
+		return fmt.Errorf("weaviate: marshal object: %w", err)
+	}
+
+	// Try POST first (create new object).
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, idx.baseURL+"/v1/objects", bytes.NewReader(body))
+	if err != nil {
+		return fmt.Errorf("weaviate: create request: %w", err)
+	}
+	req.Header.Set("Content-Type", "application/json")
+	idx.setAuthHeader(req)
+
+	resp, err := idx.client.Do(req)
+	if err != nil {
+		return fmt.Errorf("weaviate: index document: %w", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode == http.StatusOK || resp.StatusCode == http.StatusCreated {
+		return nil
+	}
+
+	respBody, _ := io.ReadAll(resp.Body)
+
+	// If already exists (422), fall back to PUT (update).
+	if resp.StatusCode == http.StatusUnprocessableEntity && strings.Contains(string(respBody), "already exists") {
+		id, _ := obj["id"].(string)
+		putBody, _ := json.Marshal(obj)
+		putReq, err := http.NewRequestWithContext(ctx, http.MethodPut, idx.baseURL+"/v1/objects/"+idx.className+"/"+id, bytes.NewReader(putBody))
+		if err != nil {
+			return fmt.Errorf("weaviate: create put request: %w", err)
+		}
+		putReq.Header.Set("Content-Type", "application/json")
+		idx.setAuthHeader(putReq)
+
+		putResp, err := idx.client.Do(putReq)
+		if err != nil {
+			return fmt.Errorf("weaviate: update document: %w", err)
+		}
+		defer putResp.Body.Close()
+
+		if putResp.StatusCode != http.StatusOK && putResp.StatusCode != http.StatusNoContent {
+			putRespBody, _ := io.ReadAll(putResp.Body)
+			return fmt.Errorf("weaviate: update document failed (status %d): %s", putResp.StatusCode, string(putRespBody))
+		}
+		return nil
+	}
+
+	return fmt.Errorf("weaviate: index document failed (status %d): %s", resp.StatusCode, string(respBody))
+}
+
+// IndexDocuments indexes a batch of documents via the batch API.
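+// The body POSTed to /v1/batch/objects has roughly this shape (illustrative):
+//
+//	{"objects": [{"class": "Products", "id": "<uuid>", "properties": {...}, "vector": [...]}]}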
+func (idx *Index) IndexDocuments(ctx context.Context, docs []searchindex.EntityDocument) error { + if len(docs) == 0 { + return nil + } + if len(docs) == 1 { + return idx.IndexDocument(ctx, docs[0]) + } + + objects := make([]map[string]any, 0, len(docs)) + for _, doc := range docs { + obj, err := idx.buildObject(doc) + if err != nil { + return err + } + // For batch, we need to add the class field. + obj["class"] = idx.className + objects = append(objects, obj) + } + + batchBody := map[string]any{ + "objects": objects, + } + + body, err := json.Marshal(batchBody) + if err != nil { + return fmt.Errorf("weaviate: marshal batch: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, idx.baseURL+"/v1/batch/objects", bytes.NewReader(body)) + if err != nil { + return fmt.Errorf("weaviate: create batch request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + idx.setAuthHeader(req) + + resp, err := idx.client.Do(req) + if err != nil { + return fmt.Errorf("weaviate: batch index: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("weaviate: read batch response: %w", err) + } + + if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated { + return fmt.Errorf("weaviate: batch index failed (status %d): %s", resp.StatusCode, string(respBody)) + } + + // Check individual results for errors. + var batchResults []struct { + Result struct { + Errors *struct { + Error []struct { + Message string `json:"message"` + } `json:"error"` + } `json:"errors"` + } `json:"result"` + } + + if err := json.Unmarshal(respBody, &batchResults); err != nil { + // Not all responses are parseable; if the status was OK, accept it. + return nil + } + + for i, r := range batchResults { + if r.Result.Errors != nil && len(r.Result.Errors.Error) > 0 { + return fmt.Errorf("weaviate: batch object %d error: %s", i, r.Result.Errors.Error[0].Message) + } + } + + return nil +} + +// buildObject converts an EntityDocument to a Weaviate object for indexing. +func (idx *Index) buildObject(doc searchindex.EntityDocument) (map[string]any, error) { + docIDStr := documentIDString(doc.Identity) + id := deterministicUUID(docIDStr) + + keyFieldsJSON, err := json.Marshal(doc.Identity.KeyFields) + if err != nil { + return nil, fmt.Errorf("weaviate: marshal key fields: %w", err) + } + + properties := make(map[string]any, len(doc.Fields)+3) + for k, v := range doc.Fields { + properties[k] = v + } + // Weaviate requires all date properties to be RFC 3339 formatted. + // Normalize date-only strings (e.g. "2024-01-15") to RFC 3339. + dateFields := idx.dateFieldSet() + for name := range dateFields { + if s, ok := properties[name].(string); ok { + properties[name] = normalizeDateToRFC3339(s) + } + } + properties[reservedTypeNameField] = doc.Identity.TypeName + properties[reservedKeyFieldsField] = string(keyFieldsJSON) + properties[reservedDocIDField] = docIDStr + + obj := map[string]any{ + "id": id, + "class": idx.className, + "properties": properties, + } + + // If there are vectors, use the first one as the object vector. + if len(doc.Vectors) > 0 { + for _, vec := range doc.Vectors { + obj["vector"] = vec + break + } + } + + return obj, nil +} + +// dateFieldSet returns the set of field names that are DATE or DATETIME type. 
+func (idx *Index) dateFieldSet() map[string]bool {
+	m := make(map[string]bool)
+	for _, fc := range idx.schema.Fields {
+		if fc.Type == searchindex.FieldTypeDate || fc.Type == searchindex.FieldTypeDateTime {
+			m[fc.Name] = true
+		}
+	}
+	return m
+}
+
+// normalizeDateToRFC3339 ensures a date string is in RFC 3339 format.
+// Date-only strings like "2024-01-15" are converted to "2024-01-15T00:00:00Z".
+// Already RFC 3339 strings are returned as-is.
+func normalizeDateToRFC3339(s string) string {
+	// Already RFC 3339
+	if _, err := time.Parse(time.RFC3339, s); err == nil {
+		return s
+	}
+	if _, err := time.Parse(time.RFC3339Nano, s); err == nil {
+		return s
+	}
+	// Date-only: append time component
+	if t, err := time.Parse(time.DateOnly, s); err == nil {
+		return t.UTC().Format(time.RFC3339)
+	}
+	return s
+}
+
+// DeleteDocument deletes a single document by its deterministic ID.
+func (idx *Index) DeleteDocument(ctx context.Context, id searchindex.DocumentIdentity) error {
+	docIDStr := documentIDString(id)
+	uuid := deterministicUUID(docIDStr)
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodDelete, idx.baseURL+"/v1/objects/"+idx.className+"/"+uuid, nil)
+	if err != nil {
+		return fmt.Errorf("weaviate: create delete request: %w", err)
+	}
+	idx.setAuthHeader(req)
+
+	resp, err := idx.client.Do(req)
+	if err != nil {
+		return fmt.Errorf("weaviate: delete document: %w", err)
+	}
+	defer resp.Body.Close()
+
+	// 204 No Content is success; 404 means already gone.
+	if resp.StatusCode != http.StatusNoContent && resp.StatusCode != http.StatusNotFound {
+		respBody, _ := io.ReadAll(resp.Body)
+		return fmt.Errorf("weaviate: delete document failed (status %d): %s", resp.StatusCode, string(respBody))
+	}
+
+	return nil
+}
+
+// DeleteDocuments deletes a batch of documents by identity.
+func (idx *Index) DeleteDocuments(ctx context.Context, ids []searchindex.DocumentIdentity) error {
+	// Use batch delete with a where filter matching _docId values.
+	if len(ids) == 0 {
+		return nil
+	}
+	if len(ids) == 1 {
+		return idx.DeleteDocument(ctx, ids[0])
+	}
+
+	// Build an Or filter over the _docId values and delete them all in a
+	// single batch-delete request.
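+	// An illustrative where filter for two documents:
+	//
+	//	{"operator": "Or", "operands": [
+	//	  {"path": ["_docId"], "operator": "Equal", "valueText": "Product:id=1"},
+	//	  {"path": ["_docId"], "operator": "Equal", "valueText": "Product:id=2"}]}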
+ operands := make([]map[string]any, 0, len(ids)) + for _, id := range ids { + docIDStr := documentIDString(id) + operands = append(operands, map[string]any{ + "path": []string{reservedDocIDField}, + "operator": "Equal", + "valueText": docIDStr, + }) + } + + var whereFilter map[string]any + if len(operands) == 1 { + whereFilter = operands[0] + } else { + whereFilter = map[string]any{ + "operator": "Or", + "operands": operands, + } + } + + batchDeleteBody := map[string]any{ + "match": map[string]any{ + "class": idx.className, + "where": whereFilter, + }, + "output": "minimal", + } + + body, err := json.Marshal(batchDeleteBody) + if err != nil { + return fmt.Errorf("weaviate: marshal batch delete: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, idx.baseURL+"/v1/batch/objects", bytes.NewReader(body)) + if err != nil { + return fmt.Errorf("weaviate: create batch delete request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + idx.setAuthHeader(req) + + resp, err := idx.client.Do(req) + if err != nil { + return fmt.Errorf("weaviate: batch delete: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNoContent { + respBody, _ := io.ReadAll(resp.Body) + return fmt.Errorf("weaviate: batch delete failed (status %d): %s", resp.StatusCode, string(respBody)) + } + + return nil +} + +// Search performs a search query against Weaviate using its GraphQL API. +func (idx *Index) Search(ctx context.Context, req searchindex.SearchRequest) (*searchindex.SearchResult, error) { + gqlQuery := idx.buildGraphQLQuery(req) + + body, err := json.Marshal(map[string]string{ + "query": gqlQuery, + }) + if err != nil { + return nil, fmt.Errorf("weaviate: marshal graphql query: %w", err) + } + + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, idx.baseURL+"/v1/graphql", bytes.NewReader(body)) + if err != nil { + return nil, fmt.Errorf("weaviate: create graphql request: %w", err) + } + httpReq.Header.Set("Content-Type", "application/json") + idx.setAuthHeader(httpReq) + + resp, err := idx.client.Do(httpReq) + if err != nil { + return nil, fmt.Errorf("weaviate: graphql request: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("weaviate: read graphql response: %w", err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("weaviate: graphql request failed (status %d): %s", resp.StatusCode, string(respBody)) + } + + return idx.parseGraphQLResponse(respBody) +} + +// Autocomplete is not supported by Weaviate — it has no term dictionary API. +func (idx *Index) Autocomplete(_ context.Context, _ searchindex.AutocompleteRequest) (*searchindex.AutocompleteResult, error) { + return nil, fmt.Errorf("weaviate: autocomplete is not supported") +} + +// Close releases resources held by the index. No persistent connection to close. +func (idx *Index) Close() error { + return nil +} + +// buildGraphQLQuery constructs the Weaviate GraphQL query string from a SearchRequest. +func (idx *Index) buildGraphQLQuery(req searchindex.SearchRequest) string { + var b strings.Builder + b.WriteString("{ Get { ") + b.WriteString(idx.className) + + // Build arguments. + var args []string + + // Search operator. + hasText := req.TextQuery != "" + hasVector := len(req.Vector) > 0 + + if hasText && hasVector { + // Hybrid search. 
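+		// Weaviate's hybrid operator blends BM25 and vector rankings; the
+		// blend weight (alpha) is left at the server default here.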
+ vectorStr := formatVector(req.Vector) + args = append(args, fmt.Sprintf("hybrid: {query: %s, vector: %s}", quoteString(req.TextQuery), vectorStr)) + } else if hasText { + // BM25 text search. + bm25Arg := fmt.Sprintf("bm25: {query: %s", quoteString(req.TextQuery)) + if len(req.TextFields) > 0 { + props := make([]string, len(req.TextFields)) + for i, tf := range req.TextFields { + if tf.Weight != 0 && tf.Weight != 1.0 { + props[i] = fmt.Sprintf("%s^%g", tf.Name, tf.Weight) + } else { + props[i] = tf.Name + } + } + bm25Arg += fmt.Sprintf(", properties: [%s]", quoteStringSlice(props)) + } + bm25Arg += "}" + args = append(args, bm25Arg) + } else if hasVector { + // Vector search. + vectorStr := formatVector(req.Vector) + args = append(args, fmt.Sprintf("nearVector: {vector: %s}", vectorStr)) + } + + // Where filter. + whereClause := idx.buildWhereClause(req) + if whereClause != "" { + args = append(args, "where: "+whereClause) + } + + // Sort. Weaviate does not support sort with BM25 or hybrid search operators; + // only include sort for plain object fetches (no text or vector search). + if len(req.Sort) > 0 && !hasText && !hasVector { + sortArgs := make([]string, 0, len(req.Sort)) + for _, sf := range req.Sort { + order := "desc" + if sf.Ascending { + order = "asc" + } + sortArgs = append(sortArgs, fmt.Sprintf("{path: [%s], order: %s}", quoteString(sf.Field), order)) + } + args = append(args, fmt.Sprintf("sort: [%s]", strings.Join(sortArgs, ", "))) + } + + // Limit. + limit := effectiveLimit(req.Limit) + args = append(args, fmt.Sprintf("limit: %d", limit)) + + // Offset. + if req.Offset > 0 { + args = append(args, fmt.Sprintf("offset: %d", req.Offset)) + } + + if len(args) > 0 { + b.WriteString("(") + b.WriteString(strings.Join(args, ", ")) + b.WriteString(")") + } + + // Fields to return. + b.WriteString(" { ") + + // Request all schema fields plus reserved fields. + fieldNames := make([]string, 0, len(idx.schema.Fields)+3) + for _, fc := range idx.schema.Fields { + if fc.Type == searchindex.FieldTypeVector { + continue // Vectors are not returned as properties. + } + fieldNames = append(fieldNames, fc.Name) + } + fieldNames = append(fieldNames, reservedTypeNameField, reservedKeyFieldsField, reservedDocIDField) + b.WriteString(strings.Join(fieldNames, " ")) + + // Additional fields. + b.WriteString(" _additional { id score distance }") + + b.WriteString(" } ") + b.WriteString("} }") + + return b.String() +} + +// buildWhereClause constructs the Weaviate where filter from the SearchRequest. +func (idx *Index) buildWhereClause(req searchindex.SearchRequest) string { + var parts []string + + // TypeName filter. + if req.TypeName != "" { + parts = append(parts, fmt.Sprintf("{path: [%s], operator: Equal, valueText: %s}", + quoteString(reservedTypeNameField), quoteString(req.TypeName))) + } + + // Structured filter. + if req.Filter != nil { + filterStr := translateFilter(req.Filter) + if filterStr != "" { + parts = append(parts, filterStr) + } + } + + if len(parts) == 0 { + return "" + } + if len(parts) == 1 { + return parts[0] + } + + // Combine with AND. + return fmt.Sprintf("{operator: And, operands: [%s]}", strings.Join(parts, ", ")) +} + +// translateFilter recursively converts a searchindex.Filter to a Weaviate where clause string. 
+func translateFilter(f *searchindex.Filter) string { + if f == nil { + return "" + } + + // AND + if len(f.And) > 0 { + children := make([]string, 0, len(f.And)) + for _, child := range f.And { + c := translateFilter(child) + if c != "" { + children = append(children, c) + } + } + if len(children) == 0 { + return "" + } + if len(children) == 1 { + return children[0] + } + return fmt.Sprintf("{operator: And, operands: [%s]}", strings.Join(children, ", ")) + } + + // OR + if len(f.Or) > 0 { + children := make([]string, 0, len(f.Or)) + for _, child := range f.Or { + c := translateFilter(child) + if c != "" { + children = append(children, c) + } + } + if len(children) == 0 { + return "" + } + if len(children) == 1 { + return children[0] + } + return fmt.Sprintf("{operator: Or, operands: [%s]}", strings.Join(children, ", ")) + } + + // NOT – Weaviate does not support a "Not" operator. We negate the inner + // filter directly: Term → NotEqual, Bool Term → inverted value, And/Or → + // De Morgan's law (NOT(A AND B) = NOT A OR NOT B, NOT(A OR B) = NOT A AND NOT B). + if f.Not != nil { + return translateNegatedFilter(f.Not) + } + + // Term + if f.Term != nil { + return translateTermFilter(f.Term) + } + + // Terms (IN) - expressed as OR of Equal conditions. + if f.Terms != nil { + return translateTermsFilter(f.Terms) + } + + // Range + if f.Range != nil { + return translateRangeFilter(f.Range) + } + + // Prefix + if f.Prefix != nil { + return fmt.Sprintf("{path: [%s], operator: Like, valueText: %s}", + quoteString(f.Prefix.Field), quoteString(f.Prefix.Value+"*")) + } + + // Exists - use IsNull: false. + if f.Exists != nil { + return fmt.Sprintf("{path: [%s], operator: IsNull, valueBoolean: false}", + quoteString(f.Exists.Field)) + } + + return "" +} + +// translateNegatedFilter converts a filter into its negation without using the +// Weaviate "Not" operator (which is unsupported). Leaf filters are negated +// directly (Equal → NotEqual, bool values inverted), and compound filters use +// De Morgan's law. +func translateNegatedFilter(f *searchindex.Filter) string { + if f == nil { + return "" + } + + // De Morgan: NOT(A AND B) = NOT(A) OR NOT(B) + if len(f.And) > 0 { + children := make([]string, 0, len(f.And)) + for _, child := range f.And { + c := translateNegatedFilter(child) + if c != "" { + children = append(children, c) + } + } + if len(children) == 0 { + return "" + } + if len(children) == 1 { + return children[0] + } + return fmt.Sprintf("{operator: Or, operands: [%s]}", strings.Join(children, ", ")) + } + + // De Morgan: NOT(A OR B) = NOT(A) AND NOT(B) + if len(f.Or) > 0 { + children := make([]string, 0, len(f.Or)) + for _, child := range f.Or { + c := translateNegatedFilter(child) + if c != "" { + children = append(children, c) + } + } + if len(children) == 0 { + return "" + } + if len(children) == 1 { + return children[0] + } + return fmt.Sprintf("{operator: And, operands: [%s]}", strings.Join(children, ", ")) + } + + // Double negation: NOT(NOT(x)) = x + if f.Not != nil { + return translateFilter(f.Not) + } + + // Term: negate the equality check. 
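+	// e.g. NOT(category == "Footwear") becomes
+	//   {path: ["category"], operator: NotEqual, valueText: "Footwear"}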
+	if f.Term != nil {
+		return translateNegatedTermFilter(f.Term)
+	}
+
+	// Terms (IN): NOT(a IN [x,y]) = a != x AND a != y
+	if f.Terms != nil {
+		parts := make([]string, 0, len(f.Terms.Values))
+		for _, val := range f.Terms.Values {
+			p := translateNegatedTermFilter(&searchindex.TermFilter{
+				Field: f.Terms.Field,
+				Value: val,
+			})
+			if p != "" {
+				parts = append(parts, p)
+			}
+		}
+		if len(parts) == 0 {
+			return ""
+		}
+		if len(parts) == 1 {
+			return parts[0]
+		}
+		return fmt.Sprintf("{operator: And, operands: [%s]}", strings.Join(parts, ", "))
+	}
+
+	// Range: negate by inverting bounds. NOT(x >= a AND x <= b) = x < a OR x > b.
+	// For simplicity, apply De Morgan on the range parts.
+	if f.Range != nil {
+		var parts []string
+		rf := f.Range
+		if rf.GTE != nil {
+			parts = append(parts, fmt.Sprintf("{path: [%s], operator: LessThan, %s}",
+				quoteString(rf.Field), rangeValueClause(rf.GTE)))
+		} else if rf.HasGT && rf.GT != nil {
+			parts = append(parts, fmt.Sprintf("{path: [%s], operator: LessThanEqual, %s}",
+				quoteString(rf.Field), rangeValueClause(rf.GT)))
+		}
+		if rf.LTE != nil {
+			parts = append(parts, fmt.Sprintf("{path: [%s], operator: GreaterThan, %s}",
+				quoteString(rf.Field), rangeValueClause(rf.LTE)))
+		} else if rf.HasLT && rf.LT != nil {
+			parts = append(parts, fmt.Sprintf("{path: [%s], operator: GreaterThanEqual, %s}",
+				quoteString(rf.Field), rangeValueClause(rf.LT)))
+		}
+		if len(parts) == 0 {
+			return ""
+		}
+		if len(parts) == 1 {
+			return parts[0]
+		}
+		return fmt.Sprintf("{operator: Or, operands: [%s]}", strings.Join(parts, ", "))
+	}
+
+	// Prefix: NOT(Like "foo*") has no inverse operator in Weaviate; negated
+	// prefix filters are not supported and fall through to the empty clause.
+	// Exists: NOT(IsNull: false) → IsNull: true
+	if f.Exists != nil {
+		return fmt.Sprintf("{path: [%s], operator: IsNull, valueBoolean: true}",
+			quoteString(f.Exists.Field))
+	}
+
+	return ""
+}
+
+// translateNegatedTermFilter converts a TermFilter to a Weaviate where clause
+// with a NotEqual operator instead of Equal, effectively negating the match.
+// For boolean values, it inverts the value instead (since NotEqual on booleans
+// can be unintuitive in some backends).
+func translateNegatedTermFilter(tf *searchindex.TermFilter) string {
+	switch v := tf.Value.(type) {
+	case bool:
+		// Negate by flipping the boolean value with Equal operator.
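+		// e.g. NOT(inStock == true) becomes
+		//   {path: ["inStock"], operator: Equal, valueBoolean: false}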
+ return fmt.Sprintf("{path: [%s], operator: Equal, valueBoolean: %t}", + quoteString(tf.Field), !v) + case string: + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueText: %s}", + quoteString(tf.Field), quoteString(v)) + case float64: + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueNumber: %s}", + quoteString(tf.Field), formatFloat(v)) + case float32: + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueNumber: %s}", + quoteString(tf.Field), formatFloat(float64(v))) + case int: + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueInt: %d}", + quoteString(tf.Field), v) + case int64: + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueInt: %d}", + quoteString(tf.Field), v) + case json.Number: + if i, err := v.Int64(); err == nil { + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueInt: %d}", + quoteString(tf.Field), i) + } + if f, err := v.Float64(); err == nil { + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueNumber: %s}", + quoteString(tf.Field), formatFloat(f)) + } + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueText: %s}", + quoteString(tf.Field), quoteString(v.String())) + default: + return fmt.Sprintf("{path: [%s], operator: NotEqual, valueText: %s}", + quoteString(tf.Field), quoteString(fmt.Sprintf("%v", v))) + } +} + +// translateTermFilter converts a TermFilter to a Weaviate where clause. +func translateTermFilter(tf *searchindex.TermFilter) string { + switch v := tf.Value.(type) { + case string: + return fmt.Sprintf("{path: [%s], operator: Equal, valueText: %s}", + quoteString(tf.Field), quoteString(v)) + case float64: + return fmt.Sprintf("{path: [%s], operator: Equal, valueNumber: %s}", + quoteString(tf.Field), formatFloat(v)) + case float32: + return fmt.Sprintf("{path: [%s], operator: Equal, valueNumber: %s}", + quoteString(tf.Field), formatFloat(float64(v))) + case int: + return fmt.Sprintf("{path: [%s], operator: Equal, valueInt: %d}", + quoteString(tf.Field), v) + case int64: + return fmt.Sprintf("{path: [%s], operator: Equal, valueInt: %d}", + quoteString(tf.Field), v) + case bool: + return fmt.Sprintf("{path: [%s], operator: Equal, valueBoolean: %t}", + quoteString(tf.Field), v) + case json.Number: + // Try int first, then float. + if i, err := v.Int64(); err == nil { + return fmt.Sprintf("{path: [%s], operator: Equal, valueInt: %d}", + quoteString(tf.Field), i) + } + if f, err := v.Float64(); err == nil { + return fmt.Sprintf("{path: [%s], operator: Equal, valueNumber: %s}", + quoteString(tf.Field), formatFloat(f)) + } + return fmt.Sprintf("{path: [%s], operator: Equal, valueText: %s}", + quoteString(tf.Field), quoteString(v.String())) + default: + return fmt.Sprintf("{path: [%s], operator: Equal, valueText: %s}", + quoteString(tf.Field), quoteString(fmt.Sprintf("%v", v))) + } +} + +// translateTermsFilter converts a TermsFilter (IN) to a Weaviate where clause. +func translateTermsFilter(tf *searchindex.TermsFilter) string { + if len(tf.Values) == 0 { + return "" + } + parts := make([]string, 0, len(tf.Values)) + for _, val := range tf.Values { + p := translateTermFilter(&searchindex.TermFilter{ + Field: tf.Field, + Value: val, + }) + if p != "" { + parts = append(parts, p) + } + } + if len(parts) == 0 { + return "" + } + if len(parts) == 1 { + return parts[0] + } + return fmt.Sprintf("{operator: Or, operands: [%s]}", strings.Join(parts, ", ")) +} + +// rangeValueClause returns the value clause fragment for a Weaviate where filter. 
+// String values (from date filters) use valueDate; numeric values use valueNumber.
+func rangeValueClause(v any) string {
+	if s, ok := v.(string); ok {
+		return fmt.Sprintf("valueDate: %s", quoteString(normalizeDateToRFC3339(s)))
+	}
+	return fmt.Sprintf("valueNumber: %s", formatAnyNumber(v))
+}
+
+// translateRangeFilter converts a RangeFilter to Weaviate where clause(s).
+func translateRangeFilter(rf *searchindex.RangeFilter) string {
+	var parts []string
+
+	if rf.GTE != nil {
+		parts = append(parts, fmt.Sprintf("{path: [%s], operator: GreaterThanEqual, %s}",
+			quoteString(rf.Field), rangeValueClause(rf.GTE)))
+	} else if rf.HasGT && rf.GT != nil {
+		parts = append(parts, fmt.Sprintf("{path: [%s], operator: GreaterThan, %s}",
+			quoteString(rf.Field), rangeValueClause(rf.GT)))
+	}
+
+	if rf.LTE != nil {
+		parts = append(parts, fmt.Sprintf("{path: [%s], operator: LessThanEqual, %s}",
+			quoteString(rf.Field), rangeValueClause(rf.LTE)))
+	} else if rf.HasLT && rf.LT != nil {
+		parts = append(parts, fmt.Sprintf("{path: [%s], operator: LessThan, %s}",
+			quoteString(rf.Field), rangeValueClause(rf.LT)))
+	}
+
+	if len(parts) == 0 {
+		return ""
+	}
+	if len(parts) == 1 {
+		return parts[0]
+	}
+	return fmt.Sprintf("{operator: And, operands: [%s]}", strings.Join(parts, ", "))
+}
+
+// parseGraphQLResponse parses the Weaviate GraphQL response into a SearchResult.
+func (idx *Index) parseGraphQLResponse(body []byte) (*searchindex.SearchResult, error) {
+	var gqlResp struct {
+		Data   map[string]map[string][]json.RawMessage `json:"data"`
+		Errors []struct {
+			Message string `json:"message"`
+		} `json:"errors"`
+	}
+
+	if err := json.Unmarshal(body, &gqlResp); err != nil {
+		return nil, fmt.Errorf("weaviate: parse graphql response: %w", err)
+	}
+
+	if len(gqlResp.Errors) > 0 {
+		return nil, fmt.Errorf("weaviate: graphql error: %s", gqlResp.Errors[0].Message)
+	}
+
+	getResult, ok := gqlResp.Data["Get"]
+	if !ok {
+		return &searchindex.SearchResult{}, nil
+	}
+
+	classResults, ok := getResult[idx.className]
+	if !ok {
+		return &searchindex.SearchResult{}, nil
+	}
+
+	hits := make([]searchindex.SearchHit, 0, len(classResults))
+	for _, raw := range classResults {
+		var obj map[string]any
+		if err := json.Unmarshal(raw, &obj); err != nil {
+			return nil, fmt.Errorf("weaviate: parse result object: %w", err)
+		}
+
+		hit, err := idx.convertHit(obj)
+		if err != nil {
+			return nil, err
+		}
+		hits = append(hits, hit)
+	}
+
+	return &searchindex.SearchResult{
+		Hits: hits,
+		// Weaviate's Get API does not return an overall match count, so
+		// TotalCount reflects only the returned page (an Aggregate query
+		// would be needed for a true total).
+		TotalCount: len(hits),
+	}, nil
+}
+
+// convertHit converts a single Weaviate result object to a SearchHit.
+func (idx *Index) convertHit(obj map[string]any) (searchindex.SearchHit, error) {
+	identity, err := extractIdentity(obj)
+	if err != nil {
+		return searchindex.SearchHit{}, err
+	}
+
+	// Build representation from fields, excluding internal fields.
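+	// The representation doubles as a federation entity representation: it
+	// carries __typename plus the entity's key fields, which is what the
+	// router needs to resolve remaining fields from the owning subgraph.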
+ representation := make(map[string]any) + for k, v := range obj { + if k == reservedTypeNameField || k == reservedKeyFieldsField || k == reservedDocIDField || k == "_additional" { + continue + } + representation[k] = v + } + representation["__typename"] = identity.TypeName + for k, v := range identity.KeyFields { + representation[k] = v + } + + var score float64 + var distance float64 + + if additional, ok := obj["_additional"].(map[string]any); ok { + if s, ok := additional["score"]; ok { + score = toFloat64Safe(s) + } + if d, ok := additional["distance"]; ok { + distance = toFloat64Safe(d) + } + } + + return searchindex.SearchHit{ + Identity: identity, + Score: score, + Distance: distance, + Representation: representation, + }, nil +} + +// extractIdentity reconstructs a DocumentIdentity from stored fields. +func extractIdentity(obj map[string]any) (searchindex.DocumentIdentity, error) { + typeName, _ := obj[reservedTypeNameField].(string) + keyFieldsRaw, _ := obj[reservedKeyFieldsField].(string) + + var keyFields map[string]any + if keyFieldsRaw != "" { + if err := json.Unmarshal([]byte(keyFieldsRaw), &keyFields); err != nil { + return searchindex.DocumentIdentity{}, fmt.Errorf("weaviate: failed to unmarshal key fields: %w", err) + } + } + if keyFields == nil { + keyFields = make(map[string]any) + } + + return searchindex.DocumentIdentity{ + TypeName: typeName, + KeyFields: keyFields, + }, nil +} + +// setAuthHeader sets the Authorization header if an API key is configured. +func (idx *Index) setAuthHeader(req *http.Request) { + if idx.config.APIKey != "" { + req.Header.Set("Authorization", "Bearer "+idx.config.APIKey) + } +} + +// documentIDString computes a deterministic string ID from a DocumentIdentity. +// Format: TypeName:key1=val1,key2=val2,... (keys sorted alphabetically). +func documentIDString(id searchindex.DocumentIdentity) string { + if len(id.KeyFields) == 0 { + return id.TypeName + } + keys := make([]string, 0, len(id.KeyFields)) + for k := range id.KeyFields { + keys = append(keys, k) + } + sort.Strings(keys) + + var b strings.Builder + b.WriteString(id.TypeName) + b.WriteByte(':') + for i, k := range keys { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(k) + b.WriteByte('=') + fmt.Fprintf(&b, "%v", id.KeyFields[k]) + } + return b.String() +} + +// deterministicUUID generates a UUID v5 using SHA-1 from the given name string. +// Uses the DNS namespace UUID as the base (6ba7b810-9dad-11d1-80b4-00c04fd430c8). +func deterministicUUID(name string) string { + // UUID v5 namespace (DNS namespace from RFC 4122). + namespace := [16]byte{ + 0x6b, 0xa7, 0xb8, 0x10, + 0x9d, 0xad, 0x11, 0xd1, + 0x80, 0xb4, 0x00, 0xc0, + 0x4f, 0xd4, 0x30, 0xc8, + } + + h := sha1.New() + h.Write(namespace[:]) + h.Write([]byte(name)) + sum := h.Sum(nil) + + // Set version 5. + sum[6] = (sum[6] & 0x0f) | 0x50 + // Set variant bits. + sum[8] = (sum[8] & 0x3f) | 0x80 + + return fmt.Sprintf("%08x-%04x-%04x-%04x-%012x", + sum[0:4], sum[4:6], sum[6:8], sum[8:10], sum[10:16]) +} + +// toClassName converts an index name to a valid Weaviate class name. +// Weaviate class names must start with an uppercase letter. +func toClassName(name string) string { + if name == "" { + return "Index" + } + // Replace any non-alphanumeric characters with underscores. + cleaned := strings.Map(func(r rune) rune { + if unicode.IsLetter(r) || unicode.IsDigit(r) { + return r + } + return '_' + }, name) + + // Capitalize first letter. 
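+	// cleaned is non-empty here because of the empty-name guard above, so
+	// indexing runes[0] is safe.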
+ runes := []rune(cleaned) + runes[0] = unicode.ToUpper(runes[0]) + return string(runes) +} + +// effectiveLimit returns a sensible default if limit is zero or negative. +func effectiveLimit(limit int) int { + if limit <= 0 { + return 10 + } + return limit +} + +// quoteString returns a GraphQL-escaped quoted string. +func quoteString(s string) string { + // Use JSON encoding for proper escaping. + b, _ := json.Marshal(s) + return string(b) +} + +// quoteStringSlice formats a slice of strings as GraphQL list of quoted strings. +func quoteStringSlice(ss []string) string { + parts := make([]string, len(ss)) + for i, s := range ss { + parts[i] = quoteString(s) + } + return strings.Join(parts, ", ") +} + +// formatVector formats a float32 slice as a GraphQL list. +func formatVector(v []float32) string { + parts := make([]string, len(v)) + for i, f := range v { + parts[i] = strconv.FormatFloat(float64(f), 'f', -1, 32) + } + return "[" + strings.Join(parts, ", ") + "]" +} + +// formatFloat formats a float64 for GraphQL. +func formatFloat(f float64) string { + return strconv.FormatFloat(f, 'f', -1, 64) +} + +// formatAnyNumber converts an any value to a numeric string for GraphQL. +func formatAnyNumber(v any) string { + switch n := v.(type) { + case float64: + return formatFloat(n) + case float32: + return formatFloat(float64(n)) + case int: + return strconv.Itoa(n) + case int64: + return strconv.FormatInt(n, 10) + case int32: + return strconv.FormatInt(int64(n), 10) + case json.Number: + return n.String() + default: + return fmt.Sprintf("%v", v) + } +} + +// toFloat64Safe converts an any value to float64, returning 0 on failure. +func toFloat64Safe(v any) float64 { + switch n := v.(type) { + case float64: + return n + case float32: + return float64(n) + case string: + f, _ := strconv.ParseFloat(n, 64) + return f + case json.Number: + f, _ := n.Float64() + return f + default: + return 0 + } +} diff --git a/v2/pkg/searchindex/weaviate/weaviate_test.go b/v2/pkg/searchindex/weaviate/weaviate_test.go new file mode 100644 index 0000000000..65b499203c --- /dev/null +++ b/v2/pkg/searchindex/weaviate/weaviate_test.go @@ -0,0 +1,361 @@ +//go:build integration + +package weaviate + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/searchindex" +) + +func startWeaviate(t *testing.T) string { + t.Helper() + ctx := context.Background() + + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: "semitechnologies/weaviate:1.27.0", + ExposedPorts: []string{"8080/tcp"}, + Env: map[string]string{ + "AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED": "true", + "PERSISTENCE_DATA_PATH": "/var/lib/weaviate", + "DEFAULT_VECTORIZER_MODULE": "none", + "CLUSTER_HOSTNAME": "node1", + }, + WaitingFor: wait.ForHTTP("/v1/.well-known/ready").WithPort("8080/tcp").WithStartupTimeout(60 * time.Second), + }, + Started: true, + }) + require.NoError(t, err) + t.Cleanup(func() { container.Terminate(ctx) }) + + host, err := container.Host(ctx) + require.NoError(t, err) + port, err := container.MappedPort(ctx, "8080/tcp") + require.NoError(t, err) + + return fmt.Sprintf("%s:%s", host, port.Port()) +} + +func newTestIndex(t *testing.T, host string) searchindex.Index { + t.Helper() + factory := NewFactory() + schema 
:= searchindex.IndexConfig{ + Name: "test_products", + Fields: []searchindex.FieldConfig{ + {Name: "name", Type: searchindex.FieldTypeText, Filterable: true, Sortable: true}, + {Name: "description", Type: searchindex.FieldTypeText}, + {Name: "category", Type: searchindex.FieldTypeKeyword, Filterable: true, Sortable: true}, + {Name: "price", Type: searchindex.FieldTypeNumeric, Filterable: true, Sortable: true}, + {Name: "inStock", Type: searchindex.FieldTypeBool, Filterable: true}, + }, + } + + configJSON := []byte(fmt.Sprintf(`{"host":%q,"scheme":"http"}`, host)) + idx, err := factory.CreateIndex(context.Background(), "test_products", schema, configJSON) + require.NoError(t, err) + t.Cleanup(func() { idx.Close() }) + return idx +} + +func populateTestData(t *testing.T, idx searchindex.Index) { + t.Helper() + docs := []searchindex.EntityDocument{ + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Running Shoes", "description": "Great for jogging and marathons", "category": "Footwear", "price": 89.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + Fields: map[string]any{"name": "Basketball Shoes", "description": "High-top basketball sneakers", "category": "Footwear", "price": 129.99, "inStock": true}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "3"}}, + Fields: map[string]any{"name": "Leather Belt", "description": "Genuine leather dress belt", "category": "Accessories", "price": 35.00, "inStock": false}, + }, + { + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "4"}}, + Fields: map[string]any{"name": "Wool Socks", "description": "Warm wool socks for winter", "category": "Footwear", "price": 12.99, "inStock": true}, + }, + } + err := idx.IndexDocuments(context.Background(), docs) + require.NoError(t, err) + + // Wait briefly for indexing to complete. 
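+	// NOTE: a fixed sleep is simple but inherently racy; polling the object
+	// count until it reaches len(docs) would make these tests more robust.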
+ time.Sleep(1 * time.Second) +} + +func TestWeaviateFullLifecycle(t *testing.T) { + host := startWeaviate(t) + idx := newTestIndex(t, host) + populateTestData(t, idx) + + t.Run("text search", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + Limit: 10, + }) + require.NoError(t, err) + assert.GreaterOrEqual(t, len(result.Hits), 2, "expected at least 2 hits for 'shoes'") + }) + + t.Run("text search with field restriction", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "shoes", + TextFields: []searchindex.TextFieldWeight{{Name: "name"}}, + Limit: 10, + }) + require.NoError(t, err) + assert.GreaterOrEqual(t, len(result.Hits), 2, "expected at least 2 hits for 'shoes' in name") + }) + + t.Run("term filter on keyword field", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 3, len(result.Hits), "expected 3 hits for category=Footwear") + }) + + t.Run("boolean filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "inStock", Value: false}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 1, len(result.Hits), "expected 1 hit for inStock=false") + }) + + t.Run("numeric range filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Range: &searchindex.RangeFilter{ + Field: "price", + GTE: 30.0, + LTE: 100.0, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 2, len(result.Hits), "expected 2 hits for price 30-100") + }) + + t.Run("AND filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + And: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "inStock", Value: true}}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 3, len(result.Hits), "expected 3 hits for Footwear AND inStock") + }) + + t.Run("search hit identity", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "running shoes", + Limit: 1, + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(result.Hits), 1) + hit := result.Hits[0] + assert.Equal(t, "Product", hit.Identity.TypeName) + assert.Equal(t, "Product", hit.Representation["__typename"]) + }) + + t.Run("OR filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Or: []*searchindex.Filter{ + {Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}}, + {Term: &searchindex.TermFilter{Field: "category", Value: "Accessories"}}, + }, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, len(result.Hits), "expected 4 hits for category=Footwear OR category=Accessories") + }) + + t.Run("NOT filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Not: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + }, + Limit: 
10, + }) + require.NoError(t, err) + assert.Equal(t, 1, len(result.Hits), "expected 1 hit for NOT category=Footwear") + }) + + t.Run("sorting by price ascending", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 10, + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(result.Hits), 1) + assert.Equal(t, "Wool Socks", result.Hits[0].Representation["name"], "cheapest product should be Wool Socks") + }) + + t.Run("pagination", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Sort: []searchindex.SortField{{Field: "price", Ascending: true}}, + Limit: 2, + Offset: 2, + }) + require.NoError(t, err) + assert.Equal(t, 2, len(result.Hits), "expected 2 hits with limit=2 offset=2") + }) + + t.Run("TypeName filter", func(t *testing.T) { + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TypeName: "Product", + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 4, len(result.Hits), "expected 4 hits for TypeName=Product") + }) + + t.Run("single IndexDocument", func(t *testing.T) { + doc := searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "5"}}, + Fields: map[string]any{"name": "Hiking Boots", "description": "Durable boots for trails", "category": "Footwear", "price": 149.99, "inStock": true}, + } + err := idx.IndexDocument(context.Background(), doc) + require.NoError(t, err) + + time.Sleep(1 * time.Second) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "hiking boots", + Limit: 10, + }) + require.NoError(t, err) + require.GreaterOrEqual(t, len(result.Hits), 1, "expected at least 1 hit for 'hiking boots'") + + found := false + for _, hit := range result.Hits { + if hit.Representation["name"] == "Hiking Boots" { + found = true + break + } + } + assert.True(t, found, "expected to find 'Hiking Boots' in search results") + }) + + t.Run("upsert overwrites existing document", func(t *testing.T) { + // Re-index product 1 with a new name. + err := idx.IndexDocument(context.Background(), searchindex.EntityDocument{ + Identity: searchindex.DocumentIdentity{TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + Fields: map[string]any{"name": "Trail Running Shoes", "description": "Great for trail running", "category": "Footwear", "price": 99.99, "inStock": true}, + }) + require.NoError(t, err) + + time.Sleep(1 * time.Second) + + // Total count should still be 5 (4 base + 1 from single IndexDocument test). + result, err := idx.Search(context.Background(), searchindex.SearchRequest{Limit: 10}) + require.NoError(t, err) + assert.Equal(t, 5, len(result.Hits), "expected 5 documents (upsert should not duplicate)") + + // Search for the updated name. 
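+		// The overwrite works because deterministicUUID maps the unchanged
+		// document identity to the same Weaviate object ID.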
+ result, err = idx.Search(context.Background(), searchindex.SearchRequest{ + TextQuery: "trail running", + Limit: 1, + }) + require.NoError(t, err) + require.NotEmpty(t, result.Hits) + assert.Contains(t, result.Hits[0].Representation["name"], "Trail") + }) + + t.Run("delete single document", func(t *testing.T) { + err := idx.DeleteDocument(context.Background(), searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "4"}, + }) + require.NoError(t, err) + + time.Sleep(500 * time.Millisecond) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Filter: &searchindex.Filter{ + Term: &searchindex.TermFilter{Field: "category", Value: "Footwear"}, + }, + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 3, len(result.Hits), "expected 3 Footwear hits after deleting Wool Socks") + }) + + t.Run("delete multiple documents", func(t *testing.T) { + err := idx.DeleteDocuments(context.Background(), []searchindex.DocumentIdentity{ + {TypeName: "Product", KeyFields: map[string]any{"id": "1"}}, + {TypeName: "Product", KeyFields: map[string]any{"id": "2"}}, + }) + require.NoError(t, err) + + time.Sleep(500 * time.Millisecond) + + result, err := idx.Search(context.Background(), searchindex.SearchRequest{ + Limit: 10, + }) + require.NoError(t, err) + assert.Equal(t, 2, len(result.Hits), "expected 2 documents after batch delete") + }) +} + +func TestDeterministicUUID(t *testing.T) { + id1 := deterministicUUID("Product:id=1") + id2 := deterministicUUID("Product:id=1") + id3 := deterministicUUID("Product:id=2") + + assert.Equal(t, id1, id2, "same input should produce same UUID") + assert.NotEqual(t, id1, id3, "different inputs should produce different UUIDs") + assert.Len(t, id1, 36, "UUID should be 36 characters") +} + +func TestDocumentIDString(t *testing.T) { + id := documentIDString(searchindex.DocumentIdentity{ + TypeName: "Product", + KeyFields: map[string]any{"id": "123", "sku": "ABC"}, + }) + expected := "Product:id=123,sku=ABC" + assert.Equal(t, expected, id) +} + +func TestToClassName(t *testing.T) { + tests := []struct { + input string + expected string + }{ + {"products", "Products"}, + {"my_index", "My_index"}, + {"test-data", "Test_data"}, + {"Products", "Products"}, + } + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + assert.Equal(t, tt.expected, toClassName(tt.input)) + }) + } +}