Add analyzer.skip_parser config option for PostgreSQL
This commit adds a new configuration option `analyzer.skip_parser` that
allows sqlc to skip the parser and catalog entirely, relying solely on
the database analyzer for query analysis. This is particularly useful
when:

- Working with complex PostgreSQL syntax not fully supported by the parser
- Ensuring queries are validated against the actual database schema
- Dealing with database-specific features or extensions

Key changes:
- Add `skip_parser` field to `Analyzer` config struct
- Implement `parseQueriesWithAnalyzer` method using sqlfile.Split
- Skip parser and catalog initialization when `skip_parser` is enabled
- Add validation requiring database analyzer when using skip_parser
- Only PostgreSQL is supported for this feature initially

Usage example:
```yaml
version: "2"
sql:
  - name: "example"
    engine: "postgresql"
    queries: "query.sql"
    schema: []
    database:
      uri: "postgresql://user:pass@localhost:5432/db"
    analyzer:
      skip_parser: true
    gen:
      go:
        package: "db"
        out: "db"
```
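
For reference, the `skip_parser` key maps onto the new `SkipParser *bool` field on `config.Analyzer`. Below is a minimal sketch of how the flag is represented and nil-checked in Go; it is not part of this commit and only compiles inside the sqlc module, since `internal/config` is an internal package:

```go
package main

import (
	"fmt"

	"github.com/sqlc-dev/sqlc/internal/config"
)

func main() {
	// analyzer.skip_parser: true in sqlc.yaml becomes a non-nil *bool.
	skip := true
	a := config.Analyzer{SkipParser: &skip}

	// The compiler guards against a nil pointer before dereferencing,
	// mirroring the checks added in generate.go and engine.go.
	enabled := a.SkipParser != nil && *a.SkipParser
	fmt.Println("skip_parser enabled:", enabled)
}
```

Leaving the key unset keeps `SkipParser` nil, so the existing parser-based path remains the default.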

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
claude committed Oct 23, 2025
commit 823e3e12b6694e706f8b80f04be409345d8c0446
30 changes: 19 additions & 11 deletions internal/cmd/generate.go
@@ -305,20 +305,28 @@ func parse(ctx context.Context, name, dir string, sql config.SQL, combo config.C
fmt.Fprintf(stderr, "error creating compiler: %s\n", err)
return nil, true
}
if err := c.ParseCatalog(sql.Schema); err != nil {
fmt.Fprintf(stderr, "# package %s\n", name)
if parserErr, ok := err.(*multierr.Error); ok {
for _, fileErr := range parserErr.Errs() {
printFileErr(stderr, dir, fileErr)

// Check if skip_parser is enabled
skipParser := sql.Analyzer.SkipParser != nil && *sql.Analyzer.SkipParser

// Skip catalog parsing if skip_parser is enabled
if !skipParser {
if err := c.ParseCatalog(sql.Schema); err != nil {
fmt.Fprintf(stderr, "# package %s\n", name)
if parserErr, ok := err.(*multierr.Error); ok {
for _, fileErr := range parserErr.Errs() {
printFileErr(stderr, dir, fileErr)
}
} else {
fmt.Fprintf(stderr, "error parsing schema: %s\n", err)
}
} else {
fmt.Fprintf(stderr, "error parsing schema: %s\n", err)
return nil, true
}
if parserOpts.Debug.DumpCatalog {
debug.Dump(c.Catalog())
}
return nil, true
}
if parserOpts.Debug.DumpCatalog {
debug.Dump(c.Catalog())
}

if err := c.ParseQueries(sql.Queries, parserOpts); err != nil {
fmt.Fprintf(stderr, "# package %s\n", name)
if parserErr, ok := err.(*multierr.Error); ok {
133 changes: 133 additions & 0 deletions internal/compiler/compile.go
@@ -1,20 +1,24 @@
package compiler

import (
"context"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strings"

"github.com/sqlc-dev/sqlc/internal/metadata"
"github.com/sqlc-dev/sqlc/internal/migrations"
"github.com/sqlc-dev/sqlc/internal/multierr"
"github.com/sqlc-dev/sqlc/internal/opts"
"github.com/sqlc-dev/sqlc/internal/rpc"
"github.com/sqlc-dev/sqlc/internal/source"
"github.com/sqlc-dev/sqlc/internal/sql/ast"
"github.com/sqlc-dev/sqlc/internal/sql/named"
"github.com/sqlc-dev/sqlc/internal/sql/sqlerr"
"github.com/sqlc-dev/sqlc/internal/sql/sqlfile"
"github.com/sqlc-dev/sqlc/internal/sql/sqlpath"
)

@@ -118,3 +122,132 @@ func (c *Compiler) parseQueries(o opts.Parser) (*Result, error) {
Queries: q,
}, nil
}

// parseQueriesWithAnalyzer parses queries using only the database analyzer,
// skipping the parser and catalog entirely. Uses sqlfile.Split to extract
// individual queries from .sql files.
func (c *Compiler) parseQueriesWithAnalyzer(o opts.Parser) (*Result, error) {
ctx := context.Background()
var q []*Query
merr := multierr.New()
set := map[string]struct{}{}
files, err := sqlpath.Glob(c.conf.Queries)
if err != nil {
return nil, err
}

if c.analyzer == nil {
return nil, fmt.Errorf("database analyzer is required when skip_parser is enabled")
}

for _, filename := range files {
blob, err := os.ReadFile(filename)
if err != nil {
merr.Add(filename, "", 0, err)
continue
}
src := string(blob)

// Use sqlfile.Split to extract individual queries
queries, err := sqlfile.Split(ctx, strings.NewReader(src))
if err != nil {
merr.Add(filename, src, 0, err)
continue
}

for _, queryText := range queries {
// Extract metadata from comments
name, cmd, err := metadata.ParseQueryNameAndType(queryText, metadata.CommentSyntax{Dash: true})
if err != nil {
merr.Add(filename, queryText, 0, err)
continue
}

// Skip queries without names (not marked with sqlc comments)
if name == "" {
continue
}

// Check for duplicate query names
if _, exists := set[name]; exists {
merr.Add(filename, queryText, 0, fmt.Errorf("duplicate query name: %s", name))
continue
}
set[name] = struct{}{}

// Extract additional metadata from comments
cleanedComments, err := source.CleanedComments(queryText, source.CommentSyntax{Dash: true})
if err != nil {
merr.Add(filename, queryText, 0, err)
continue
}

md := metadata.Metadata{
Name: name,
Cmd: cmd,
Filename: filepath.Base(filename),
}

md.Params, md.Flags, md.RuleSkiplist, err = metadata.ParseCommentFlags(cleanedComments)
if err != nil {
merr.Add(filename, queryText, 0, err)
continue
}

// Use the database analyzer to analyze the query
// We pass an empty AST node since we're not using the parser
result, err := c.analyzer.Analyze(ctx, nil, queryText, c.schema, &named.ParamSet{})
if err != nil {
merr.Add(filename, queryText, 0, err)
// If this rpc unauthenticated error bubbles up, then all future parsing/analysis will fail
if errors.Is(err, rpc.ErrUnauthenticated) {
return nil, merr
}
continue
}

// Convert analyzer results to Query format
var cols []*Column
for _, col := range result.Columns {
cols = append(cols, convertColumn(col))
}

var params []Parameter
for _, p := range result.Params {
params = append(params, Parameter{
Number: int(p.Number),
Column: convertColumn(p.Column),
})
}

// Strip comments from the final SQL
trimmed, comments, err := source.StripComments(queryText)
if err != nil {
merr.Add(filename, queryText, 0, err)
continue
}
md.Comments = comments

query := &Query{
SQL: trimmed,
Metadata: md,
Columns: cols,
Params: params,
}

q = append(q, query)
}
}

if len(merr.Errs()) > 0 {
return nil, merr
}
if len(q) == 0 {
return nil, fmt.Errorf("no queries contained in paths %s", strings.Join(c.conf.Queries, ","))
}

return &Result{
Catalog: nil, // No catalog when skip_parser is enabled
Queries: q,
}, nil
}
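
Even with the parser skipped, query files still need their `-- name:` dash-comment annotations, because `parseQueriesWithAnalyzer` derives each query's name and command from comments and silently drops unnamed statements. A minimal sketch of the split-and-name step it relies on — the queries are hypothetical, and the snippet only compiles inside the sqlc module since these are internal packages:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"strings"

	"github.com/sqlc-dev/sqlc/internal/metadata"
	"github.com/sqlc-dev/sqlc/internal/sql/sqlfile"
)

func main() {
	// Two hypothetical queries in one file; only the dash comments carry metadata.
	src := `-- name: GetAuthor :one
SELECT * FROM authors WHERE id = $1;

-- name: ListAuthors :many
SELECT * FROM authors ORDER BY name;
`
	// Split the file into individual statements, as parseQueriesWithAnalyzer does.
	queries, err := sqlfile.Split(context.Background(), strings.NewReader(src))
	if err != nil {
		log.Fatal(err)
	}
	for _, q := range queries {
		// Read the name and command type from the dash comment.
		name, cmd, err := metadata.ParseQueryNameAndType(q, metadata.CommentSyntax{Dash: true})
		if err != nil {
			log.Fatal(err)
		}
		// Statements without a name would be skipped by parseQueriesWithAnalyzer.
		fmt.Printf("%s %s\n", name, cmd)
	}
}
```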
39 changes: 36 additions & 3 deletions internal/compiler/engine.go
@@ -36,6 +36,23 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err
c.client = client
}

// Check if skip_parser is enabled
skipParser := conf.Analyzer.SkipParser != nil && *conf.Analyzer.SkipParser

// If skip_parser is enabled, we must have database analyzer enabled
if skipParser {
if conf.Database == nil {
return nil, fmt.Errorf("skip_parser requires database configuration")
}
if conf.Analyzer.Database != nil && !*conf.Analyzer.Database {
return nil, fmt.Errorf("skip_parser requires database analyzer to be enabled")
}
// Only PostgreSQL is supported for now
if conf.Engine != config.EnginePostgreSQL {
return nil, fmt.Errorf("skip_parser is only supported for PostgreSQL")
}
}

switch conf.Engine {
case config.EngineSQLite:
c.parser = sqlite.NewParser()
@@ -46,8 +63,11 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err
c.catalog = dolphin.NewCatalog()
c.selector = newDefaultSelector()
case config.EnginePostgreSQL:
c.parser = postgresql.NewParser()
c.catalog = postgresql.NewCatalog()
// Skip parser and catalog if skip_parser is enabled
if !skipParser {
c.parser = postgresql.NewParser()
c.catalog = postgresql.NewCatalog()
}
c.selector = newDefaultSelector()
if conf.Database != nil {
if conf.Analyzer.Database == nil || *conf.Analyzer.Database {
@@ -73,7 +93,20 @@ func (c *Compiler) ParseCatalog(schema []string) error {
}

func (c *Compiler) ParseQueries(queries []string, o opts.Parser) error {
r, err := c.parseQueries(o)
// Check if skip_parser is enabled
skipParser := c.conf.Analyzer.SkipParser != nil && *c.conf.Analyzer.SkipParser

var r *Result
var err error

if skipParser {
// Use database analyzer only, skip parser and catalog
r, err = c.parseQueriesWithAnalyzer(o)
} else {
// Use traditional parser-based approach
r, err = c.parseQueries(o)
}

if err != nil {
return err
}
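
This commit does not add a test for the new `NewCompiler` validation. A minimal sketch of one is below, using only the types and fields visible in this diff; the empty `config.CombinedSettings{}` literal is an assumption:

```go
package compiler

import (
	"testing"

	"github.com/sqlc-dev/sqlc/internal/config"
)

// Sketch: skip_parser without a database block should be rejected.
func TestSkipParserRequiresDatabase(t *testing.T) {
	skip := true
	_, err := NewCompiler(config.SQL{
		Engine:   config.EnginePostgreSQL,
		Analyzer: config.Analyzer{SkipParser: &skip},
		// Database is intentionally left nil.
	}, config.CombinedSettings{})
	if err == nil {
		t.Fatal("expected an error when skip_parser is enabled without a database configuration")
	}
}
```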
3 changes: 2 additions & 1 deletion internal/config/config.go
@@ -123,7 +123,8 @@ type SQL struct {
}

type Analyzer struct {
Database *bool `json:"database" yaml:"database"`
Database   *bool `json:"database" yaml:"database"`
SkipParser *bool `json:"skip_parser" yaml:"skip_parser"`
}

// TODO: Figure out a better name for this
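
Because the new field carries both `json` and `yaml` struct tags, the option should be reachable from a JSON config as well. A small sketch of the JSON mapping, again only compilable inside the sqlc module:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/sqlc-dev/sqlc/internal/config"
)

func main() {
	var a config.Analyzer
	// The json tag added in this commit maps "skip_parser" onto SkipParser.
	if err := json.Unmarshal([]byte(`{"skip_parser": true}`), &a); err != nil {
		log.Fatal(err)
	}
	fmt.Println(a.SkipParser != nil && *a.SkipParser) // true
}
```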