Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
151 changes: 151 additions & 0 deletions internal/compiler/compile.go
Original file line number Diff line number Diff line change
@@ -1,20 +1,24 @@
package compiler

import (
"context"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strings"

"github.com/sqlc-dev/sqlc/internal/metadata"
"github.com/sqlc-dev/sqlc/internal/migrations"
"github.com/sqlc-dev/sqlc/internal/multierr"
"github.com/sqlc-dev/sqlc/internal/opts"
"github.com/sqlc-dev/sqlc/internal/rpc"
"github.com/sqlc-dev/sqlc/internal/source"
"github.com/sqlc-dev/sqlc/internal/sql/ast"
"github.com/sqlc-dev/sqlc/internal/sql/named"
"github.com/sqlc-dev/sqlc/internal/sql/sqlerr"
"github.com/sqlc-dev/sqlc/internal/sql/sqlfile"
"github.com/sqlc-dev/sqlc/internal/sql/sqlpath"
)

Expand All @@ -30,6 +34,24 @@ func (c *Compiler) parseCatalog(schemas []string) error {
if err != nil {
return err
}

// Check if we're in skip_parser mode
skipParser := c.conf.Analyzer.SkipParser != nil && *c.conf.Analyzer.SkipParser

// If skip_parser is enabled, just read schema files without parsing
if skipParser {
for _, filename := range files {
blob, err := os.ReadFile(filename)
if err != nil {
return fmt.Errorf("reading schema file %s: %w", filename, err)
}
contents := migrations.RemoveRollbackStatements(string(blob))
c.schema = append(c.schema, contents)
}
return nil
}

// Normal path: parse and update catalog
merr := multierr.New()
for _, filename := range files {
blob, err := os.ReadFile(filename)
Expand Down Expand Up @@ -118,3 +140,132 @@ func (c *Compiler) parseQueries(o opts.Parser) (*Result, error) {
Queries: q,
}, nil
}

// parseQueriesWithAnalyzer parses queries using only the database analyzer,
// skipping the parser and catalog entirely. Each configured query file is
// split into individual statements with sqlfile.Split, sqlc metadata is
// extracted from the leading comments, and every named query is sent to the
// database analyzer for column and parameter resolution.
//
// The returned Result always carries a nil Catalog, since no schema parsing
// happens in this mode. The opts.Parser argument is accepted for interface
// parity with parseQueries and is currently unused here.
func (c *Compiler) parseQueriesWithAnalyzer(o opts.Parser) (*Result, error) {
	// Fail fast: without a database analyzer nothing can resolve query
	// types, so report the misconfiguration before reading any files.
	if c.analyzer == nil {
		return nil, fmt.Errorf("database analyzer is required when skip_parser is enabled")
	}

	ctx := context.Background()
	var q []*Query
	merr := multierr.New()
	set := map[string]struct{}{}

	files, err := sqlpath.Glob(c.conf.Queries)
	if err != nil {
		return nil, err
	}

	for _, filename := range files {
		blob, err := os.ReadFile(filename)
		if err != nil {
			merr.Add(filename, "", 0, err)
			continue
		}
		src := string(blob)

		// Split the file into individual SQL statements.
		queries, err := sqlfile.Split(ctx, strings.NewReader(src))
		if err != nil {
			merr.Add(filename, src, 0, err)
			continue
		}

		for _, queryText := range queries {
			// Pull the query name and command (e.g. :one, :many) out of
			// the sqlc comment header.
			name, cmd, err := metadata.ParseQueryNameAndType(queryText, metadata.CommentSyntax{Dash: true})
			if err != nil {
				merr.Add(filename, queryText, 0, err)
				continue
			}

			// Statements without a sqlc name comment are silently ignored.
			if name == "" {
				continue
			}

			// Query names must be unique across all query files.
			if _, exists := set[name]; exists {
				merr.Add(filename, queryText, 0, fmt.Errorf("duplicate query name: %s", name))
				continue
			}
			set[name] = struct{}{}

			// Normalize comments so flag parsing sees a uniform syntax.
			cleanedComments, err := source.CleanedComments(queryText, source.CommentSyntax{Dash: true})
			if err != nil {
				merr.Add(filename, queryText, 0, err)
				continue
			}

			md := metadata.Metadata{
				Name:     name,
				Cmd:      cmd,
				Filename: filepath.Base(filename),
			}

			md.Params, md.Flags, md.RuleSkiplist, err = metadata.ParseCommentFlags(cleanedComments)
			if err != nil {
				merr.Add(filename, queryText, 0, err)
				continue
			}

			// Ask the database to describe the query. The AST node is nil
			// because the parser was skipped entirely.
			result, err := c.analyzer.Analyze(ctx, nil, queryText, c.schema, &named.ParamSet{})
			if err != nil {
				merr.Add(filename, queryText, 0, err)
				// An unauthenticated RPC error would also fail every
				// remaining query, so abort the whole run immediately.
				if errors.Is(err, rpc.ErrUnauthenticated) {
					return nil, merr
				}
				continue
			}

			// Convert the analyzer's column and parameter descriptions
			// into the compiler's Query representation.
			var cols []*Column
			for _, col := range result.Columns {
				cols = append(cols, convertColumn(col))
			}

			var params []Parameter
			for _, p := range result.Params {
				params = append(params, Parameter{
					Number: int(p.Number),
					Column: convertColumn(p.Column),
				})
			}

			// Strip the comment header from the SQL emitted into the
			// generated code; the comments are kept on the metadata.
			trimmed, comments, err := source.StripComments(queryText)
			if err != nil {
				merr.Add(filename, queryText, 0, err)
				continue
			}
			md.Comments = comments

			q = append(q, &Query{
				SQL:      trimmed,
				Metadata: md,
				Columns:  cols,
				Params:   params,
			})
		}
	}

	if len(merr.Errs()) > 0 {
		return nil, merr
	}
	if len(q) == 0 {
		return nil, fmt.Errorf("no queries contained in paths %s", strings.Join(c.conf.Queries, ","))
	}

	return &Result{
		Catalog: nil, // skip_parser mode never builds a catalog
		Queries: q,
	}, nil
}
39 changes: 36 additions & 3 deletions internal/compiler/engine.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,23 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err
c.client = client
}

// Check if skip_parser is enabled
skipParser := conf.Analyzer.SkipParser != nil && *conf.Analyzer.SkipParser

// If skip_parser is enabled, we must have database analyzer enabled
if skipParser {
if conf.Database == nil {
return nil, fmt.Errorf("skip_parser requires database configuration")
}
if conf.Analyzer.Database != nil && !*conf.Analyzer.Database {
return nil, fmt.Errorf("skip_parser requires database analyzer to be enabled")
}
// Only PostgreSQL is supported for now
if conf.Engine != config.EnginePostgreSQL {
return nil, fmt.Errorf("skip_parser is only supported for PostgreSQL")
}
}

switch conf.Engine {
case config.EngineSQLite:
c.parser = sqlite.NewParser()
Expand All @@ -46,8 +63,11 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err
c.catalog = dolphin.NewCatalog()
c.selector = newDefaultSelector()
case config.EnginePostgreSQL:
c.parser = postgresql.NewParser()
c.catalog = postgresql.NewCatalog()
// Skip parser and catalog if skip_parser is enabled
if !skipParser {
c.parser = postgresql.NewParser()
c.catalog = postgresql.NewCatalog()
}
c.selector = newDefaultSelector()
if conf.Database != nil {
if conf.Analyzer.Database == nil || *conf.Analyzer.Database {
Expand All @@ -73,7 +93,20 @@ func (c *Compiler) ParseCatalog(schema []string) error {
}

func (c *Compiler) ParseQueries(queries []string, o opts.Parser) error {
r, err := c.parseQueries(o)
// Check if skip_parser is enabled
skipParser := c.conf.Analyzer.SkipParser != nil && *c.conf.Analyzer.SkipParser

var r *Result
var err error

if skipParser {
// Use database analyzer only, skip parser and catalog
r, err = c.parseQueriesWithAnalyzer(o)
} else {
// Use traditional parser-based approach
r, err = c.parseQueries(o)
}

if err != nil {
return err
}
Expand Down
3 changes: 2 additions & 1 deletion internal/config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,8 @@ type SQL struct {
}

type Analyzer struct {
Database *bool `json:"database" yaml:"database"`
Database *bool `json:"database" yaml:"database"`
SkipParser *bool `json:"skip_parser" yaml:"skip_parser"`
}

// TODO: Figure out a better name for this
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"contexts": ["managed-db"]
}
32 changes: 32 additions & 0 deletions internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading