mistral

package v0.5.0

Published: Sep 21, 2025 License: MIT Imports: 13 Imported by: 0

Documentation

Index

Examples

Constants

This section is empty.

Variables

var (
	ErrInvalidModelInput = fmt.Errorf("invalid model input")
)
var (
	ErrNoEmbeddings = fmt.Errorf("no embeddings returned by the model")
)
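
Both values are sentinel errors. A minimal sketch of checking for ErrNoEmbeddings after an embedding call, assuming the plugin returns or wraps these sentinels so that errors.Is can match them:

package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/firebase/genkit/go/ai"
	"github.com/firebase/genkit/go/genkit"
	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	ctx := context.Background()
	g := genkit.Init(ctx,
		genkit.WithPlugins(mistral.NewPlugin("your_api_key")),
	)

	doc := ai.DocumentFromText("some text to embed", nil)
	_, err := genkit.Embed(ctx, g,
		ai.WithDocs(doc),
		ai.WithEmbedderName("mistral/fake-embed"),
	)

	// Assumption: the sentinel error is propagated (possibly wrapped) by genkit.Embed.
	if errors.Is(err, mistral.ErrNoEmbeddings) {
		fmt.Println("the model returned no embeddings")
	}
}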

Functions

func SanitizeToolName added in v0.5.0

func SanitizeToolName(name string) string

SanitizeToolName formats a function name to be used as a reference in a tool call.
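
A minimal usage sketch; the input name below is hypothetical and the exact sanitized form depends on the formatting rules SanitizeToolName applies:

package main

import (
	"fmt"

	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	// Hypothetical tool name that may contain characters a tool-call
	// reference does not accept.
	raw := "my search tool (v2)"
	fmt.Println(mistral.SanitizeToolName(raw))
}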

func StringFromParts added in v0.4.0

func StringFromParts(content []*ai.Part) string

StringFromParts returns the content of a multi-part message as a single string. The parts are joined with a newline character.
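
A minimal sketch, assuming each part is a text part built with ai.NewTextPart; per the description above, the result joins the parts with a newline:

package main

import (
	"fmt"

	"github.com/firebase/genkit/go/ai"
	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	parts := []*ai.Part{
		ai.NewTextPart("First part of the message."),
		ai.NewTextPart("Second part of the message."),
	}
	// The two parts are concatenated with a newline character.
	fmt.Println(mistral.StringFromParts(parts))
}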

Types

type Config added in v0.2.0

type Config struct {
	Client mistralclient.Config
}

func NewConfig added in v0.2.0

func NewConfig(opts ...Option) *Config

type EmbeddingOptions added in v0.4.0

type EmbeddingOptions struct {
	VectorSize int `json:"vectorSize,omitempty"`
}
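
A minimal sketch of building the options; only the struct literal and its JSON form are shown, since how the value is passed to the embedder (for example as a request config) is not documented here:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	// Request 512-dimensional vectors; a zero VectorSize would be omitted
	// from the JSON thanks to the omitempty tag.
	opts := mistral.EmbeddingOptions{VectorSize: 512}

	b, err := json.Marshal(opts)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"vectorSize":512}
}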

type Option added in v0.2.0

type Option func(*Config)

func WithClientConfig added in v0.4.0

func WithClientConfig(cfg mistralclient.Config) Option
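
A minimal sketch of passing a client configuration through the option; the mistralclient import path and the zero-value config are assumptions, since the mistralclient.Config fields are not shown here. NewPlugin accepts the same options as NewConfig:

package main

import (
	"context"

	"github.com/firebase/genkit/go/genkit"
	"github.com/thomas-marquis/genkit-mistral/mistral"
	"github.com/thomas-marquis/genkit-mistral/mistralclient" // assumed import path
)

func main() {
	// Zero-value client configuration; the real fields depend on mistralclient.Config.
	var clientCfg mistralclient.Config

	ctx := context.Background()
	_ = genkit.Init(ctx,
		genkit.WithPlugins(
			mistral.NewPlugin("your_api_key", mistral.WithClientConfig(clientCfg)),
		),
	)
}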

type Plugin

type Plugin struct {
	sync.Mutex

	APIKey string
	Client mistralclient.Client
	// contains filtered or unexported fields
}

func NewPlugin

func NewPlugin(apiKey string, opts ...Option) *Plugin
Example (InitPluginAndComputeAnEmbedding)
package main

import (
	"context"
	"fmt"

	"github.com/firebase/genkit/go/ai"
	"github.com/firebase/genkit/go/genkit"
	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	mistralApiKey := "your_api_key"
	ctx := context.Background()
	g := genkit.Init(ctx,
		genkit.WithPlugins(
			mistral.NewPlugin(mistralApiKey),
		),
	)

	docToEmbed := ai.DocumentFromText("Is scribe a good situation?", nil)
	res, err := genkit.Embed(ctx, g,
		ai.WithDocs(docToEmbed),
		ai.WithEmbedderName("mistral/fake-embed"),
	)

	if err == nil {
		fmt.Printf("Embedding succeeded with vector length %d\n", len(res.Embeddings[0].Embedding))
	} else {
		fmt.Printf("Embedding failed with reason %s\n", err)
	}
}
Output:

Embedding succeeded with vector length 1024
Example (InitPluginAndGenerateText)
package main

import (
	"context"
	"fmt"

	"github.com/firebase/genkit/go/ai"
	"github.com/firebase/genkit/go/genkit"
	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	mistralApiKey := "your_api_key"
	ctx := context.Background()
	g := genkit.Init(ctx,
		genkit.WithPlugins(
			mistral.NewPlugin(mistralApiKey),
		),
		genkit.WithDefaultModel("mistral/fake-completion"),
	)

	res, err := genkit.Generate(ctx, g,
		ai.WithSystem("you are a helpful assistant"),
		ai.WithPrompt("Tell me a joke"),
	)

	if err == nil {
		fmt.Printf("Generation succeeded, AI respond with role %s\n", res.Message.Role)
	} else {
		fmt.Printf("Generation failed with reason %s\n", err)
	}
}
Output:

Generation succeeded, AI responded with role model
Example (UnderConstraintTextGenerationWithAMockedModel)
package main

import (
	"context"
	"fmt"

	"github.com/firebase/genkit/go/ai"
	"github.com/firebase/genkit/go/core"
	"github.com/firebase/genkit/go/genkit"
	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	// This example may be useful if you want to mock a model for unit testing purposes.
	// Note that you can do the same with any model provider.

	// We start by initializing genkit, as usual
	mistralApiKey := "your_api_key"
	ctx := context.Background()
	g := genkit.Init(ctx,
		genkit.WithPlugins(
			mistral.NewPlugin(mistralApiKey),
		),
		genkit.WithDefaultModel("mistral/fake-completion"),
	)

	// Then, we need to create a mock model under a custom provider namespace (here: myapp)
	genkit.DefineModel(g, "myapp/mock-completion",
		&ai.ModelOptions{
			Supports: &ai.ModelSupports{
				Constrained: ai.ConstrainedSupportAll, // in this example, the mock "model" supports constrained generation
				Multiturn:   true,
			},
		},
		func(ctx context.Context, request *ai.ModelRequest, s core.StreamCallback[*ai.ModelResponseChunk]) (*ai.ModelResponse, error) {
			// This function is invoked each time the mock model is called.
			return &ai.ModelResponse{
				Message: ai.NewModelMessage(ai.NewJSONPart(`{"joke_content": "le mec à un phare, il s'appelle On, ....", "lol_level": 10000000000}`)),
			}, nil
		})

	type expectedOutput struct {
		JokeContent string `json:"joke_content"`
		LolLevel    int    `json:"lol_level"`
	}

	res, err := genkit.Generate(ctx, g,
		ai.WithSystem("you are a helpful assistant"),
		ai.WithPrompt("Tell me a joke"),
		ai.WithOutputType(expectedOutput{}),
		ai.WithModelName("myapp/mock-completion"), // we can override the default model here
	)

	if err == nil {
		var joke expectedOutput
		if err := res.Output(&joke); err != nil {
			fmt.Printf("Failed to parse output: %s\n", err)
		} else {
			fmt.Printf("Is this \"%s\" really level %d???!!\n", joke.JokeContent, joke.LolLevel)
		}
	} else {
		fmt.Printf("Generation failed with reason %s\n", err)
	}
}
Output:

Is this "le mec à un phare, il s'appelle On, ...." really level 10000000000???!!

func (*Plugin) Init

func (p *Plugin) Init(ctx context.Context) []api.Action

func (*Plugin) Name

func (p *Plugin) Name() string
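
Init and Name are normally invoked for you by genkit.Init via genkit.WithPlugins, as shown in the examples above. A minimal sketch of calling Name directly; the return value is assumed to be the "mistral" provider prefix that appears in model names such as "mistral/fake-completion":

package main

import (
	"fmt"

	"github.com/thomas-marquis/genkit-mistral/mistral"
)

func main() {
	p := mistral.NewPlugin("your_api_key")
	// Assumed to print the provider prefix used in the example model names.
	fmt.Println(p.Name())
}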