Merged
Changes from all commits
70 commits
035ed40
Big refactoring: break everything, and rebuild simpler
mattbit Mar 7, 2024
ed615b8
Start clean up of evaluators
mattbit Mar 11, 2024
261a5bf
Update prompt for harmful content detector
mattbit Mar 11, 2024
2d1257e
Fixing base evaluator
mattbit Mar 11, 2024
9860b6b
Enforce JSON on OpenAI
mattbit Mar 11, 2024
0ca7cef
Fix response format in OpenAI client
mattbit Mar 14, 2024
9259921
Refactoring of generators and evaluators
mattbit Mar 14, 2024
ba407b4
Merge branch 'main' into feature/llm-support
mattbit Mar 15, 2024
6e7251e
Fix format of evaluation results
mattbit Mar 15, 2024
edab964
Update test_coherency_evaluator.py
mattbit Mar 15, 2024
5478529
More bug fixing
mattbit Mar 15, 2024
0ff3ff4
Fix LLMClient test
mattbit Mar 15, 2024
7f37c6b
Add language support
mattbit Mar 15, 2024
55a94ff
Fix sycophancy test
mattbit Mar 15, 2024
1385205
Improve error for missing mistralai dependency
mattbit Mar 15, 2024
ec153a3
Test result from evaluators
mattbit Mar 15, 2024
dd823f8
Update tests
mattbit Mar 15, 2024
419434e
Fix faithfulness detector
mattbit Mar 15, 2024
3923435
Fix correctness test
mattbit Mar 15, 2024
977b540
Fix LLM seed
mattbit Mar 15, 2024
9737b45
Shorten info disclosure category
mattbit Mar 15, 2024
f2aa8e2
Improve mistral support
mattbit Mar 15, 2024
bbba96d
Adding logger
mattbit Mar 15, 2024
907dfb7
Fix experimental output formatting in sycophancy
mattbit Mar 15, 2024
2a8248d
Fix tests
mattbit Mar 18, 2024
4fb2a85
Conditionally enable json response for Mistral models
mattbit Mar 18, 2024
e81f2c4
Merge branch 'main' into feature/llm-support
mattbit Mar 18, 2024
0f15588
Remove extraneous files
mattbit Mar 18, 2024
7dcdb46
Fix type hint on CoherencyEvaluator
mattbit Mar 18, 2024
7e562d0
Fixing type hints
mattbit Mar 18, 2024
e9c6be6
Added `details` serialization to allow upload of data to the Hub
kevinmessiaen Mar 19, 2024
b6164e5
Fixed typo
kevinmessiaen Mar 19, 2024
bbab3e2
Fixed LLM tests
kevinmessiaen Mar 19, 2024
3c0eb3f
Merge branch 'main' into feature/llm-support
mattbit Mar 20, 2024
ad0e0c3
Extracted serialization and updated structure
kevinmessiaen Mar 21, 2024
05c646f
Simplified structure
kevinmessiaen Mar 21, 2024
a60f17a
Simplified structure
kevinmessiaen Mar 21, 2024
e3715ac
Adapted `EvaluationResult` to existing `TestDetails` so that Hub feat…
kevinmessiaen Mar 22, 2024
920a5b9
Fixed tests
kevinmessiaen Mar 22, 2024
c098d03
Fixed tests
kevinmessiaen Mar 22, 2024
d4313e1
Merge branch 'main' into feature/llm-support
mattbit Mar 27, 2024
647cc55
Update ChatMessage in RAG toolkit
mattbit Mar 28, 2024
d4cc9e6
Add embeddings support
mattbit Mar 28, 2024
2f57c2a
Use embeddings in the RAG knowledge base
mattbit Mar 28, 2024
9412535
Remove torch import from openai embeddings
mattbit Mar 28, 2024
f99517e
Improve embeddings
mattbit Mar 28, 2024
3c0fac7
Update RAG toolkit tests
mattbit Mar 28, 2024
ac9faf1
Load default embedding model with knowledge base
mattbit Mar 28, 2024
aa25458
Add test for Mistral
mattbit Mar 28, 2024
54c6eea
Regenerating pdm.lock
Mar 28, 2024
9e9aeb3
Cleanup
mattbit Mar 28, 2024
841357f
Merge branch 'feature/llm-support' of github.com:Giskard-AI/giskard i…
mattbit Mar 28, 2024
deda5e4
Fix simple test dataset generation
mattbit Mar 28, 2024
36f6dc6
Add hosted requirement evaluator
luca-rossi Mar 28, 2024
18f81ea
Remove uppercase F in examples, so model returns proper JSON
Hartorn Apr 5, 2024
e4c0acb
Merge pull request #1877 from Giskard-AI/fix-typo
mattbit Apr 5, 2024
2dd9a7a
Better handling of json format option
mattbit Apr 5, 2024
3f72f47
Merge branch 'main' into feature/llm-support
mattbit Apr 5, 2024
5ba3d04
Regenerating pdm.lock
Apr 5, 2024
3e9232b
Updating LLM scan outputs
mattbit Apr 5, 2024
85b21a9
Small refactoring
mattbit Apr 5, 2024
575272f
Better requirement parsing to tolerate LLM errors
mattbit Apr 5, 2024
58ba03c
Fix type hint
mattbit Apr 5, 2024
fa68891
Merge branch 'refs/heads/main' into feature/llm-support
andreybavt Apr 9, 2024
ed850f8
post merge `main`
andreybavt Apr 9, 2024
269437a
Merge branch 'main' into feature/llm-support
andreybavt Apr 9, 2024
cdea946
Regenerating pdm.lock
Apr 9, 2024
05622ed
Merge branch 'main' into feature/llm-support
kevinmessiaen Apr 10, 2024
d254e50
Small improvements
kevinmessiaen Apr 10, 2024
7a9a9b7
Fixed coherency test
kevinmessiaen Apr 10, 2024
15 changes: 5 additions & 10 deletions giskard/llm/client/__init__.py
@@ -3,7 +3,7 @@
 import logging
 import os
 
-from .base import LLMClient, LLMFunctionCall, LLMMessage, LLMToolCall
+from .base import ChatMessage, LLMClient
 from .logger import LLMLogger
 
 _default_client = None
@@ -58,7 +58,7 @@ def get_default_client() -> LLMClient:
         return _default_client
 
     # Setup the default client
-    from .openai import LegacyOpenAIClient, OpenAIClient
+    from .openai import OpenAIClient
 
     default_llm_api = get_default_llm_api()
 
@@ -68,21 +68,16 @@ def get_default_client() -> LLMClient:
 
         client = AzureOpenAI() if default_llm_api == "azure" else OpenAI()
 
-        _default_client = OpenAIClient(_default_llm_model, client)
+        _default_client = OpenAIClient(model=_default_llm_model, client=client)
     except ImportError:
-        # Fallback for openai<=0.28.1
-        if default_llm_api != "openai":
-            raise ValueError(f"LLM scan using {default_llm_api.name} require openai>=1.0.0")
-        _default_client = LegacyOpenAIClient(_default_llm_model)
+        raise ValueError(f"LLM scan using {default_llm_api.name} require openai>=1.0.0")
 
     return _default_client
 
 
 __all__ = [
     "LLMClient",
-    "LLMFunctionCall",
-    "LLMToolCall",
-    "LLMMessage",
+    "ChatMessage",
     "LLMLogger",
     "get_default_client",
     "set_llm_model",
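
For reference, a minimal usage sketch of the refactored entry point as this diff leaves it. It assumes an OpenAI API key is configured in the environment, and the prompt text is illustrative only.

from giskard.llm.client import ChatMessage, get_default_client

# Resolves to an OpenAIClient (Azure or plain OpenAI, depending on the
# configured LLM API); raises ValueError if openai>=1.0.0 is not installed.
client = get_default_client()

response = client.complete(
    messages=[ChatMessage(role="user", content="Say hello in one word.")],
    temperature=0.0,
)
print(response.content)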
46 changes: 10 additions & 36 deletions giskard/llm/client/base.py
@@ -1,59 +1,33 @@
-from typing import Any, Dict, List, Optional, Sequence
+from typing import Optional, Sequence
 
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
 
-import numpy as np
-
 from .logger import LLMLogger
 
 
-@dataclass
-class LLMFunctionCall:
-    name: str
-    arguments: Any
-
-
-@dataclass
-class LLMToolCall:
-    id: str
-    type: str
-    function: LLMFunctionCall
-
-
 @dataclass
-class LLMMessage:
+class ChatMessage:
     role: str
     content: Optional[str] = None
-    function_call: Optional[LLMFunctionCall] = None
-    tool_calls: Optional[List[LLMToolCall]] = None
 
-    @staticmethod
-    def create_message(role: str, content: str):
-        return LLMMessage(role=role, content=content, function_call=None, tool_calls=None)
+
+_logger = LLMLogger()
 
 
 class LLMClient(ABC):
     @property
-    @abstractmethod
     def logger(self) -> LLMLogger:
-        ...
+        return _logger
 
     @abstractmethod
     def complete(
         self,
-        messages: Sequence[LLMMessage],
-        functions=None,
-        temperature=0.5,
-        max_tokens=None,
-        function_call: Optional[Dict] = None,
+        messages: Sequence[ChatMessage],
+        temperature: float = 1,
+        max_tokens: Optional[int] = None,
         caller_id: Optional[str] = None,
-        tools=None,
-        tool_choice=None,
         seed: Optional[int] = None,
-    ) -> LLMMessage:
-        ...
-
-    @abstractmethod
-    def embeddings(self, text) -> np.ndarray:
+        format=None,
+    ) -> ChatMessage:
         ...
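
To illustrate the slimmed-down contract, here is a sketch of a custom client against the new base class; the EchoClient and its behaviour are hypothetical, not part of the PR. Only complete() remains abstract: logger now returns a shared module-level LLMLogger, and embeddings() is gone from the interface.

from typing import Optional, Sequence

from giskard.llm.client.base import ChatMessage, LLMClient


class EchoClient(LLMClient):
    # Toy implementation: echo the last message back as the assistant.
    def complete(
        self,
        messages: Sequence[ChatMessage],
        temperature: float = 1,
        max_tokens: Optional[int] = None,
        caller_id: Optional[str] = None,
        seed: Optional[int] = None,
        format=None,
    ) -> ChatMessage:
        return ChatMessage(role="assistant", content=messages[-1].content)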
66 changes: 66 additions & 0 deletions giskard/llm/client/mistral.py
@@ -0,0 +1,66 @@
+from typing import Optional, Sequence
+
+from dataclasses import asdict
+from logging import warning
+
+from ..config import LLMConfigurationError
+from ..errors import LLMImportError
+from . import LLMClient
+from .base import ChatMessage
+
+try:
+    from mistralai.client import MistralClient as _MistralClient
+    from mistralai.models.chat_completion import ChatMessage as MistralChatMessage
+except ImportError as err:
+    raise LLMImportError(
+        flavor="llm", msg="To use Mistral models, please install the `mistralai` package with `pip install mistralai`"
+    ) from err
+
+
+class MistralClient(LLMClient):
+    def __init__(self, model: str = "mistral-large-latest", client: _MistralClient = None):
+        self.model = model
+        self._client = client or _MistralClient()
+
+    def complete(
+        self,
+        messages: Sequence[ChatMessage],
+        temperature: float = 1.0,
+        max_tokens: Optional[int] = None,
+        caller_id: Optional[str] = None,
+        seed: Optional[int] = None,
+        format: str = None,
+    ) -> ChatMessage:
+        extra_params = dict()
+        if seed is not None:
+            extra_params["random_seed"] = seed
+
+        if format not in (None, "json", "json_object") and "large" not in self.model:
Review comment from @andreybavt (Contributor), Apr 8, 2024:
format could be of Literal type; pydantic's validate_call could also do the validation job.
+            warning(f"Unsupported format '{format}', ignoring.")
+            format = None
+
+        if format == "json" or format == "json_object":
+            extra_params["response_format"] = {"type": "json_object"}
+
+        try:
+            completion = self._client.chat(
+                model=self.model,
+                messages=[MistralChatMessage(**asdict(m)) for m in messages],
+                temperature=temperature,
+                max_tokens=max_tokens,
+                **extra_params,
+            )
+        except RuntimeError as err:
+            raise LLMConfigurationError("Could not get response from Mistral API") from err
+
+        self.logger.log_call(
+            prompt_tokens=completion.usage.prompt_tokens,
+            sampled_tokens=completion.usage.completion_tokens,
+            model=self.model,
+            client_class=self.__class__.__name__,
+            caller_id=caller_id,
+        )
+
+        msg = completion.choices[0].message
+
+        return ChatMessage(role=msg.role, content=msg.content)
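
Finally, a short usage sketch of the new client, assuming a MISTRAL_API_KEY is available in the environment for the underlying mistralai client to pick up:

from giskard.llm.client.base import ChatMessage
from giskard.llm.client.mistral import MistralClient

client = MistralClient(model="mistral-large-latest")

response = client.complete(
    messages=[ChatMessage(role="user", content="Reply with a JSON object.")],
    seed=42,        # forwarded to the Mistral API as `random_seed`
    format="json",  # sets response_format={"type": "json_object"}
)
print(response.content)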