@@ -97,14 +97,92 @@ export function convertMessagesToPrompt(
  * Services (AWS). It uses AWS credentials for authentication and can be
  * configured with various parameters such as the model to use, the AWS
  * region, and the maximum number of tokens to generate.
+ *
+ * The `BedrockChat` class supports both synchronous and asynchronous interactions with the model,
+ * allowing for streaming responses and handling new token callbacks. It can be configured with
+ * optional parameters like temperature, stop sequences, and guardrail settings for enhanced control
+ * over the generated responses.
+ *
  * @example
  * ```typescript
- * const model = new BedrockChat({
- *   model: "anthropic.claude-v2",
- *   region: "us-east-1",
- * });
- * const res = await model.invoke([{ content: "Tell me a joke" }]);
- * console.log(res);
+ * import { BedrockChat } from 'path-to-your-bedrock-chat-module';
+ * import { HumanMessage } from '@langchain/core/messages';
+ *
+ * async function run() {
+ *   // Instantiate the BedrockChat model with the desired configuration
+ *   const model = new BedrockChat({
+ *     model: "anthropic.claude-v2",
+ *     region: "us-east-1",
+ *     credentials: {
+ *       accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID!,
+ *       secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY!,
+ *     },
+ *     maxTokens: 150,
+ *     temperature: 0.7,
+ *     stopSequences: ["\n", " Human:", " Assistant:"],
+ *     streaming: false,
+ *     trace: "ENABLED",
+ *     guardrailIdentifier: "your-guardrail-id",
+ *     guardrailVersion: "1.0",
+ *     guardrailConfig: {
+ *       tagSuffix: "example",
+ *       streamProcessingMode: "SYNCHRONOUS",
+ *     },
+ *   });
+ *
+ *   // Prepare the message to be sent to the model
+ *   const message = new HumanMessage("Tell me a joke");
+ *
+ *   // Invoke the model with the message
+ *   const res = await model.invoke([message]);
+ *
+ *   // Output the response from the model
+ *   console.log(res);
+ * }
+ *
+ * run().catch(console.error);
+ * ```
+ *
+ * For streaming responses, use the following example:
+ * @example
+ * ```typescript
+ * import { BedrockChat } from 'path-to-your-bedrock-chat-module';
+ * import { HumanMessage } from '@langchain/core/messages';
+ *
+ * async function runStreaming() {
+ *   // Instantiate the BedrockChat model with the desired configuration
+ *   const model = new BedrockChat({
+ *     model: "anthropic.claude-3-sonnet-20240229-v1:0",
+ *     region: "us-east-1",
+ *     credentials: {
+ *       accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID!,
+ *       secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY!,
+ *     },
+ *     maxTokens: 150,
+ *     temperature: 0.7,
+ *     stopSequences: ["\n", " Human:", " Assistant:"],
+ *     streaming: true,
+ *     trace: "ENABLED",
+ *     guardrailIdentifier: "your-guardrail-id",
+ *     guardrailVersion: "1.0",
+ *     guardrailConfig: {
+ *       tagSuffix: "example",
+ *       streamProcessingMode: "SYNCHRONOUS",
+ *     },
+ *   });
+ *
+ *   // Prepare the message to be sent to the model
+ *   const message = new HumanMessage("Tell me a joke");
+ *
+ *   // Stream the response from the model
+ *   const stream = await model.stream([message]);
+ *   for await (const chunk of stream) {
+ *     // Output each chunk of the response
+ *     console.log(chunk);
+ *   }
+ * }
+ *
+ * runStreaming().catch(console.error);
  * ```
  */
 export class BedrockChat extends BaseChatModel implements BaseBedrockInput {
@@ -135,6 +213,17 @@ export class BedrockChat extends BaseChatModel implements BaseBedrockInput {
 
   lc_serializable = true;
 
+  trace?: "ENABLED" | "DISABLED";
+
+  guardrailIdentifier = "";
+
+  guardrailVersion = "";
+
+  guardrailConfig?: {
+    tagSuffix: string;
+    streamProcessingMode: "SYNCHRONOUS" | "ASYNCHRONOUS";
+  };
+
   get lc_aliases(): Record<string, string> {
     return {
       model: "model_id",
@@ -209,11 +298,16 @@ export class BedrockChat extends BaseChatModel implements BaseBedrockInput {
     this.modelKwargs = fields?.modelKwargs;
     this.streaming = fields?.streaming ?? this.streaming;
     this.usesMessagesApi = canUseMessagesApi(this.model);
+    this.trace = fields?.trace ?? this.trace;
+    this.guardrailVersion = fields?.guardrailVersion ?? this.guardrailVersion;
+    this.guardrailIdentifier =
+      fields?.guardrailIdentifier ?? this.guardrailIdentifier;
+    this.guardrailConfig = fields?.guardrailConfig;
   }
 
   async _generate(
     messages: BaseMessage[],
-    options: this["ParsedCallOptions"],
+    options: Partial<BaseChatModelParams>,
     runManager?: CallbackManagerForLLMRun
   ): Promise<ChatResult> {
     const service = "bedrock-runtime";
@@ -285,7 +379,8 @@ export class BedrockChat extends BaseChatModel implements BaseBedrockInput {
           this.maxTokens,
           this.temperature,
           options.stop ?? this.stopSequences,
-          this.modelKwargs
+          this.modelKwargs,
+          this.guardrailConfig
         )
       : BedrockLLMInputOutputAdapter.prepareInput(
           provider,
@@ -294,7 +389,8 @@ export class BedrockChat extends BaseChatModel implements BaseBedrockInput {
           this.temperature,
           options.stop ?? this.stopSequences,
           this.modelKwargs,
-          fields.bedrockMethod
+          fields.bedrockMethod,
+          this.guardrailConfig
         );
 
     const url = new URL(
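The two hunks above thread the new `guardrailConfig` through as an extra trailing argument to `BedrockLLMInputOutputAdapter.prepareMessagesInput` and `prepareInput`. The adapter itself is not part of this diff; the following is a minimal, self-contained sketch of how such a config might be merged into a provider payload. The helper name `withGuardrail`, the example payload fields, and the `amazon-bedrock-guardrailConfig` body key are assumptions for illustration, not taken from this commit.

```typescript
// Hypothetical sketch: merging an optional guardrail config into a request body.
type GuardrailConfig = {
  tagSuffix: string;
  streamProcessingMode: "SYNCHRONOUS" | "ASYNCHRONOUS";
};

function withGuardrail(
  body: Record<string, unknown>,
  guardrailConfig?: GuardrailConfig
): Record<string, unknown> {
  // Leave the provider-specific payload untouched when no config is given.
  if (!guardrailConfig) return body;
  return {
    ...body,
    // Assumed body key; the real adapter may attach the config differently.
    "amazon-bedrock-guardrailConfig": guardrailConfig,
  };
}

// Usage with a placeholder Anthropic-style payload.
const payload = withGuardrail(
  { prompt: "\n\nHuman: Tell me a joke\n\nAssistant:", max_tokens_to_sample: 150 },
  { tagSuffix: "example", streamProcessingMode: "SYNCHRONOUS" }
);
console.log(payload);
```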
@@ -313,6 +409,13 @@ export class BedrockChat extends BaseChatModel implements BaseBedrockInput {
         host: url.host,
         accept: "application/json",
         "content-type": "application/json",
+        ...(this.trace &&
+          this.guardrailIdentifier &&
+          this.guardrailVersion && {
+            "X-Amzn-Bedrock-Trace": this.trace,
+            "X-Amzn-Bedrock-GuardrailIdentifier": this.guardrailIdentifier,
+            "X-Amzn-Bedrock-GuardrailVersion": this.guardrailVersion,
+          }),
       },
     });
 
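The final hunk attaches the `X-Amzn-Bedrock-Trace`, `X-Amzn-Bedrock-GuardrailIdentifier`, and `X-Amzn-Bedrock-GuardrailVersion` headers only when trace, identifier, and version are all set, using a conditional object spread. Below is a standalone sketch of that idiom with placeholder values; the surrounding signed-fetch request construction is not reproduced here.

```typescript
// Minimal sketch of the conditional-spread pattern from the hunk above.
const trace: "ENABLED" | "DISABLED" | undefined = "ENABLED";
const guardrailIdentifier = "your-guardrail-id";
const guardrailVersion = "1.0";

const headers: Record<string, string> = {
  accept: "application/json",
  "content-type": "application/json",
  // `cond && { ... }` short-circuits to a falsy value when any setting is
  // missing, and spreading a falsy value into an object literal is a no-op,
  // so the guardrail headers are only added when all three are present.
  ...(trace &&
    guardrailIdentifier &&
    guardrailVersion && {
      "X-Amzn-Bedrock-Trace": trace,
      "X-Amzn-Bedrock-GuardrailIdentifier": guardrailIdentifier,
      "X-Amzn-Bedrock-GuardrailVersion": guardrailVersion,
    }),
};

console.log(headers);
```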