Skip to content

Commit 64699e3

Browse files
committed
Fixed shorten prompt bug from merge
1 parent 629489a commit 64699e3

File tree

1 file changed

+2

-2

lines changed

‎src/hackingBuddyGPT/usecases/web_api_testing/utils/llm_handler.py‎

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ def call_model(prompt: List[Dict[str, Any]]) -> Any:
5252

5353
try:
5454
adjusted_prompt = self.adjust_prompt(prompt, num_prompts=3) if len(
55-
prompt) > 20 else self.adjust_prompt_based_on_token(prompt)
55+
prompt) >= 20 else self.adjust_prompt_based_on_token(prompt)
5656
print(f'Adjusted prompt: {adjusted_prompt}')
5757
return call_model(adjusted_prompt)
5858

@@ -77,7 +77,7 @@ def adjust_prompt(self, prompt: List[Dict[str, Any]], num_prompts: int = 5) -> L
7777

7878
print(f"Adjusted prompt length: {len(adjusted_prompt)}")
7979
print(f"adjusted prompt:{adjusted_prompt}")
80-
return prompt
80+
return adjusted_prompt
8181

8282
def add_created_object(self, created_object: Any, object_type: str) -> None:
8383
"""

0 commit comments

Comments (0)