V3 #2 (Merged)

only output JSON if there was a parsing error
andreashappe committed Sep 14, 2023
commit b892a9f2bd541caf5ce936756f4a7cc5d1d884ec
llm_with_state.py: 11 changes (7 additions, 4 deletions)
@@ -32,7 +32,7 @@ def get_next_cmd(self):

    def analyze_result(self, cmd, result):
        result = self.create_and_ask_prompt('successfull.txt', cmd=cmd, resp=result, facts=self.state)

        self.tmp_state = result.result["facts"]
        return result

@@ -49,7 +49,10 @@ def create_and_ask_prompt(self, template_file, **params):
        tic = time.perf_counter()
        result, tok_query, tok_res = self.llm_connection.exec_query(prompt)
        toc = time.perf_counter()
        print("debug[the plain result]: " + str(result))
        json_answer = json.loads(result)

        try:
            json_answer = json.loads(result)
        except Exception as e:
            print("there as an exception with JSON parsing: " + str(e))
            print("debug[the plain result]: " + str(result))

        return LLMResult(json_answer, toc-tic, tok_query, tok_res)
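
For readers following along outside the diff, below is a minimal, self-contained sketch of the pattern this commit introduces: JSON parsing is wrapped in try/except, and the raw LLM output is only printed when parsing fails. The standalone helper name parse_llm_answer and the None fallback are assumptions made for this example and are not part of the repository code.

import json

def parse_llm_answer(raw_result):
    # Sketch of the commit's idea: parse the response as JSON and only
    # print the plain result if there was a parsing error.
    json_answer = None  # assumed fallback; not in the committed code
    try:
        json_answer = json.loads(raw_result)
    except Exception as e:
        print("there was an exception with JSON parsing: " + str(e))
        print("debug[the plain result]: " + str(raw_result))
    return json_answer

if __name__ == "__main__":
    print(parse_llm_answer('{"facts": ["port 22 is open"]}'))  # parses cleanly
    print(parse_llm_answer("I am not JSON"))                   # triggers the debug output

Note that in the committed version json_answer is assigned only inside the try block, so a failed parse would leave it unbound when LLMResult(json_answer, ...) is constructed; the None fallback above is one way to avoid that.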