README.md (1 addition, 1 deletion)
@@ -128,7 +128,7 @@ class MinimalLinuxPrivesc(RoundBasedUseCase, UseCase, abc.ABC):

 # get the next command from the LLM
 answer = self.llm.get_response(template_next_cmd, _capabilities=self._capabilities, history=history, conn=self.conn)
-cmd = llm_util.cmd_output_fixer(cmd.result)
+cmd = llm_util.cmd_output_fixer(answer.result)
 
 with self.console.status("[bold green]Executing that command..."):
     if answer.result.startswith("test_credential"):
capabilities/ssh_run_command.py (7 additions, 3 deletions)
@@ -4,6 +4,7 @@

 from invoke import Responder
 
+from io import StringIO
 from utils import SSHConnection
 from .capability import Capability

@@ -28,15 +29,18 @@ def __call__(self, command: str) -> Tuple[str, bool]:
             response=self.conn.password + '\n',
         )
 
+        out = StringIO()
+
         try:
-            stdout, stderr, rc = self.conn.run(command, pty=True, warn=True, watchers=[sudo_pass], timeout=10)
+            resp = self.conn.run(command, pty=True, warn=True, out_stream=out, watchers=[sudo_pass], timeout=10)
         except Exception as e:
             print("TIMEOUT! Could we have become root?")
-            stdout, stderr, rc = "", "", -1
+        out.seek(0)
         tmp = ""
         last_line = ""
-        for line in stdout.splitlines():
+        for line in out.readlines():
             if not line.startswith('[sudo] password for ' + self.conn.username + ':'):
                 line = line.replace("\r", "")
                 last_line = line
                 tmp = tmp + line
 
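Context for this hunk: with pty=True, stdout and stderr arrive interleaved on a single stream, and routing that stream into a StringIO via invoke's out_stream keyword means partial output survives the 10-second timeout. A minimal self-contained sketch of the pattern using Fabric directly (the repo's SSHConnection wrapper is assumed to forward these keywords; host, user, and password below are placeholders, not values from this repo):

from io import StringIO

from fabric import Connection
from invoke import Responder

# Placeholder host and credentials, for illustration only.
conn = Connection("192.0.2.10", user="lowpriv", connect_kwargs={"password": "trustno1"})
sudo_pass = Responder(pattern=r"\[sudo\] password", response="trustno1\n")

out = StringIO()
try:
    # out_stream receives pty output as it is produced, so the buffer
    # keeps whatever arrived even if the command hits the timeout.
    conn.run("sudo id", pty=True, warn=True, out_stream=out,
             watchers=[sudo_pass], timeout=10)
except Exception:
    print("TIMEOUT! Could we have become root?")

out.seek(0)
for line in out.readlines():
    print(line.replace("\r", ""), end="")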
usecases/minimal/minimal.py (3 additions, 4 deletions)
@@ -1,4 +1,3 @@
-import abc
 import pathlib
 from dataclasses import dataclass, field
 from typing import Dict
@@ -7,8 +6,8 @@
 from rich.panel import Panel
 
 from capabilities import Capability, SSHRunCommand, SSHTestCredential
-from utils import SSHConnection, llm_util, ui
-from usecases.usecase import use_case, UseCase
+from utils import SSHConnection, llm_util
+from usecases.usecase import use_case
 from usecases.usecase.roundbased import RoundBasedUseCase
 from utils.cli_history import SlidingCliHistory

@@ -17,7 +16,7 @@

 @use_case("minimal_linux_privesc", "Showcase Minimal Linux Priv-Escalation")
 @dataclass
-class MinimalLinuxPrivesc(RoundBasedUseCase, UseCase, abc.ABC):
+class MinimalLinuxPrivesc(RoundBasedUseCase):
 
     conn: SSHConnection = None
 
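The base-class trim above assumes RoundBasedUseCase already derives from UseCase and abc.ABC (the roundbased.py hunk below points the same way), which makes restating them redundant. An illustrative sketch with stand-in bodies, not the repo's actual definitions:

import abc

class UseCase(abc.ABC):  # stand-in for usecases.usecase.UseCase
    pass

class RoundBasedUseCase(UseCase, abc.ABC):  # stand-in
    @abc.abstractmethod
    def perform_round(self):
        pass

class MinimalLinuxPrivesc(RoundBasedUseCase):
    def perform_round(self):
        pass

# UseCase and ABC are already in the MRO via RoundBasedUseCase:
print([c.__name__ for c in MinimalLinuxPrivesc.__mro__])
# -> ['MinimalLinuxPrivesc', 'RoundBasedUseCase', 'UseCase', 'ABC', 'object']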
usecases/usecase/roundbased.py (1 addition, 0 deletions)
@@ -27,6 +27,7 @@ def setup(self):
         pass
 
     # callback
+    @abc.abstractmethod
     def perform_round(self):
         pass
 
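With perform_round now an abc.abstractmethod, forgetting to override it fails loudly at instantiation time instead of silently running the pass body. A quick sketch (class names are illustrative):

import abc

class RoundBased(abc.ABC):
    @abc.abstractmethod
    def perform_round(self):
        pass

class Forgetful(RoundBased):  # no perform_round override
    pass

Forgetful()  # raises TypeError: Can't instantiate abstract class Forgetful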
utils/openai/openai_llm.py (7 additions, 2 deletions)
@@ -9,7 +9,6 @@
 from utils.configurable import configurable, parameter
 from utils.llm_util import LLMResult, LLM
 
-
 @configurable("openai-compatible-llm-api", "OpenAI-compatible LLM API")
 @dataclass
 class OpenAIConnection(LLM):
@@ -69,7 +68,13 @@ def get_response(self, prompt, *, retry: int = 0, **kwargs) -> LLMResult:
         return LLMResult(result, prompt, result, toc - tic, tok_query, tok_res)
 
     def encode(self, query) -> list[int]:
-        return tiktoken.encoding_for_model(self.model).encode(query)
+        # I know this is crappy for all non-OpenAI models, but sadly this
+        # has to be good enough for now
+        if self.model.startswith("gpt-"):
+            encoding = tiktoken.encoding_for_model(self.model)
+        else:
+            encoding = tiktoken.encoding_for_model("gpt-3.5-turbo")
+        return encoding.encode(query)
 
 
 @configurable("openai/gpt-3.5-turbo", "OpenAI GPT-3.5 Turbo")
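Background on the fallback: tiktoken.encoding_for_model raises KeyError for model names it cannot map to a tokenizer, which is exactly what non-OpenAI model names trigger. An equivalent sketch that catches the error instead of testing the gpt- prefix (an alternative shape, not the PR's code):

import tiktoken

def count_tokens(model: str, query: str) -> int:
    try:
        encoding = tiktoken.encoding_for_model(model)
    except KeyError:
        # Unknown (e.g. non-OpenAI) model name: approximate with a known encoding.
        encoding = tiktoken.encoding_for_model("gpt-3.5-turbo")
    return len(encoding.encode(query))

print(count_tokens("gpt-4", "hello world"))    # exact for OpenAI models
print(count_tokens("llama-3", "hello world"))  # approximated via the fallback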