diff --git a/llm_sandbox/docker.py b/llm_sandbox/docker.py
index 4c66c59..80d73ef 100644
--- a/llm_sandbox/docker.py
+++ b/llm_sandbox/docker.py
@@ -7,6 +7,7 @@
 
 from docker.models.images import Image
 from docker.models.containers import Container
+from docker.types import Mount
 from llm_sandbox.utils import (
     image_exists,
     get_libraries_installation_command,
@@ -31,6 +32,7 @@ def __init__(
         lang: str = SupportedLanguage.PYTHON,
         keep_template: bool = False,
         verbose: bool = False,
+        mounts: Optional[list[Mount]] = None,
     ):
         """
         Create a new sandbox session
@@ -71,6 +73,7 @@ def __init__(
         self.keep_template = keep_template
         self.is_create_template: bool = False
         self.verbose = verbose
+        self.mounts = mounts
 
     def open(self):
         warning_str = (
@@ -105,7 +108,9 @@ def open(self):
             if self.verbose:
                 print(f"Using image {self.image.tags[-1]}")
 
-        self.container = self.client.containers.run(self.image, detach=True, tty=True)
+        self.container = self.client.containers.run(
+            self.image, detach=True, tty=True, mounts=self.mounts
+        )
 
     def close(self):
         if self.container:
diff --git a/llm_sandbox/micromamba.py b/llm_sandbox/micromamba.py
new file mode 100644
index 0000000..9228952
--- /dev/null
+++ b/llm_sandbox/micromamba.py
@@ -0,0 +1,71 @@
+from typing import Optional
+import docker
+from llm_sandbox.session import SandboxDockerSession
+from llm_sandbox.docker import ConsoleOutput
+from llm_sandbox.const import SupportedLanguage
+from docker.types import Mount
+
+
+class MicromambaSession(SandboxDockerSession):
+    """
+    SandboxDockerSession does not allow activating a micromamba environment;
+    this subclass adds that capability so LLM agents can install conda dependencies.
+    """
+
+    def __init__(
+        self,
+        client: Optional[docker.DockerClient] = None,
+        image: Optional[str] = "mambaorg/micromamba:latest",
+        dockerfile: Optional[str] = None,
+        lang: str = SupportedLanguage.PYTHON,
+        keep_template: bool = False,
+        verbose: bool = False,
+        mounts: Optional[list[Mount]] = None,
+        environment: str = "base",
+    ):
+        super().__init__(
+            client=client,
+            image=image,
+            dockerfile=dockerfile,
+            lang=lang,
+            keep_template=keep_template,
+            verbose=verbose,
+            mounts=mounts,
+        )
+        self.environment = environment
+
+    def execute_command(
+        self, command: Optional[str], workdir: Optional[str] = None
+    ) -> ConsoleOutput:
+        if not command:
+            raise ValueError("Command cannot be empty")
+
+        if not self.container:
+            raise RuntimeError(
+                "Session is not open. Please call open() method before executing commands."
+            )
+        command = f"micromamba run -n {self.environment} {command}"
+
+        if self.verbose:
+            print(f"Executing command: {command}")
+
+        if workdir:
+            exit_code, exec_log = self.container.exec_run(
+                command, stream=True, tty=True, workdir=workdir
+            )
+        else:
+            exit_code, exec_log = self.container.exec_run(
+                command, stream=True, tty=True
+            )
+
+        output = ""
+        if self.verbose:
+            print("Output:", end=" ")
+
+        for chunk in exec_log:
+            chunk_str = chunk.decode("utf-8")
+            output += chunk_str
+            if self.verbose:
+                print(chunk_str, end="")
+
+        return ConsoleOutput(output)
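
For reference, a minimal usage sketch of the new mounts parameter and MicromambaSession (not part of the patch). The bind-mount source/target paths and the command being executed are illustrative assumptions; everything else follows the API added above:

# Usage sketch only; the host path /tmp/sandbox-data and the command are hypothetical.
from docker.types import Mount
from llm_sandbox.micromamba import MicromambaSession

# Bind-mount a host directory read-only into the container.
data_mount = Mount(
    target="/data", source="/tmp/sandbox-data", type="bind", read_only=True
)

session = MicromambaSession(
    image="mambaorg/micromamba:latest",
    mounts=[data_mount],
    environment="base",  # micromamba environment used to wrap every command
    verbose=True,
)
session.open()
# execute_command() prefixes the command with `micromamba run -n base ...`
result = session.execute_command("python --version", workdir="/data")
session.close()

Passing docker.types.Mount objects straight through to containers.run() keeps the sandbox API aligned with the docker SDK instead of introducing a custom mount abstraction.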