ollama

package module
v0.33.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Aug 20, 2024 License: MIT Imports: 9 Imported by: 0

Documentation

Index

Examples

Constants

View Source
const DefaultOllamaImage = "ollama/ollama:0.1.25"

Deprecated: it will be removed in the next major version.

Variables

This section is empty.

Functions

This section is empty.

Types

type OllamaContainer

// OllamaContainer represents the Ollama container type used in the module.
// It embeds testcontainers.Container, so all generic container operations
// (Exec, State, Terminate, ...) are available on it directly.
type OllamaContainer struct {
	testcontainers.Container
}

OllamaContainer represents the Ollama container type used in the module

func Run added in v0.32.0

func Run(ctx context.Context, img string, opts ...testcontainers.ContainerCustomizer) (*OllamaContainer, error)

Run creates an instance of the Ollama container type

Example
package main

import (
	"context"
	"fmt"
	"log"

	tcollama "github.com/testcontainers/testcontainers-go/modules/ollama"
)

func main() {
	// runOllamaContainer {
	ctx := context.Background()

	ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.1.25")
	if err != nil {
		log.Fatalf("failed to start container: %s", err)
	}

	// Clean up the container
	defer func() {
		if err := ollamaContainer.Terminate(ctx); err != nil {
			log.Fatalf("failed to terminate container: %s", err) // nolint:gocritic
		}
	}()
	// }

	state, err := ollamaContainer.State(ctx)
	if err != nil {
		log.Fatalf("failed to get container state: %s", err) // nolint:gocritic
	}

	fmt.Println(state.Running)

}
Output:

true
Example (WithModel_llama2_http)
package main

import (
	"context"
	"fmt"
	"log"
	"net/http"
	"strings"

	tcollama "github.com/testcontainers/testcontainers-go/modules/ollama"
)

func main() {
	// withHTTPModelLlama2 {
	ctx := context.Background()

	ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.1.25")
	if err != nil {
		log.Fatalf("failed to start container: %s", err)
	}
	defer func() {
		if err := ollamaContainer.Terminate(ctx); err != nil {
			log.Fatalf("failed to terminate container: %s", err) // nolint:gocritic
		}
	}()

	model := "llama2"

	_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "pull", model})
	if err != nil {
		log.Fatalf("failed to pull model %s: %s", model, err) // nolint:gocritic
	}

	_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model})
	if err != nil {
		log.Fatalf("failed to run model %s: %s", model, err) // nolint:gocritic
	}

	connectionStr, err := ollamaContainer.ConnectionString(ctx)
	if err != nil {
		log.Fatalf("failed to get connection string: %s", err) // nolint:gocritic
	}

	httpClient := &http.Client{}

	// generate a response
	payload := `{
	"model": "llama2",
	"prompt":"Why is the sky blue?"
}`

	req, err := http.NewRequest("POST", fmt.Sprintf("%s/api/generate", connectionStr), strings.NewReader(payload))
	if err != nil {
		log.Fatalf("failed to create request: %s", err) // nolint:gocritic
	}

	resp, err := httpClient.Do(req)
	if err != nil {
		log.Fatalf("failed to get response: %s", err) // nolint:gocritic
	}
	// }

	fmt.Println(resp.StatusCode)

	// Intentionally not asserting the output, as we don't want to run this example in the tests.
}
Output:

Example (WithModel_llama2_langchain)
package main

import (
	"context"
	"fmt"
	"log"
	"strings"

	"github.com/tmc/langchaingo/llms"
	langchainollama "github.com/tmc/langchaingo/llms/ollama"

	tcollama "github.com/testcontainers/testcontainers-go/modules/ollama"
)

func main() {
	// withLangchainModelLlama2 {
	ctx := context.Background()

	ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.1.25")
	if err != nil {
		log.Fatalf("failed to start container: %s", err)
	}
	defer func() {
		if err := ollamaContainer.Terminate(ctx); err != nil {
			log.Fatalf("failed to terminate container: %s", err) // nolint:gocritic
		}
	}()

	model := "llama2"

	_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "pull", model})
	if err != nil {
		log.Fatalf("failed to pull model %s: %s", model, err) // nolint:gocritic
	}

	_, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model})
	if err != nil {
		log.Fatalf("failed to run model %s: %s", model, err) // nolint:gocritic
	}

	connectionStr, err := ollamaContainer.ConnectionString(ctx)
	if err != nil {
		log.Fatalf("failed to get connection string: %s", err) // nolint:gocritic
	}

	var llm *langchainollama.LLM
	if llm, err = langchainollama.New(
		langchainollama.WithModel(model),
		langchainollama.WithServerURL(connectionStr),
	); err != nil {
		log.Fatalf("failed to create langchain ollama: %s", err) // nolint:gocritic
	}

	completion, err := llm.Call(
		context.Background(),
		"how can Testcontainers help with testing?",
		llms.WithSeed(42),         // the lower the seed, the more deterministic the completion
		llms.WithTemperature(0.0), // the lower the temperature, the more creative the completion
	)
	if err != nil {
		log.Fatalf("failed to create langchain ollama: %s", err) // nolint:gocritic
	}

	words := []string{
		"easy", "isolation", "consistency",
	}
	lwCompletion := strings.ToLower(completion)

	for _, word := range words {
		if strings.Contains(lwCompletion, word) {
			fmt.Println(true)
		}
	}

	// }

	// Intentionally not asserting the output, as we don't want to run this example in the tests.
}
Output:

func RunContainer deprecated

func RunContainer(ctx context.Context, opts ...testcontainers.ContainerCustomizer) (*OllamaContainer, error)

Deprecated: use Run instead. RunContainer creates an instance of the Ollama container type.

func (*OllamaContainer) Commit

func (c *OllamaContainer) Commit(ctx context.Context, targetImage string) error

Commit commits the current file system changes in the container into a new target image. The target image name should be unique, as this method will commit the current state of the container into a new image with the given name, so it doesn't override existing images. It should be used for creating an image that contains a loaded model.

func (*OllamaContainer) ConnectionString

func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error)

ConnectionString returns the connection string for the Ollama container, using the default port 11434.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL