llm_test.go

//go:build integration

package integration

import (
	"context"
	"testing"
	"time"

	"github.com/ollama/ollama/api"
)

// TODO - this would ideally be in the llm package, but that would require some refactoring of interfaces in the server
// package to avoid circular dependencies

var (
	stream = false
	req    = [2]api.GenerateRequest{
		{
			Model:  "orca-mini",
			Prompt: "why is the ocean blue?",
			Stream: &stream,
			Options: map[string]interface{}{
				"seed":        42,
				"temperature": 0.0,
			},
		}, {
			Model:  "orca-mini",
			Prompt: "what is the origin of the us thanksgiving holiday?",
			Stream: &stream,
			Options: map[string]interface{}{
				"seed":        42,
				"temperature": 0.0,
			},
		},
	}
	resp = [2][]string{
		[]string{"sunlight"},
		[]string{"england", "english", "massachusetts", "pilgrims"},
	}
)

func TestIntegrationSimpleOrcaMini(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second*120)
	defer cancel()
	GenerateTestHelper(ctx, t, req[0], resp[0])
}
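
GenerateTestHelper is defined elsewhere in the integration package, so it does not appear in this file. Purely as an illustration, a minimal helper with the same call shape might look like the sketch below. This is an assumption, not the package's actual implementation: it uses api.ClientFromEnvironment and api.Client.Generate, collects the non-streaming response, and passes if any of the expected substrings appears.

// Hypothetical sketch only; the real GenerateTestHelper in this package
// may differ. Requires adding "strings" to the import block above.
func generateAndCheck(ctx context.Context, t *testing.T, genReq api.GenerateRequest, anyResp []string) {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		t.Fatalf("failed to create API client: %v", err)
	}

	// Accumulate the generated text; with Stream set to false the
	// callback is invoked once with the complete response.
	var sb strings.Builder
	if err := client.Generate(ctx, &genReq, func(r api.GenerateResponse) error {
		sb.WriteString(r.Response)
		return nil
	}); err != nil {
		t.Fatalf("generate request failed: %v", err)
	}

	got := strings.ToLower(sb.String())
	for _, want := range anyResp {
		if strings.Contains(got, want) {
			return
		}
	}
	t.Errorf("none of %v found in response: %q", anyResp, got)
}

With a helper of this shape, TestIntegrationSimpleOrcaMini issues the first request and accepts any answer that mentions "sunlight". Because of the //go:build integration constraint, these tests only compile and run when the integration build tag is supplied, e.g. go test -tags integration.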