diff options
author | Harald Musum <musum@vespa.ai> | 2024-04-22 21:19:16 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-04-22 21:19:16 +0200 |
commit | 1c10a11d4aface7b168be0de1d18801f4c896697 (patch) | |
tree | df7f3584bd868c8e166f34b9870ebe52080cd5d2 | |
parent | 796b99170deed76263e24be1af1d5c2762e9366c (diff) |
Revert "Specifically set number of threads to use in llama unit test"
-rw-r--r-- | model-integration/src/test/java/ai/vespa/llm/clients/LocalLLMTest.java | 9 |
1 file changed, 5 insertions, 4 deletions
diff --git a/model-integration/src/test/java/ai/vespa/llm/clients/LocalLLMTest.java b/model-integration/src/test/java/ai/vespa/llm/clients/LocalLLMTest.java index 95bcfb985bd..a3b260f3fb5 100644 --- a/model-integration/src/test/java/ai/vespa/llm/clients/LocalLLMTest.java +++ b/model-integration/src/test/java/ai/vespa/llm/clients/LocalLLMTest.java @@ -6,6 +6,7 @@ import ai.vespa.llm.completion.Completion; import ai.vespa.llm.completion.Prompt; import ai.vespa.llm.completion.StringPrompt; import com.yahoo.config.ModelReference; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import java.util.ArrayList; @@ -32,10 +33,10 @@ public class LocalLLMTest { private static Prompt prompt = StringPrompt.from("A random prompt"); @Test + @Disabled public void testGeneration() { var config = new LlmLocalClientConfig.Builder() .parallelRequests(1) - .threads(1) .model(ModelReference.valueOf(model)); var llm = new LocalLLM(config.build()); @@ -49,12 +50,12 @@ public class LocalLLMTest { } @Test + @Disabled public void testAsyncGeneration() { var sb = new StringBuilder(); var tokenCount = new AtomicInteger(0); var config = new LlmLocalClientConfig.Builder() .parallelRequests(1) - .threads(1) .model(ModelReference.valueOf(model)); var llm = new LocalLLM(config.build()); @@ -77,6 +78,7 @@ public class LocalLLMTest { } @Test + @Disabled public void testParallelGeneration() { var prompts = testPrompts(); var promptsToUse = prompts.size(); @@ -88,7 +90,6 @@ public class LocalLLMTest { var config = new LlmLocalClientConfig.Builder() .parallelRequests(parallelRequests) - .threads(1) .model(ModelReference.valueOf(model)); var llm = new LocalLLM(config.build()); @@ -116,6 +117,7 @@ public class LocalLLMTest { } @Test + @Disabled public void testRejection() { var prompts = testPrompts(); var promptsToUse = prompts.size(); @@ -128,7 +130,6 @@ public class LocalLLMTest { var config = new LlmLocalClientConfig.Builder() .parallelRequests(parallelRequests) - .threads(1) 
.maxQueueSize(additionalQueue) .model(ModelReference.valueOf(model)); var llm = new LocalLLM(config.build()); |