Commit b1f4f86

New Llama4 model for TogetherAI + example

1 parent b9ac83f

File tree: 2 files changed (+12, -3)

openai-core/src/main/scala/io/cequence/openaiscala/domain/NonOpenAIModelId.scala

10 additions, 2 deletions
@@ -32,10 +32,18 @@ object NonOpenAIModelId {
   val amazon_nova_micro_v1_0 = "amazon.nova-micro-v1:0"

   // Llama
-  // 400B params, 1 mil context
+
+  // 17B x 128E, 400B total params, 1M context window (500k currently supported)
+  val llama_4_maverick_17B_128E_instruct_fp8 =
+    "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8" // Together AI
+  // 17B x 16E, 109B total params, 10M token context (300k currently supported)
+  val llama_4_scout_17B_16E_instruct =
+    "meta-llama/Llama-4-Scout-17B-16E-Instruct" // Together AI
+  // 17B x 128E, 400B params, 1 mil context
   val llama4_maverick_instruct_basic = "llama4-maverick-instruct-basic" // Fireworks AI
-  // 107B params, 128k context
+  // 17B x 16E, 107B params, 128k context
   val llama4_scout_instruct_basic = "llama4-scout-instruct-basic" // Fireworks AI
+  // 17B x 16E, 107B params
   val llama_4_scout_17b_16e_instruct = "meta-llama/llama-4-scout-17b-16e-instruct" // Groq
   val llama_3_3_70b_versatile = "llama-3.3-70b-versatile" // Groq
   val llama_3_3_70b_specdec = "llama-3.3-70b-specdec" // Groq
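For quick reference, a minimal sketch (not part of the commit) of how the two new Together AI constants resolve; the object name Llama4TogetherAIModelIds is hypothetical, while the constants and their string values come straight from the diff above.

import io.cequence.openaiscala.domain.NonOpenAIModelId

object Llama4TogetherAIModelIds extends App {
  // New Together AI Llama 4 constants added in this commit
  println(NonOpenAIModelId.llama_4_maverick_17B_128E_instruct_fp8) // meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8
  println(NonOpenAIModelId.llama_4_scout_17B_16E_instruct)         // meta-llama/Llama-4-Scout-17B-16E-Instruct
}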

openai-examples/src/main/scala/io/cequence/openaiscala/examples/togetherai/TogetherAICreateChatCompletion.scala

2 additions, 1 deletion
@@ -19,8 +19,9 @@ object TogetherAICreateChatCompletion extends ExampleBase[OpenAIChatCompletionSe
     UserMessage("What is the weather like in Norway?")
   )

+  // deepseek_ai_deepseek_r1_distill_llama_70b_free // deepseek_ai_deepseek_v3
   private val modelId =
-    NonOpenAIModelId.deepseek_ai_deepseek_r1_distill_llama_70b_free // deepseek_ai_deepseek_v3
+    NonOpenAIModelId.llama_4_maverick_17B_128E_instruct_fp8

   override protected def run: Future[_] =
     service
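Below is a hedged sketch of how the updated example plausibly reads end to end. Only the messages, modelId, and the run signature appear in the diff; the imports, the service wiring (ChatCompletionProvider.togetherAI), CreateChatCompletionSettings, and the printMessageContent helper are assumptions about the surrounding example code, not something this commit shows.

package io.cequence.openaiscala.examples.togetherai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.Future

object TogetherAICreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

  // assumed: a Together AI-backed chat-completion service supplied by the examples module
  override val service: OpenAIChatCompletionService = ChatCompletionProvider.togetherAI

  private val messages = Seq(
    UserMessage("What is the weather like in Norway?")
  )

  // previously: deepseek_ai_deepseek_r1_distill_llama_70b_free // deepseek_ai_deepseek_v3
  private val modelId =
    NonOpenAIModelId.llama_4_maverick_17B_128E_instruct_fp8

  override protected def run: Future[_] =
    service
      .createChatCompletion(
        messages = messages,
        settings = CreateChatCompletionSettings(model = modelId)
      )
      .map(printMessageContent) // assumed helper from the examples base class
}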
