diff --git a/openai-client/src/main/scala/io/cequence/openaiscala/service/impl/OpenAIServiceImpl.scala b/openai-client/src/main/scala/io/cequence/openaiscala/service/impl/OpenAIServiceImpl.scala
index 71bd8f59..4e4ec953 100644
--- a/openai-client/src/main/scala/io/cequence/openaiscala/service/impl/OpenAIServiceImpl.scala
+++ b/openai-client/src/main/scala/io/cequence/openaiscala/service/impl/OpenAIServiceImpl.scala
@@ -114,8 +114,7 @@ private[service] trait OpenAIServiceImpl
       Param.assistant_id -> Some(assistantId),
       Param.thread -> thread.map(Json.toJson(_)),
       Param.instructions -> Some(instructions),
-      // TODO: tools are ignored?
-      // Param.tools -> Some(Json.toJson(tools)),
+      Param.tools -> Some(Json.toJson(tools)),
       Param.tool_resources -> toolResources.map(Json.toJson(_)),
       Param.tool_choice -> toolChoice.map(Json.toJson(_))
     )
diff --git a/openai-client/src/test/scala/io/cequence/openaiscala/JsonFormatsSpec.scala b/openai-client/src/test/scala/io/cequence/openaiscala/JsonFormatsSpec.scala
index 6b1f7a85..383abbfd 100644
--- a/openai-client/src/test/scala/io/cequence/openaiscala/JsonFormatsSpec.scala
+++ b/openai-client/src/test/scala/io/cequence/openaiscala/JsonFormatsSpec.scala
@@ -46,29 +46,22 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers {
   private val codeInterpreterResourcesJson =
     """{
       |  "code_interpreter" : {
-      |    "file_ids" : [ {
-      |      "file_id" : "file-id-1"
-      |    }, {
-      |      "file_id" : "file-id-2"
-      |    } ]
+      |    "file_ids" : [ "file-id-1", "file-id-2" ]
       |  }
       |}""".stripMargin

   private val fileSearchResourcesJson1 =
     """{
       |  "file_search" : {
-      |    "vector_store_ids" : [ "vs_xxx" ]
+      |    "vector_store_ids" : [ "vs_xxx", "vs_yyy" ]
       |  }
       |}""".stripMargin

   private val fileSearchResourcesJson2 =
     """{
       |  "file_search" : {
-      |    "vector_store_ids" : [ ],
       |    "vector_stores" : [ {
-      |      "file_ids" : [ {
-      |        "file_id" : "file-id-1"
-      |      } ],
+      |      "file_ids" : [ "file-id-1" ],
       |      "metadata" : {
       |        "key" : "value"
       |      }
@@ -116,9 +109,7 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers {

   private val attachmentJson =
     """{
-      |  "file_id" : {
-      |    "file_id" : "file-id-1"
-      |  },
+      |  "file_id" : "file-id-1",
       |  "tools" : [ {
       |    "type" : "code_interpreter"
       |  }, {
@@ -187,43 +178,38 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers {
     }

     "serialize and deserialize code interpreter's resources" in {
-      testCodec[AssistantToolResource](
+      prettyTestCodec[AssistantToolResource](
         AssistantToolResource(
           CodeInterpreterResources(
             Seq(FileId("file-id-1"), FileId("file-id-2"))
           )
         ),
-        codeInterpreterResourcesJson,
-        Pretty
+        codeInterpreterResourcesJson
       )
     }

-//    // TODO
-//    "serialize and deserialize file search's resources with vector store ids" in {
-//      testCodec[AssistantToolResource](
-//        AssistantToolResource(
-//          FileSearchResources(
-//            vectorStoreIds = Seq("vs_xxx")
-//          )
-//        ),
-//        fileSearchResourcesJson1,
-//        Pretty
-//      )
-//    }
-//
-//    // TODO
-//    "serialize and deserialize file search's resources with (new) vector stores" in {
-//      testCodec[AssistantToolResource](
-//        AssistantToolResource(
-//          FileSearchResources(
-//            vectorStoreIds = Nil,
-//            vectorStores = Seq(VectorStore(Seq(FileId("file-id-1")), Map("key" -> "value")))
-//          )
-//        ),
-//        fileSearchResourcesJson2,
-//        Pretty
-//      )
-//    }
+    "serialize and deserialize file search's resources with vector store ids" in {
+      prettyTestCodec[AssistantToolResource](
+        AssistantToolResource(
+          FileSearchResources(
+            vectorStoreIds = Seq("vs_xxx", "vs_yyy")
+          )
+        ),
+        fileSearchResourcesJson1
+      )
+    }
+
+    "serialize and deserialize file search's resources with (new) vector stores" in {
+      prettyTestCodec[AssistantToolResource](
+        AssistantToolResource(
+          FileSearchResources(
+            vectorStoreIds = Nil,
+            vectorStores = Seq(VectorStore(Seq(FileId("file-id-1")), Map("key" -> "value")))
+          )
+        ),
+        fileSearchResourcesJson2
+      )
+    }

     "serialize and deserialize run tools" in {
       testCodec[RunTool](
@@ -356,9 +342,7 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers {
       testCodec[VectorStore](
         vectorStore,
         """{
-          |  "file_ids" : [ {
-          |    "file_id" : "file-123"
-          |  } ],
+          |  "file_ids" : [ "file-123" ],
           |  "metadata" : {
           |    "key" : "value"
           |  }
@@ -493,8 +477,6 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers {

       fineTuneJob.id shouldBe "xxx"
       fineTuneJob.model shouldBe "gpt-4o-2024-08-06"
-//      fineTuneJob.created_at.toString shouldBe "Tue Sep 10 17:52:12 CEST 2024" // TODO:
-      fineTuneJob.created_at.toString shouldBe "Tue Sep 10 15:52:12 UTC 2024"
       fineTuneJob.finished_at shouldBe None
       fineTuneJob.fine_tuned_model shouldBe None
       fineTuneJob.organization_id shouldBe "org-xxx"
@@ -629,6 +611,15 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers {
     json2 shouldBe value
   }

+  private def prettyTestCodec[A](
+    value: A,
+    json: String,
+    justSemantics: Boolean = false
+  )(
+    implicit format: Format[A]
+  ): Unit =
+    testCodec(value, json, Pretty, justSemantics)
+
   private def testSerialization[A](
     value: A,
     json: String,
diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/JsonFormats.scala b/openai-core/src/main/scala/io/cequence/openaiscala/JsonFormats.scala
index 1131a849..ce868d7f 100644
--- a/openai-core/src/main/scala/io/cequence/openaiscala/JsonFormats.scala
+++ b/openai-core/src/main/scala/io/cequence/openaiscala/JsonFormats.scala
@@ -260,13 +260,7 @@ object JsonFormats {
            case None => json.as[UserSeqMessage]
          }

-      case ChatRole.Tool =>
-        json.as[ToolMessage]
-      // TODO: fixed.... originally was
-//        json.asOpt[AssistantToolMessage] match {
-//          case Some(assistantToolMessage) => assistantToolMessage
-//          case None => json.as[ToolMessage]
-//        }
+      case ChatRole.Tool => json.as[ToolMessage]

       case ChatRole.Assistant =>
         // if contains tool_calls, then it is AssistantToolMessage
@@ -650,7 +644,13 @@ object JsonFormats {
   implicit lazy val fileSearchResourcesReads
     : Reads[AssistantToolResource.FileSearchResources] = {
     implicit val config: JsonConfiguration = JsonConfiguration(JsonNaming.SnakeCase)
-    Json.reads[AssistantToolResource.FileSearchResources]
+
+    (
+      (__ \ "vector_store_ids").readNullable[Seq[String]].map(_.getOrElse(Seq.empty)) and
+        (__ \ "vector_stores")
+          .readNullable[Seq[AssistantToolResource.VectorStore]]
+          .map(_.getOrElse(Seq.empty))
+    )(AssistantToolResource.FileSearchResources.apply _)
   }

   implicit lazy val assistantToolResourceReads: Reads[AssistantToolResource] = (
@@ -698,10 +698,10 @@ object JsonFormats {
     (__ \ "metadata").read[Map[String, String]].orElse(Reads.pure(Map()))
   )(Thread.apply _)

-//  implicit lazy val threadWrites: Writes[Thread] = Json.writes[Thread]
-
-  implicit lazy val fileIdFormat: Format[FileId] =
-    Json.format[FileId]
+  implicit val fileIdFormat: Format[FileId] = Format(
+    Reads.StringReads.map(FileId.apply),
+    Writes[FileId](fileId => JsString(fileId.file_id))
+  )

   implicit lazy val threadMessageContentTypeFormat: Format[ThreadMessageContentType] =
     enumFormat[ThreadMessageContentType](
diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/domain/AssistantToolResource.scala b/openai-core/src/main/scala/io/cequence/openaiscala/domain/AssistantToolResource.scala
index 456a9a7e..f0788199 100644
--- a/openai-core/src/main/scala/io/cequence/openaiscala/domain/AssistantToolResource.scala
+++ b/openai-core/src/main/scala/io/cequence/openaiscala/domain/AssistantToolResource.scala
@@ -7,6 +7,8 @@ final case class AssistantToolResource(
 object AssistantToolResource {

+  def empty: AssistantToolResource = AssistantToolResource(None, None)
+
   def apply(): AssistantToolResource = AssistantToolResource(None, None)

   def apply(codeInterpreterResources: CodeInterpreterResources): AssistantToolResource =
     AssistantToolResource(Some(codeInterpreterResources), None)
diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/domain/ThreadAndRun.scala b/openai-core/src/main/scala/io/cequence/openaiscala/domain/ThreadAndRun.scala
index c9de6272..1f27dd8d 100644
--- a/openai-core/src/main/scala/io/cequence/openaiscala/domain/ThreadAndRun.scala
+++ b/openai-core/src/main/scala/io/cequence/openaiscala/domain/ThreadAndRun.scala
@@ -5,7 +5,7 @@ import io.cequence.openaiscala.domain.ThreadAndRun.Content.ContentBlock
 final case class ThreadAndRun(
   // TODO: check whether the message model is restrictive enough
   messages: Seq[ThreadAndRun.Message],
-  toolResources: Seq[AssistantToolResource],
+  toolResources: AssistantToolResource,
   metadata: Map[String, Any]
 )

diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala b/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala
index 297a399b..40e3587d 100644
--- a/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala
+++ b/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala
@@ -1227,12 +1227,12 @@ trait OpenAIService extends OpenAICoreService {
   /**
    * Create a vector store.
    *
-   * @param file_ids
+   * @param fileIds
    *   A list of File IDs that the vector store should use (optional). Useful for tools like
    *   file_search that can access files.
    * @param name
    *   The name of the vector store.
-   * @param expires_after
+   * @param metadata
    *   The expiration policy for a vector store. TODO maximum of 64 characters long and values
    *   can be a maximum of 512 characters long.
    * @return
@@ -1244,7 +1244,7 @@ trait OpenAIService extends OpenAICoreService {
   def createVectorStore(
     fileIds: Seq[String] = Nil,
     name: Option[String] = None,
-    metadata: Map[String, Any] = Map() // TODO: expires after
+    metadata: Map[String, Any] = Map.empty // TODO: expires after
   ): Future[VectorStore]

   /**
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala
index 8da2c72d..65fb8e79 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala
@@ -1,6 +1,9 @@
 package io.cequence.openaiscala.examples

-import io.cequence.openaiscala.domain.settings.{ChatCompletionResponseFormatType, CreateChatCompletionSettings}
+import io.cequence.openaiscala.domain.settings.{
+  ChatCompletionResponseFormatType,
+  CreateChatCompletionSettings
+}
 import io.cequence.openaiscala.domain._

 import scala.concurrent.Future
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateThreadAndRun.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateThreadAndRun.scala
index 19cd2bad..954de0a6 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateThreadAndRun.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateThreadAndRun.scala
@@ -1,7 +1,11 @@
 package io.cequence.openaiscala.examples

+import io.cequence.openaiscala.domain.AssistantToolResource.{
+  CodeInterpreterResources,
+  FileSearchResources
+}
 import io.cequence.openaiscala.domain.ThreadAndRun.Message.{AssistantMessage, UserMessage}
-import io.cequence.openaiscala.domain.ThreadAndRun
+import io.cequence.openaiscala.domain.{AssistantToolResource, FileId, ThreadAndRun}

 import scala.concurrent.Future

@@ -10,7 +14,7 @@ object CreateThreadAndRun extends Example {
   override protected def run: Future[Unit] =
     for {
       thread <- service.createThreadAndRun(
-        assistantId = "assistant-abc123",
+        assistantId = "asst_GEKjNc6lewoiulFt32mWSqKl",
         thread = Some(
           ThreadAndRun(
             messages = Seq(
@@ -20,7 +24,58 @@ object CreateThreadAndRun extends Example {
               ),
               UserMessage("Could you please provide even simpler explanation?")
             ),
-            toolResources = Seq.empty,
+            toolResources = AssistantToolResource.empty,
+            metadata = Map.empty
+          )
+        ),
+        stream = false
+      )
+
+//      Vector Store: CUSTOMER RELATIONSHIP AGREEMENT[vs_sRwpBFIFYyfWQ3og8X9CQs3A] (3 files)
+//        - file-y5Q8IgmBvQ547z7vi9PDOzZQ (vector_store.file)
+//        - file-9pb59EqrMCRpDxivmDQ6AxqW (vector_store.file)
+//        - file-DQQrxLykRzcA54rqMyyfygyV (vector_store.file)
+
+      threadWithCodeInterpreter <- service.createThreadAndRun(
+        assistantId = "asst_GEKjNc6lewoiulFt32mWSqKl",
+        thread = Some(
+          ThreadAndRun(
+            messages = Seq(
+              UserMessage("Tell me about usage of FP in Cequence."),
+              AssistantMessage(
+                "Cequence does use functional programming."
+              ),
+              UserMessage("Could you please provide more comprehensive answer?")
+            ),
+            toolResources = AssistantToolResource(
+              CodeInterpreterResources(fileIds =
+                Seq(
+                  FileId("file-y5Q8IgmBvQ547z7vi9PDOzZQ"),
+                  FileId("file-9pb59EqrMCRpDxivmDQ6AxqW"),
+                  FileId("file-DQQrxLykRzcA54rqMyyfygyV")
+                )
+              )
+            ),
+            metadata = Map.empty
+          )
+        ),
+        stream = false
+      )
+
+      threadWithFileSearch <- service.createThreadAndRun(
+        assistantId = "asst_GEKjNc6lewoiulFt32mWSqKl",
+        thread = Some(
+          ThreadAndRun(
+            messages = Seq(
+              UserMessage("Tell me about usage of FP in Cequence."),
+              AssistantMessage(
+                "Cequence does use functional programming."
+              ),
+              UserMessage("Could you please provide more comprehensive answer?")
+            ),
+            toolResources = AssistantToolResource(
+              FileSearchResources(vectorStoreIds = Seq("vs_sRwpBFIFYyfWQ3og8X9CQs3A"))
+            ),
             metadata = Map.empty
           )
         ),
@@ -28,6 +83,8 @@ object CreateThreadAndRun extends Example {
       )
     } yield {
       println(thread)
+      println(threadWithCodeInterpreter)
+      println(threadWithFileSearch)
     }

 }
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/ListVectorStoreFiles.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/ListVectorStoreFiles.scala
index ec710184..f09da3aa 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/ListVectorStoreFiles.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/ListVectorStoreFiles.scala
@@ -1,5 +1,7 @@
 package io.cequence.openaiscala.examples

+import io.cequence.openaiscala.domain.VectorStoreFile
+
 import scala.concurrent.Future

 object ListVectorStoreFiles extends Example {
@@ -9,14 +11,20 @@ object ListVectorStoreFiles extends Example {
       vectorStores <- service.listVectorStores()

       vectorStoreChunks = vectorStores.sliding(10, 10).toList
-      _ = vectorStoreChunks.map(_.map(x => (x.id, x.name))).foreach(println)
-      files <- Future.traverse(vectorStoreChunks) { vectorStoresChunk =>
+      vsAndFiles <- Future.traverse(vectorStoreChunks) { vectorStoresChunk =>
         Future.traverse(vectorStoresChunk) { vectorStore =>
-          service.listVectorStoreFiles(vectorStore.id).map(file => (vectorStore.name, file))
+          service
+            .listVectorStoreFiles(vectorStore.id)
+            .map((files: Seq[VectorStoreFile]) => (vectorStore, files))
         }
       }
     } yield {
-      files.foreach(println)
+      vsAndFiles.flatten.foreach { case (vs, files) =>
+        println(s"Vector Store: ${vs.name}[${vs.id}] (${files.length} files)")
+        files.foreach { file =>
+          println(s"  - ${file.id} (${file.`object`})")
+        }
+      }
     }

 }
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStore.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStore.scala
index c8a82a6c..5c8f953a 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStore.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStore.scala
@@ -7,7 +7,7 @@ object RetrieveVectorStore extends Example {
   override protected def run: Future[_] =
     for {
       assistant <- service.retrieveVectorStore(
-        vectorStoreId = "vs_xxx"
+        vectorStoreId = "vs_9pl9kTn3ggjzDKYX5AT9JuIG"
       )
     } yield {
       println(assistant)
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStoreFile.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStoreFile.scala
index 0edd5125..db195524 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStoreFile.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/RetrieveVectorStoreFile.scala
@@ -9,7 +9,7 @@ object RetrieveVectorStoreFile extends Example {
   override protected def run: Future[_] =
     for {
       assistant <- service.retrieveVectorStoreFile(
-        vectorStoreId = "vs_xxx",
+        vectorStoreId = "vs_9pl9kTn3ggjzDKYX5AT9JuIG",
         fileId = FileId("vsf_xxx")
       )
     } yield {