diff --git a/.vscode/settings.json b/.vscode/settings.json
index 1716d01..2e4e115 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,4 +1,4 @@
 {
-  "cSpell.words": ["Epoches", "openai"],
+  "cSpell.words": ["Epoches", "openai", "Probs"],
   "editor.acceptSuggestionOnEnter": "off"
 }
diff --git a/example/lib/chat_completion_with_log_probs_example.dart b/example/lib/chat_completion_with_log_probs_example.dart
new file mode 100644
index 0000000..c38251b
--- /dev/null
+++ b/example/lib/chat_completion_with_log_probs_example.dart
@@ -0,0 +1,46 @@
+import 'package:dart_openai/dart_openai.dart';
+
+import 'env/env.dart';
+
+void main() async {
+  // Set the OpenAI API key from the .env file.
+  OpenAI.apiKey = Env.apiKey;
+
+  final systemMessage = OpenAIChatCompletionChoiceMessageModel(
+    content: [
+      OpenAIChatCompletionChoiceMessageContentItemModel.text(
+        "return any message you are given as JSON.",
+      ),
+    ],
+    role: OpenAIChatMessageRole.system,
+  );
+
+  final userMessage = OpenAIChatCompletionChoiceMessageModel(
+    content: [
+      OpenAIChatCompletionChoiceMessageContentItemModel.text(
+        "Hello, I am a chatbot created by OpenAI. How are you today?",
+      ),
+    ],
+    role: OpenAIChatMessageRole.user,
+    name: "anas",
+  );
+
+  final requestMessages = [
+    systemMessage,
+    userMessage,
+  ];
+
+  OpenAIChatCompletionModel chatCompletion = await OpenAI.instance.chat.create(
+    model: "gpt-3.5-turbo-1106",
+    responseFormat: {"type": "json_object"},
+    seed: 6,
+    messages: requestMessages,
+    temperature: 0.2,
+    maxTokens: 500,
+    logprobs: true,
+    topLogprobs: 2,
+  );
+
+  // Print the byte values of the first token's log-prob entry.
+  print(chatCompletion.choices.first.logprobs?.content.first.bytes);
+}
diff --git a/lib/src/core/models/chat/sub_models/choices/choices.dart b/lib/src/core/models/chat/sub_models/choices/choices.dart
index b47521b..c547daf 100644
--- a/lib/src/core/models/chat/sub_models/choices/choices.dart
+++ b/lib/src/core/models/chat/sub_models/choices/choices.dart
@@ -1,3 +1,4 @@
+import 'sub_models/log_probs/log_probs.dart';
 import 'sub_models/message.dart';
 
 /// {@template openai_chat_completion_choice}
@@ -15,6 +16,9 @@ final class OpenAIChatCompletionChoiceModel {
   /// The [finishReason] of the choice.
   final String? finishReason;
 
+  /// The log probability information of the choice, if it was requested.
+  final OpenAIChatCompletionChoiceLogProbsModel? logprobs;
+
   /// Whether the choice has a finish reason.
   bool get haveFinishReason => finishReason != null;
 
@@ -28,6 +32,7 @@ final class OpenAIChatCompletionChoiceModel {
     required this.index,
     required this.message,
     required this.finishReason,
+    required this.logprobs,
   });
 
   /// This is used to convert a [Map] object to an [OpenAIChatCompletionChoiceModel] object.
@@ -39,6 +44,9 @@ final class OpenAIChatCompletionChoiceModel {
           : int.tryParse(json['index'].toString()) ?? json['index'],
       message: OpenAIChatCompletionChoiceMessageModel.fromMap(json['message']),
       finishReason: json['finish_reason'],
+      logprobs: json['logprobs'] != null
+          ? OpenAIChatCompletionChoiceLogProbsModel.fromMap(json['logprobs'])
+          : null,
     );
   }
 
@@ -48,6 +56,7 @@ final class OpenAIChatCompletionChoiceModel {
       "index": index,
       "message": message.toMap(),
       "finish_reason": finishReason,
+      "logprobs": logprobs?.toMap(),
     };
   }
diff --git a/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/log_probs.dart b/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/log_probs.dart
new file mode 100644
index 0000000..5ce90cf
--- /dev/null
+++ b/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/log_probs.dart
@@ -0,0 +1,33 @@
+// ignore_for_file: public_member_api_docs, sort_constructors_first
+import 'sub_models/content.dart';
+
+/// Log probability information for a chat completion choice.
+class OpenAIChatCompletionChoiceLogProbsModel {
+  OpenAIChatCompletionChoiceLogProbsModel({
+    required this.content,
+  });
+
+  /// The per-token log probability entries of the message content.
+  final List<OpenAIChatCompletionChoiceLogProbsContentModel> content;
+
+  factory OpenAIChatCompletionChoiceLogProbsModel.fromMap(
+    Map<String, dynamic> json,
+  ) {
+    return OpenAIChatCompletionChoiceLogProbsModel(
+      content: json["content"] != null
+          ? List<OpenAIChatCompletionChoiceLogProbsContentModel>.from(
+              json["content"].map(
+                (x) =>
+                    OpenAIChatCompletionChoiceLogProbsContentModel.fromMap(x),
+              ),
+            )
+          : [],
+    );
+  }
+
+  Map<String, dynamic> toMap() {
+    return {
+      "content": content.map((x) => x.toMap()).toList(),
+    };
+  }
+}
diff --git a/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/sub_models/content.dart b/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/sub_models/content.dart
new file mode 100644
index 0000000..6751903
--- /dev/null
+++ b/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/sub_models/content.dart
@@ -0,0 +1,50 @@
+import 'top_prob.dart';
+
+/// A single token of the message content, with its log probability.
+class OpenAIChatCompletionChoiceLogProbsContentModel {
+  /// The token this entry describes.
+  final String? token;
+
+  /// The log probability of the token.
+  final double? logprob;
+
+  /// The UTF-8 byte values of the token, when applicable.
+  final List<int>? bytes;
+
+  /// The most likely tokens at this position, with their log probabilities.
+  final List<OpenAIChatCompletionChoiceTopLogProbsContentModel>? topLogprobs;
+
+  OpenAIChatCompletionChoiceLogProbsContentModel({
+    this.token,
+    this.logprob,
+    this.bytes,
+    this.topLogprobs,
+  });
+
+  factory OpenAIChatCompletionChoiceLogProbsContentModel.fromMap(
+    Map<String, dynamic> map,
+  ) {
+    return OpenAIChatCompletionChoiceLogProbsContentModel(
+      token: map['token'],
+      logprob: (map['logprob'] as num?)?.toDouble(),
+      // Guard against absent fields so parsing does not throw on null.
+      bytes: map['bytes'] != null ? List<int>.from(map['bytes']) : null,
+      topLogprobs: map['top_logprobs'] != null
+          ? List<OpenAIChatCompletionChoiceTopLogProbsContentModel>.from(
+              map['top_logprobs'].map(
+                (x) =>
+                    OpenAIChatCompletionChoiceTopLogProbsContentModel.fromMap(x),
+              ),
+            )
+          : null,
+    );
+  }
+
+  Map<String, dynamic> toMap() {
+    return {
+      'token': token,
+      'logprob': logprob,
+      'bytes': bytes,
+      'top_logprobs': topLogprobs?.map((x) => x.toMap()).toList(),
+    };
+  }
+}
diff --git a/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/sub_models/top_prob.dart b/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/sub_models/top_prob.dart
new file mode 100644
index 0000000..78e7d1b
--- /dev/null
+++ b/lib/src/core/models/chat/sub_models/choices/sub_models/log_probs/sub_models/top_prob.dart
@@ -0,0 +1,30 @@
+import 'content.dart';
+
+/// One of the most likely alternative tokens, with its log probability.
+class OpenAIChatCompletionChoiceTopLogProbsContentModel
+    extends OpenAIChatCompletionChoiceLogProbsContentModel {
+  OpenAIChatCompletionChoiceTopLogProbsContentModel({
+    super.token,
+    super.logprob,
+    super.bytes,
+  });
+
+  factory OpenAIChatCompletionChoiceTopLogProbsContentModel.fromMap(
+    Map<String, dynamic> map,
+  ) {
+    return OpenAIChatCompletionChoiceTopLogProbsContentModel(
+      token: map['token'],
+      logprob: (map['logprob'] as num?)?.toDouble(),
+      bytes: map['bytes'] != null ? List<int>.from(map['bytes']) : null,
+    );
+  }
+
+  @override
+  Map<String, dynamic> toMap() {
+    return {
+      'token': token,
+      'logprob': logprob,
+      'bytes': bytes,
+    };
+  }
+}
diff --git a/lib/src/instance/chat/chat.dart b/lib/src/instance/chat/chat.dart
index 5ec58c7..d9504e7 100644
--- a/lib/src/instance/chat/chat.dart
+++ b/lib/src/instance/chat/chat.dart
@@ -82,6 +82,8 @@ interface class OpenAIChat implements OpenAIChatBase {
     String? user,
     Map<String, dynamic>? responseFormat,
     int? seed,
+    bool? logprobs,
+    int? topLogprobs,
     http.Client? client,
   }) async {
     return await OpenAINetworkingClient.post(
@@ -103,6 +105,8 @@ interface class OpenAIChat implements OpenAIChatBase {
         if (user != null) "user": user,
         if (seed != null) "seed": seed,
         if (responseFormat != null) "response_format": responseFormat,
+        if (logprobs != null) "logprobs": logprobs,
+        if (topLogprobs != null) "top_logprobs": topLogprobs,
       },
       onSuccess: (Map<String, dynamic> response) {
         return OpenAIChatCompletionModel.fromMap(response);
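
Usage sketch (illustrative, not part of the diff): a minimal end-to-end read of the new log-prob surface. The API key placeholder and the prompt are invented for this sketch; the logprobs / topLogprobs parameters and the model fields are the ones added above.

    import 'package:dart_openai/dart_openai.dart';

    Future<void> main() async {
      // Placeholder key for the sketch; load it from your own config instead.
      OpenAI.apiKey = "YOUR_API_KEY";

      final completion = await OpenAI.instance.chat.create(
        model: "gpt-3.5-turbo-1106",
        messages: [
          OpenAIChatCompletionChoiceMessageModel(
            content: [
              OpenAIChatCompletionChoiceMessageContentItemModel.text("Say hi."),
            ],
            role: OpenAIChatMessageRole.user,
          ),
        ],
        logprobs: true, // new: return per-token log probabilities.
        topLogprobs: 2, // new: include the 2 most likely alternatives per token.
      );

      // Each content entry pairs a generated token with its log probability;
      // topLogprobs lists the most likely alternatives at that position.
      final entries = completion.choices.first.logprobs?.content ?? const [];
      for (final entry in entries) {
        print("${entry.token}: ${entry.logprob}");
        for (final alt in entry.topLogprobs ?? const []) {
          print("  alternative ${alt.token}: ${alt.logprob}");
        }
      }
    }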