[ Edit ] testing changes for the chat completion stream: add completers that wait on the streaming tests, so the test run does not move on while the stream methods are still running in parallel

Anas Fikhi
2024-02-21 23:27:06 +01:00
parent 6ffb1166b9
commit 7c4712a93a
4 changed files with 42 additions and 13 deletions
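The heart of the change is the synchronization pattern used in the tests below: an async test body that merely subscribes to a stream returns immediately, so the runner can finish the test while the listen callbacks are still firing. Here is a minimal sketch of the pattern, assuming package:test and a made-up countStream helper (both hypothetical, not part of this commit):

import 'dart:async';

import 'package:test/test.dart';

/// Hypothetical stream, standing in for an OpenAI completion stream.
Stream<int> countStream(int n) async* {
  for (var i = 0; i < n; i++) {
    yield i;
  }
}

void main() {
  test('waits for the stream before finishing', () async {
    final completer = Completer<bool>();

    countStream(3).listen(
      (event) => expect(event, isA<int>()),
      // Signal the test body once the stream is exhausted.
      onDone: () => completer.complete(true),
    );

    // Without this await, the test body would return (and the test
    // would pass) before a single event had been delivered.
    await completer.future;
  });
}

package:test's expectAsync wrappers would be an alternative way to keep the test alive, but a Completer also gives the body a plain Future to await.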


@@ -60,8 +60,8 @@ Stream<ItemBodyCompletion> itemBodyCompletionStream(
   bodyCompletion.listen((event) {
     final content = event.choices[0].delta.content;
-    return content != null && content.first.text != null
-        ? stream.add(ItemBodyCompletion(body: content.first.text!))
+    return content != null && content.first?.text != null
+        ? stream.add(ItemBodyCompletion(body: content.first!.text!))
         : () {};
   }, onDone: () {
     stream.close();
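The change above only compiles because of how Dart null safety treats nullable list elements: once content is a list with nullable items (see the model change further down), content.first has a nullable static type, so it must be read with ?. and, after a successful check, dereferenced with !. A self-contained sketch using a simplified, hypothetical ContentItem:

// Simplified, hypothetical stand-in for
// OpenAIChatCompletionChoiceMessageContentItemModel.
class ContentItem {
  final String? text;
  ContentItem(this.text);
}

void main() {
  final List<ContentItem?>? content = [ContentItem('hello')];

  // `content.first` has static type `ContentItem?`, so it must be
  // read with `?.`; once the check has passed, `!` is sound.
  if (content != null && content.first?.text != null) {
    print(content.first!.text!); // hello
  }
}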


@@ -19,6 +19,6 @@ Future<void> main() async {
   );

   chatStream.listen((event) {
-    print(event.choices.first.delta.content?.map((e) => e.toMap()));
+    print(event.choices.first.delta.content?.map((e) => e?.toMap()));
   });
 }


@@ -10,7 +10,7 @@ final class OpenAIStreamChatCompletionChoiceDeltaModel {
   final OpenAIChatMessageRole? role;

   /// The [content] of the message.
-  final List<OpenAIChatCompletionChoiceMessageContentItemModel>? content;
+  final List<OpenAIChatCompletionChoiceMessageContentItemModel?>? content;

   /// The [toolCalls] of the message.
   final List<OpenAIResponseToolCall>? toolCalls;
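With the element type now nullable, every consumer of content has to decide how to handle null items. The example file above uses e?.toMap(); another option, sketched here with a hypothetical Item class, is to drop the nulls up front with whereType:

// Hypothetical stand-in for the real content item model.
class Item {
  final String text;
  Item(this.text);
  Map<String, dynamic> toMap() => {'text': text};
}

void main() {
  final List<Item?>? content = [Item('a'), null, Item('b')];

  // whereType<Item>() drops the null entries, so the map callback
  // can call toMap() without null-aware operators.
  final maps = content?.whereType<Item>().map((e) => e.toMap());
  print(maps); // ({text: a}, {text: b})
}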


@@ -1,5 +1,6 @@
 // ignore_for_file: avoid-passing-async-when-sync-expected
+import 'dart:async';
 import 'dart:convert';
 import 'dart:io';
 import 'dart:developer' as dev;
@@ -185,10 +186,9 @@ void main() async {
       expect(completion.choices.first.text, isA<String?>());
     });

-    test('create with a stream', () {
-      final Stream<OpenAIStreamCompletionModel> completion =
+    test('create with a stream', () async {
+      final Stream<OpenAIStreamCompletionModel> completionStream =
           OpenAI.instance.completion.createStream(
-        // in case the previous test didn't run, we will use a default model id.
         model: "davinci-002",
         prompt: "Dart tests are made to ensure that a function w",
         maxTokens: 5,
@@ -199,7 +199,22 @@ void main() async {
         bestOf: 1,
         n: 1,
       );

-      expect(completion, isA<Stream<OpenAIStreamCompletionModel>>());
+      final completer = Completer<bool>();
+
+      expect(completionStream, isA<Stream<OpenAIStreamCompletionModel>>());
+
+      completionStream.listen(
+        (event) {
+          var val = event.choices.first.text;
+
+          expect(val, isA<String>());
+        },
+        onDone: () {
+          completer.complete(true);
+        },
+      );
+
+      await completer.future;
     });
   });
@@ -313,7 +328,9 @@ void main() async {
       },
     );

-    test('create with a stream', () {
+    test('create with a stream', () async {
+      OpenAI.apiKey = "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";
+
       final chatStream = OpenAI.instance.chat.createStream(
         model: "gpt-3.5-turbo",
         messages: [
@@ -327,11 +344,23 @@ void main() async {
         ),
       ],
     );

+      final completer = Completer<bool>();
+
       expect(chatStream, isA<Stream<OpenAIStreamChatCompletionModel>>());
-      chatStream.listen((streamEvent) {
-        expect(streamEvent, isA<OpenAIStreamChatCompletionModel>());
-        expect(streamEvent.choices.first.delta.content, isA<String?>());
-      });
+      chatStream.listen(
+        (streamEvent) {
+          expect(streamEvent, isA<OpenAIStreamChatCompletionModel>());
+          expect(streamEvent.choices.first.delta.content,
+              isA<List<OpenAIChatCompletionChoiceMessageContentItemModel>?>());
+          expect(streamEvent.choices.first.delta.content?.first?.text,
+              isA<String?>());
+        },
+        onDone: () {
+          completer.complete(true);
+        },
+      );
+
+      await completer.future;
     });
   });
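One caveat with the completer pattern in these tests: if the stream emits an error or never fires onDone, await completer.future hangs until the runner's global timeout. A defensive variant, only a sketch and not part of this commit, forwards errors and bounds the wait:

import 'dart:async';

import 'package:test/test.dart';

void main() {
  test('stream test with a bounded wait', () async {
    final completer = Completer<bool>();
    final stream = Stream<int>.fromIterable([1, 2, 3]);

    stream.listen(
      (event) => expect(event, isA<int>()),
      onDone: () => completer.complete(true),
      // Forward errors so a failing stream surfaces as a test
      // failure instead of a silent hang.
      onError: (Object e, StackTrace st) => completer.completeError(e, st),
    );

    // Bound the wait so a stalled stream fails fast rather than
    // hitting the runner's global timeout.
    await completer.future.timeout(const Duration(seconds: 30));
  });
}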