completeStream method
Sends a streaming completion request and yields response chunks as they arrive.
Implementation
@override
Stream<AIStreamChunk> completeStream(
  List<AIMessage> messages, {
  int? maxTokens,
  double? temperature,
  List<AITool>? tools,
}) async* {
  // Build the payload with streaming enabled so the server replies with SSE.
  final body = _buildRequestBody(messages, maxTokens, temperature,
      stream: true, tools: tools);
  final request =
      http.Request('POST', Uri.parse('$_baseUrl/chat/completions'));
  request.headers.addAll(_headers());
  request.body = jsonEncode(body);

  final streamedResponse = await _httpClient.send(request);
  if (streamedResponse.statusCode != 200) {
    final responseBody = await streamedResponse.stream.bytesToString();
    // The error body is usually JSON, but proxies/gateways can return HTML
    // or plain text. Don't let a FormatException (or a TypeError from a
    // non-object JSON body) mask the real HTTP error — fall back to an
    // empty map so _parseError still reports the status code.
    Object? decoded;
    try {
      decoded = jsonDecode(responseBody);
    } on FormatException {
      decoded = null;
    }
    throw _parseError(
      streamedResponse.statusCode,
      decoded is Map<String, dynamic> ? decoded : <String, dynamic>{},
    );
  }

  // Parse the SSE stream line by line. Each event line has the shape
  // `data: {...json...}`; the terminal sentinel is `data: [DONE]`.
  await for (final line in streamedResponse.stream
      .transform(utf8.decoder)
      .transform(const LineSplitter())) {
    if (line.isEmpty || line == 'data: [DONE]') continue;
    if (!line.startsWith('data: ')) continue;
    final jsonStr = line.substring(6); // Strip the 'data: ' prefix.
    try {
      final json = jsonDecode(jsonStr) as Map<String, dynamic>;
      // Guard against payloads without a proper choices list (e.g. in-band
      // error events): an unchecked `as List` cast throws a TypeError that
      // the FormatException handler below would not catch, terminating the
      // whole stream instead of skipping one event.
      final choices = json['choices'];
      if (choices is! List || choices.isEmpty) continue;
      final choice = choices.first as Map<String, dynamic>;
      final delta = choice['delta'] as Map<String, dynamic>?;
      final finishReason = choice['finish_reason'];
      final content = delta?['content'] as String? ?? '';
      // Only emit chunks that carry text or signal completion.
      if (content.isNotEmpty || finishReason != null) {
        yield AIStreamChunk(
          text: content,
          isComplete: finishReason != null,
          finishReason: finishReason as String?,
          provider: name,
          model: json['model'] as String?,
        );
      }
    } on FormatException {
      // Skip malformed SSE chunks (e.g. partial JSON split across reads).
      continue;
    }
  }
}