
Commit 03164f9

Add onCancelCallback, onFailureCallback, cancelMessage, errorMessage

1 parent b1fbc7c commit 03164f9

3 files changed: +156 −12 lines changed
Lines changed: 48 additions & 0 deletions
@@ -0,0 +1,48 @@
// Copyright 2024 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:flutter/material.dart';
import 'package:flutter_ai_toolkit/flutter_ai_toolkit.dart';
import 'package:google_generative_ai/google_generative_ai.dart';

import '../gemini_api_key.dart';

void main() => runApp(const App());

class App extends StatelessWidget {
  static const title = 'Example: Welcome Message';

  const App({super.key});

  @override
  Widget build(BuildContext context) => const MaterialApp(
        title: title,
        home: ChatPage(),
      );
}

class ChatPage extends StatelessWidget {
  const ChatPage({super.key});

  void _onCancel(BuildContext context) {
    ScaffoldMessenger.of(context).showSnackBar(
      const SnackBar(content: Text('Chat cancelled')),
    );
  }

  @override
  Widget build(BuildContext context) => Scaffold(
        appBar: AppBar(title: const Text(App.title)),
        body: LlmChatView(
          onCancelCallback: _onCancel,
          cancelMessage: 'Request cancelled',
          provider: GeminiProvider(
            model: GenerativeModel(
              model: 'gemini-1.5-flash',
              apiKey: geminiApiKey,
            ),
          ),
        ),
      );
}
Lines changed: 48 additions & 0 deletions
@@ -0,0 +1,48 @@
// Copyright 2024 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:flutter/material.dart';
import 'package:flutter_ai_toolkit/flutter_ai_toolkit.dart';
import 'package:google_generative_ai/google_generative_ai.dart';

import '../gemini_api_key.dart';

void main() => runApp(const App());

class App extends StatelessWidget {
  static const title = 'Example: Welcome Message';

  const App({super.key});

  @override
  Widget build(BuildContext context) => const MaterialApp(
        title: title,
        home: ChatPage(),
      );
}

class ChatPage extends StatelessWidget {
  const ChatPage({super.key});

  void _onError(BuildContext context, LlmException error) {
    ScaffoldMessenger.of(context).showSnackBar(
      SnackBar(content: Text('Error: ${error.message}')),
    );
  }

  @override
  Widget build(BuildContext context) => Scaffold(
        appBar: AppBar(title: const Text(App.title)),
        body: LlmChatView(
          onErrorCallback: _onError,
          errorMessage: 'An error occurred',
          provider: GeminiProvider(
            model: GenerativeModel(
              model: 'gemini-1.5-flash',
              apiKey: geminiApiKey,
            ),
          ),
        ),
      );
}
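The two new examples above exercise the cancel path and the error path separately. As a rough sketch only (not part of this commit, and assuming the same imports and geminiApiKey as the examples above), all four new parameters can be combined on a single LlmChatView:

  // Sketch: combining both callbacks and both fallback messages.
  LlmChatView(
    // Called instead of the default snackbar when the user cancels.
    onCancelCallback: (context) => ScaffoldMessenger.of(context).showSnackBar(
      const SnackBar(content: Text('Chat cancelled')),
    ),
    // Called instead of the default alert dialog when an LlmException occurs.
    onErrorCallback: (context, error) => ScaffoldMessenger.of(context)
        .showSnackBar(SnackBar(content: Text('Error: ${error.message}'))),
    // Appended to the empty LLM message in history on cancel / on error.
    cancelMessage: 'Request cancelled',
    errorMessage: 'An error occurred',
    provider: GeminiProvider(
      model: GenerativeModel(model: 'gemini-1.5-flash', apiKey: geminiApiKey),
    ),
  )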

lib/src/views/llm_chat_view/llm_chat_view.dart

Lines changed: 60 additions & 12 deletions
@@ -66,13 +66,27 @@ class LlmChatView extends StatefulWidget {
   ///   when the chat history is empty. Defaults to an empty list.
   /// - [welcomeMessage]: Optional. A welcome message to display when the chat
   ///   is first opened.
+  /// - [onCancelCallback]: Optional. The action to perform when the user
+  ///   cancels a chat operation. By default, a snackbar is displayed with the
+  ///   canceled message.
+  /// - [onErrorCallback]: Optional. The action to perform when an
+  ///   error occurs during a chat operation. By default, an alert dialog is
+  ///   displayed with the error message.
+  /// - [cancelMessage]: Optional. The message to display when the user cancels
+  ///   a chat operation. Defaults to 'CANCEL'.
+  /// - [errorMessage]: Optional. The message to display when an error occurs
+  ///   during a chat operation. Defaults to 'ERROR'.
   LlmChatView({
     required LlmProvider provider,
     LlmChatViewStyle? style,
     ResponseBuilder? responseBuilder,
     LlmStreamGenerator? messageSender,
     this.suggestions = const [],
     String? welcomeMessage,
+    this.onCancelCallback,
+    this.onErrorCallback,
+    this.cancelMessage = 'CANCEL',
+    this.errorMessage = 'ERROR',
     super.key,
   }) : viewModel = ChatViewModel(
          provider: provider,
@@ -96,6 +110,27 @@ class LlmChatView extends StatefulWidget {
   /// It encapsulates the core data and functionality needed for the chat view.
   late final ChatViewModel viewModel;
 
+  /// The action to perform when the user cancels a chat operation.
+  ///
+  /// By default, a snackbar is displayed with the canceled message.
+  final void Function(BuildContext context)? onCancelCallback;
+
+  /// The action to perform when an error occurs during a chat operation.
+  ///
+  /// By default, an alert dialog is displayed with the error message.
+  final void Function(BuildContext context, LlmException error)?
+      onErrorCallback;
+
+  /// The text message to display when the user cancels a chat operation.
+  ///
+  /// Defaults to 'CANCEL'.
+  final String cancelMessage;
+
+  /// The text message to display when an error occurs during a chat operation.
+  ///
+  /// Defaults to 'ERROR'.
+  final String errorMessage;
+
   @override
   State<LlmChatView> createState() => _LlmChatViewState();
 }
@@ -281,24 +316,37 @@ class _LlmChatViewState extends State<LlmChatView>
     // empty LLM message.
     final llmMessage = widget.viewModel.provider.history.last;
     if (llmMessage.text == null) {
-      llmMessage.append(error is LlmCancelException ? 'CANCEL' : 'ERROR');
+      llmMessage.append(
+        error is LlmCancelException
+            ? widget.cancelMessage
+            : widget.errorMessage,
+      );
     }
 
     switch (error) {
       case LlmCancelException():
-        AdaptiveSnackBar.show(context, 'LLM operation canceled by user');
+        if (widget.onCancelCallback != null) {
+          widget.onCancelCallback!(context);
+        } else {
+          AdaptiveSnackBar.show(context, 'LLM operation canceled by user');
+        }
+        break;
       case LlmFailureException():
       case LlmException():
-        await AdaptiveAlertDialog.show(
-          context: context,
-          content: Text(error.toString()),
-          actions: [
-            AdaptiveDialogAction(
-              onPressed: () => Navigator.pop(context),
-              child: const Text('OK'),
-            ),
-          ],
-        );
+        if (widget.onErrorCallback != null) {
+          widget.onErrorCallback!(context, error);
+        } else {
+          await AdaptiveAlertDialog.show(
+            context: context,
+            content: Text(error.toString()),
+            actions: [
+              AdaptiveDialogAction(
+                onPressed: () => Navigator.pop(context),
+                child: const Text('OK'),
+              ),
+            ],
+          );
+        }
     }
   }
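As the final hunk shows, supplying onErrorCallback bypasses the default AdaptiveAlertDialog (and onCancelCallback likewise bypasses the default snackbar). A hedged sketch, not from this commit, of an app-level handler that shows its own dialog instead; the helper name is illustrative:

  // Shows the app's own dialog for LLM errors; passed as onErrorCallback.
  void showLlmError(BuildContext context, LlmException error) {
    showDialog<void>(
      context: context,
      builder: (context) => AlertDialog(
        title: const Text('LLM error'),
        content: Text(error.toString()),
        actions: [
          TextButton(
            onPressed: () => Navigator.pop(context),
            child: const Text('OK'),
          ),
        ],
      ),
    );
  }

  // Wired up at construction time:
  // LlmChatView(onErrorCallback: showLlmError, provider: ...)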
