4040/******************************************************************************
4141 UNIFIED IRQ HANDLER WRAPPING
4242
43- All IRQ handlers are converted to generator-compatible objects at registration
44- time. This eliminates type checks in the hot dispatch path.
43+ For hard IRQ: All handlers are converted to generator-compatible objects at
44+ registration time. This eliminates type checks in the hot dispatch path.
4545
46- Handler types:
46+ Handler types for hard IRQ:
4747 - Real generators: Used as-is after priming (run to first yield)
4848 - Bytecode functions: Wrapped as mp_type_gen_instance with IRQ_FUNC_BC sentinel
4949 - Native functions: Wrapped as mp_type_gen_instance with IRQ_FUNC_NAT sentinel
5050 - Viper functions: Wrapped as mp_type_gen_instance with IRQ_VIPER sentinel
51+ - Other callables: Wrapped with IRQ_CALLABLE sentinel, calls via mp_call_function_1
52+
53+ For soft IRQ: Generators are instantiated and primed, but other callables are
54+ passed directly to mp_sched_schedule without wrapping.
5155
5256 The sentinel value in exc_sp_idx tells mp_obj_gen_resume_irq() how to handle it.
5357 ******************************************************************************/
5458
55- // Get pointer to extra data stored after the state[] + exc_stack[] arrays.
56- // This requires knowing n_state and n_exc_stack (stored in code_state).
57- static inline mp_irq_handler_extra_t * mp_irq_get_extra (mp_obj_gen_instance_t * gen , size_t n_exc_stack ) {
58- size_t state_size = gen -> code_state .n_state * sizeof (mp_obj_t );
59- size_t exc_size = n_exc_stack * sizeof (mp_exc_stack_t );
60- return (mp_irq_handler_extra_t * )((byte * )gen -> code_state .state + state_size + exc_size );
61- }
62-
6359// Wrap a bytecode function as a generator-compatible object for fast IRQ dispatch.
6460// Calls mp_setup_code_state once at wrap time to initialize everything.
6561static mp_obj_t mp_irq_wrap_bytecode_function (mp_obj_t func_in ) {
6662 mp_obj_fun_bc_t * fun = MP_OBJ_TO_PTR (func_in );
6763
6864 // Decode state requirements from bytecode prelude
6965 const uint8_t * ip = fun -> bytecode ;
70- size_t n_state , n_pos_args ;
66+ size_t n_state , n_pos_args , scope_flags ;
7167 {
72- size_t n_exc_stack , scope_flags , n_kwonly_args , n_def_args ;
68+ size_t n_exc_stack , n_kwonly_args , n_def_args ;
7369 MP_BC_PRELUDE_SIG_DECODE_INTO (ip , n_state , n_exc_stack , scope_flags , n_pos_args , n_kwonly_args , n_def_args );
7470 (void )n_exc_stack ;
75- (void )scope_flags ;
7671 (void )n_kwonly_args ;
7772 (void )n_def_args ;
7873 }
7974
75+ // Reject generator functions (those with yield) - they should be passed as gen_wrap
76+ if (scope_flags & MP_SCOPE_FLAG_GENERATOR ) {
77+ mp_raise_ValueError (MP_ERROR_TEXT ("use generator function, not plain function with yield" ));
78+ }
79+
8080 // Verify function signature: must accept exactly 1 positional arg
8181 if (n_pos_args != 1 ) {
8282 mp_raise_ValueError (MP_ERROR_TEXT ("IRQ callback must take exactly 1 argument" ));
@@ -120,11 +120,16 @@ static mp_obj_t mp_irq_wrap_bytecode_function(mp_obj_t func_in) {
120120static mp_obj_t mp_irq_wrap_native_function (mp_obj_t func_in ) {
121121 mp_obj_fun_bc_t * fun = MP_OBJ_TO_PTR (func_in );
122122
123- // Get prelude to determine state size
123+ // Get prelude to determine state size and validate signature
124124 const uint8_t * prelude_ptr = mp_obj_fun_native_get_prelude_ptr (fun );
125125 const uint8_t * ip = prelude_ptr ;
126126 MP_BC_PRELUDE_SIG_DECODE (ip );
127127
128+ // Verify function signature: must accept exactly 1 positional arg
129+ if (n_pos_args != 1 ) {
130+ mp_raise_ValueError (MP_ERROR_TEXT ("IRQ callback must take exactly 1 argument" ));
131+ }
132+
128133 // Native functions don't need exception stack
129134 size_t extra_size = sizeof (mp_irq_handler_extra_t );
130135 size_t total_var_size = n_state * sizeof (mp_obj_t ) + extra_size ;
@@ -173,8 +178,37 @@ static mp_obj_t mp_irq_wrap_viper_function(mp_obj_t func_in) {
173178 return MP_OBJ_FROM_PTR (o );
174179}
175180
181+ // Wrap a @viper function for hard IRQ dispatch.
182+ // NOTE(review): despite the "_validated" name, no argument-count check happens here.
183+ static mp_obj_t mp_irq_wrap_viper_function_validated (mp_obj_t func_in ) {
184+ // Viper functions use a prelude format that differs from bytecode/native, so
185+ // decoding n_pos_args would require parsing viper-specific metadata. Instead,
186+ // an argument-count mismatch surfaces when the handler is first invoked.
187+ return mp_irq_wrap_viper_function (func_in );
188+ }
176189#endif // MICROPY_EMIT_NATIVE
177190
191+ // Wrap any callable as a generator-compatible object for hard IRQ dispatch.
192+ // Dispatch goes through mp_call_function_1 (slower than the specialized bytecode/
193+ // native/viper wrappers, but works for bound methods, closures, callable objects, etc).
194+ static mp_obj_t mp_irq_wrap_callable (mp_obj_t callable ) {
195+ // Minimal allocation: a gen_instance whose state[] holds only the callable.
196+ // The callable lives in state[0]; the IRQ_CALLABLE sentinel marks the dispatch mode.
197+ size_t n_state = 2 ; // state[0] = callable, state[1] unused
198+ size_t total_var_size = n_state * sizeof (mp_obj_t );
199+
200+ mp_obj_gen_instance_t * o = mp_obj_malloc_var (mp_obj_gen_instance_t , code_state .state ,
201+ byte , total_var_size , & mp_type_gen_instance );
202+
// NOTE(review): ip/sp and the remaining code_state fields are left uninitialized -
// presumably mp_obj_gen_resume_irq() branches on the IRQ_CALLABLE sentinel before
// touching them; confirm against the dispatch path.
203+ o -> pend_exc = mp_const_none ;
204+ o -> code_state .fun_bc = NULL ; // Not used for generic callable
205+ o -> code_state .n_state = n_state ;
206+ o -> code_state .exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_IRQ_CALLABLE ;
207+ o -> code_state .state [0 ] = callable ; // Store the actual callable here

209+ return MP_OBJ_FROM_PTR (o );
210+ }
211+
178212/******************************************************************************
179213 DECLARE PUBLIC DATA
180214 ******************************************************************************/
@@ -203,7 +237,7 @@ void mp_irq_init(mp_irq_obj_t *self, const mp_irq_methods_t *methods, mp_obj_t p
203237 self -> ishard = false;
204238}
205239
206- mp_obj_t mp_irq_prepare_handler (mp_obj_t callback , mp_obj_t parent ) {
240+ mp_obj_t mp_irq_prepare_handler (mp_obj_t callback , mp_obj_t parent , bool ishard ) {
207241 // Auto-instantiate generator functions (bytecode or native).
208242 if (mp_obj_is_type (callback , & mp_type_gen_wrap )
209243 #if MICROPY_EMIT_NATIVE
@@ -223,19 +257,29 @@ mp_obj_t mp_irq_prepare_handler(mp_obj_t callback, mp_obj_t parent) {
223257 }
224258 mp_raise_ValueError (MP_ERROR_TEXT ("generator must yield" ));
225259 }
226- } else if (mp_obj_is_type (callback , & mp_type_fun_bc )) {
227- // Wrap bytecode functions as generator-compatible objects for fast IRQ dispatch.
228- callback = mp_irq_wrap_bytecode_function (callback );
229- #if MICROPY_EMIT_NATIVE
230- } else if (mp_obj_is_type (callback , & mp_type_fun_native )) {
231- // Wrap @native functions
232- callback = mp_irq_wrap_native_function (callback );
233- } else if (mp_obj_is_type (callback , & mp_type_fun_viper )) {
234- // Wrap @viper functions
235- callback = mp_irq_wrap_viper_function (callback );
236- #endif
237- } else if (callback != mp_const_none && !mp_obj_is_callable (callback )) {
238- mp_raise_ValueError (MP_ERROR_TEXT ("callback must be None, callable, or generator" ));
260+ // Generator is ready - no wrapping needed (already gen_instance)
261+ } else if (ishard ) {
262+ // Hard IRQ: wrap all callables as generator-compatible objects for unified dispatch
263+ if (mp_obj_is_type (callback , & mp_type_fun_bc )) {
264+ callback = mp_irq_wrap_bytecode_function (callback );
265+ #if MICROPY_EMIT_NATIVE
266+ } else if (mp_obj_is_type (callback , & mp_type_fun_native )) {
267+ callback = mp_irq_wrap_native_function (callback );
268+ } else if (mp_obj_is_type (callback , & mp_type_fun_viper )) {
269+ callback = mp_irq_wrap_viper_function_validated (callback );
270+ #endif
271+ } else if (callback != mp_const_none ) {
272+ // Generic callable (bound method, closure, etc) - wrap for hard IRQ dispatch
273+ if (!mp_obj_is_callable (callback )) {
274+ mp_raise_ValueError (MP_ERROR_TEXT ("callback must be None, callable, or generator" ));
275+ }
276+ callback = mp_irq_wrap_callable (callback );
277+ }
278+ } else {
279+ // Soft IRQ: don't wrap - mp_sched_schedule will call via mp_call_function_1
280+ if (callback != mp_const_none && !mp_obj_is_callable (callback )) {
281+ mp_raise_ValueError (MP_ERROR_TEXT ("callback must be None, callable, or generator" ));
282+ }
239283 }
240284
241285 return callback ;
@@ -270,7 +314,8 @@ int mp_irq_dispatch(mp_obj_t handler, mp_obj_t parent, bool ishard) {
270314 // Only signal error for real generators that have exhausted
271315 if (exc_idx != MP_CODE_STATE_EXC_SP_IDX_IRQ_FUNC_BC
272316 && exc_idx != MP_CODE_STATE_EXC_SP_IDX_IRQ_FUNC_NAT
273- && exc_idx != MP_CODE_STATE_EXC_SP_IDX_IRQ_VIPER ) {
317+ && exc_idx != MP_CODE_STATE_EXC_SP_IDX_IRQ_VIPER
318+ && exc_idx != MP_CODE_STATE_EXC_SP_IDX_IRQ_CALLABLE ) {
274319 result = -1 ; // Generator exhausted
275320 }
276321 } else if (ret == MP_VM_RETURN_EXCEPTION ) {
0 commit comments