@@ -213,6 +213,25 @@ Core.Compiler.code_cache(interp::GPUInterpreter) = WorldView(interp.global_cache
 Core.Compiler.lock_mi_inference(interp::GPUInterpreter, mi::MethodInstance) = nothing
 Core.Compiler.unlock_mi_inference(interp::GPUInterpreter, mi::MethodInstance) = nothing
 
+import Core.Compiler: retrieve_code_info, validate_code_in_debug_mode, InferenceState
+# Replace usage sites of `retrieve_code_info`; `OptimizationState` is one such site, but in all interesting
+# use cases it is derived from an `InferenceState`. A third site is in `typeinf_ext`, for when the module forbids inference.
+function InferenceState(result::InferenceResult, cached::Symbol, interp::GPUInterpreter)
+    src = retrieve_code_info(result.linfo)
+    src === nothing && return nothing
+    validate_code_in_debug_mode(result.linfo, src, "lowered")
+    src = transform(interp, result.linfo, src)
+    validate_code_in_debug_mode(result.linfo, src, "transformed")
+    return InferenceState(result, src, cached, interp)
+end
+
+function transform(interp, mi, src)
+    src = copy(src)
+    early_transform!(mi, src)
+    return src
+end
+
+
 function Core.Compiler.add_remark!(interp::GPUInterpreter, sv::InferenceState, msg)
     @safe_debug "Inference remark during GPU compilation of $(sv.linfo): $msg"
 end
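
For orientation, here is a minimal hypothetical sketch of `early_transform!`, which this hunk calls but does not define. Assuming the pass mutates the lowered CodeInfo in place, it could walk the statement array and rewrite call targets; the `gpu_sin` helper and the `Base.sin` rewrite below are illustrative stand-ins, not part of this commit.

gpu_sin(x) = sin(x)  # hypothetical device-side replacement, for illustration only

# Detect lowered calls whose target is the global binding Base.sin.
is_sin_target(f) = f isa GlobalRef && f.mod === Base && f.name === :sin

function early_transform!(mi::Core.MethodInstance, src::Core.CodeInfo)
    for (i, stmt) in enumerate(src.code)
        if stmt isa Expr && stmt.head === :call && is_sin_target(stmt.args[1])
            # Redirect the call target, keeping the original arguments.
            src.code[i] = Expr(:call, GlobalRef(@__MODULE__, :gpu_sin), stmt.args[2:end]...)
        end
    end
    return src
end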