A package for interfacing with Julia's compiler caching infrastructure for the purpose
of building custom compilers. It extends the existing InternalCodeCache type with
auxiliary functionality.
using Pkg
Pkg.add(url="path/to/CompilerCaching")

Julia's code caches are indexed with method instances, yielding a code instance that keeps track of compilation results. Code instances are owned by a specific compiler, identified by an owner token, and they contain a cache of results specific to that compiler.
The basic usage pattern of working with the compiler cache through CompilerCaching.jl:
- Define a mutable struct with a zero-arg constructor to hold compilation results
- Create a `CacheView{V}(owner_token, world)` where `V` is your results struct type
- Use the cache's `Dict` interface to get or create a code instance for a method instance
- Access cached compilation results via `results(cache, ci)`, populating them if needed
using CompilerCaching
# Define your results struct
mutable struct MyResults
executable::Any
MyResults() = new(nothing)
end
# Compile a method instance
function compile!(cache, mi)
# Get or create code instance
ci = get!(cache, mi) do
create_ci(cache, mi)
end
# Check for cache hit
res = results(cache, ci)
res.executable !== nothing && return res.executable
# Generate an executable.
# Use multiple steps (e.g. IR generation, machine code generation, linking) if needed.
if res.executable === nothing
res.executable = emit_executable(cache, mi, res.code)
end
return res.executable
end
function call(f, args...)
tt = Tuple{map(Core.Typeof, args)...}
world = Base.get_world_counter()
mi = @something(method_instance(f, tt; world, method_table),
throw(MethodError(f, args)))
cache = CacheView{MyResults}(:MyCompiler, world)
exe = compile!(cache, mi)
ccall(exe, ...)
end

The `create_ci` function creates a bare code instance with (initially empty)
compilation results. Most users will want to rely on Julia's type inference
to instead populate the cache with a code instance that knows about dependent
methods for invalidation purposes, and contains inferred source code for further
compilation. This can be done with a custom abstract interpreter and the `typeinf!`
function from this package:
# Set-up a custom interpreter, and link it to the cache
struct CustomInterpreter <: CC.AbstractInterpreter
cache::CacheView
...
end
@setup_caching CustomInterpreter.cache
function compile!(cache, mi)
# Get CI through inference
ci = get(cache, mi, nothing)
if ci === nothing
interp = CustomInterpreter(cache)
CompilerCaching.typeinf!(cache, interp, mi)
ci = get(cache, mi)
end
# ... further compilation steps
end

The @setup_caching macro defines the necessary methods to connect the interpreter
to the cache:
- `CC.cache_owner(interp)` returning the cache's owner token
- `CC.finish!(interp, caller, ...)` that stacks a new `V()` instance in analysis results
It is possible to partition the cache by additional parameters by using a tuple or named tuple as the owner key type:
function call(f, args...; opt_level=1)
# ...
cache = CacheView{MyResults}((:MyCompiler, opt_level), world)
# ...
end

Different calls with the same owner key will hit the same cache partition.
It is often useful to redefine existing methods for use with the custom compiler. This can be accomplished using overlay methods in a custom method table:
Base.Experimental.@MethodTable method_table
Base.Experimental.@overlay method_table function Base.sin(x::Int)
# custom implementation
end
# Expose the method table to the interpreter
struct CustomInterpreter <: CC.AbstractInterpreter
cache::CacheView
...
end
CC.method_table(interp::CustomInterpreter) = CC.OverlayMethodTable(interp.cache.world, method_table)
function call(f, args...)
tt = Tuple{map(Core.Typeof, args)...}
world = Base.get_world_counter()
mi = @something(method_instance(f, tt; world, method_table),
# if needed, look for global methods too
throw(MethodError(f, args)))
# ...
end

If multiple overlay tables are needed, they can be stacked using StackedMethodTable:
MyMethodTableStack(world) = StackedMethodTable(world, overlay_table, base_table)
struct CustomInterpreter <: CC.AbstractInterpreter
world::UInt
end
CC.method_table(interp::CustomInterpreter) = MyMethodTableStack(interp.world)

For compilers that define their own IR format that Julia doesn't know about, we cannot rely
on inference to populate the cache, so we need to bring our own code instances using
create_ci:
Base.Experimental.@MethodTable method_table
# Results struct for foreign IR
mutable struct ForeignResults
ir::Any
ForeignResults() = new(nothing)
end
# Only define our special functions in the overlay method table,
# providing our custom IR as the source.
function really_special end
add_method(method_table, really_special, (Int,), MyCustomIR([:a, :b]))
# Compile function using get! do-block pattern
function compile!(cache, mi)
ci = get!(cache, mi) do
source = mi.def.source::MyCustomIR
ir = infer(source)
deps = Core.MethodInstance[]
for callee in ir.callees
callee_mi = method_instance(callee.f, callee.tt; world=cache.world, method_table)
compile!(cache, callee_mi) # recursive compilation
push!(deps, callee_mi)
end
ci = create_ci(cache, mi; deps)
results(cache, ci).ir = ir # cache the inferred IR if needed
return ci
end
# ...
end
function call(f, args...)
tt = Tuple{map(Core.Typeof, args)...}
world = get_world_counter()
mi = @something(method_instance(f, tt; world, method_table),
throw(MethodError(f, args)))
cache = CacheView{ForeignResults}(:MyCompiler, world)
exe = compile!(cache, mi)
ccall(exe, ...)
end