| Line | Exclusive | Inclusive | Code |
|---|---|---|---|
| 1 | # This file is a part of Julia. License is MIT: https://julialang.org/license | ||
| 2 | |||
| 3 | # Base.require is the implementation for the `import` statement | ||
| 4 | const require_lock = ReentrantLock() | ||
| 5 | |||
| 6 | # Cross-platform case-sensitive path canonicalization | ||
| 7 | |||
# Define a per-platform `isfile_casesensitive(path)`: true only when `path` is an
# accessible file AND its on-disk basename matches the requested spelling exactly,
# so e.g. `foo.jl` does not match `Foo.jl` on case-insensitive filesystems.
if Sys.isunix() && !Sys.isapple()
    # assume case-sensitive filesystems, don't have to do anything
    isfile_casesensitive(path) = isaccessiblefile(path)
elseif Sys.iswindows()
    # GetLongPathName Win32 function returns the case-preserved filename on NTFS.
    function isfile_casesensitive(path)
        isaccessiblefile(path) || return false # Fail fast
        # compare only the basename: the case of parent directories is irrelevant here
        basename(Filesystem.longpath(path)) == basename(path)
    end
elseif Sys.isapple()
    # HFS+ filesystem is case-preserving. The getattrlist API returns
    # a case-preserved filename. In the rare event that HFS+ is operating
    # in case-sensitive mode, this will still work but will be redundant.

    # Constants from <sys/attr.h>
    const ATRATTR_BIT_MAP_COUNT = 5
    const ATTR_CMN_NAME = 1
    const BITMAPCOUNT = 1      # 1-based byte index of attrlist.bitmapcount
    const COMMONATTR = 5       # 1-based byte index of attrlist.commonattr
    const FSOPT_NOFOLLOW = 1   # Don't follow symbolic links

    # zero-filled attrlist struct with only bitmapcount/commonattr set
    const attr_list = zeros(UInt8, 24)
    attr_list[BITMAPCOUNT] = ATRATTR_BIT_MAP_COUNT
    attr_list[COMMONATTR] = ATTR_CMN_NAME

    # This essentially corresponds to the following C code:
    # attrlist attr_list;
    # memset(&attr_list, 0, sizeof(attr_list));
    # attr_list.bitmapcount = ATTR_BIT_MAP_COUNT;
    # attr_list.commonattr = ATTR_CMN_NAME;
    # struct Buffer {
    #     u_int32_t total_length;
    #     u_int32_t filename_offset;
    #     u_int32_t filename_length;
    #     char filename[max_filename_length];
    # };
    # Buffer buf;
    # getattrpath(path, &attr_list, &buf, sizeof(buf), FSOPT_NOFOLLOW);
    function isfile_casesensitive(path)
        isaccessiblefile(path) || return false
        path_basename = String(basename(path))
        local casepreserved_basename
        header_size = 12  # the three u_int32_t fields of Buffer above
        buf = Vector{UInt8}(undef, length(path_basename) + header_size + 1)
        while true
            ret = ccall(:getattrlist, Cint,
                        (Cstring, Ptr{Cvoid}, Ptr{Cvoid}, Csize_t, Culong),
                        path, attr_list, buf, sizeof(buf), FSOPT_NOFOLLOW)
            systemerror(:getattrlist, ret ≠ 0)
            # filename_length lives at byte offset 8 of the returned buffer
            filename_length = GC.@preserve buf unsafe_load(
                convert(Ptr{UInt32}, pointer(buf) + 8))
            if (filename_length + header_size) > length(buf)
                # buffer too small for the on-disk name; grow and retry the call
                resize!(buf, filename_length + header_size)
                continue
            end
            # the -1 drops the final byte of the reported name
            # (presumably a trailing NUL — matches the attrreference layout)
            casepreserved_basename =
                view(buf, (header_size+1):(header_size+filename_length-1))
            break
        end
        # Hack to compensate for inability to create a string from a subarray with no allocations.
        codeunits(path_basename) == casepreserved_basename && return true

        # If there is no match, it's possible that the file does exist but HFS+
        # performed unicode normalization. See https://developer.apple.com/library/mac/qa/qa1235/_index.html.
        isascii(path_basename) && return false
        codeunits(Unicode.normalize(path_basename, :NFD)) == casepreserved_basename
    end
else
    # Generic fallback that performs a slow directory listing.
    function isfile_casesensitive(path)
        isaccessiblefile(path) || return false
        dir, filename = splitdir(path)
        any(readdir(dir) .== filename)
    end
end
| 83 | |||
| 84 | # Check if the file is accessible. If stat fails return `false` | ||
| 85 | |||
# `isdir` that treats filesystem errors (e.g. permission failures) as absence.
function isaccessibledir(dir)
    try
        return isdir(dir)
    catch err
        err isa IOError || rethrow()
        return false
    end
end
| 94 | |||
# `isfile` that treats filesystem errors (e.g. permission failures) as absence.
function isaccessiblefile(file)
    try
        return isfile(file)
    catch err
        err isa IOError || rethrow()
        return false
    end
end
| 103 | |||
# `ispath` that treats filesystem errors (e.g. permission failures) as absence.
function isaccessiblepath(path)
    try
        return ispath(path)
    catch err
        err isa IOError || rethrow()
        return false
    end
end
| 112 | |||
| 113 | ## SHA1 ## | ||
| 114 | |||
"""
    SHA1

A 160-bit SHA-1 hash value stored as a tuple of 20 bytes.
"""
struct SHA1
    bytes::NTuple{20, UInt8}
end

# Construct from a byte vector, which must have exactly 20 elements.
function SHA1(bytes::Vector{UInt8})
    if length(bytes) != 20
        throw(ArgumentError("wrong number of bytes for SHA1 hash: $(length(bytes))"))
    end
    return SHA1(ntuple(i -> bytes[i], Val(20)))
end

# Construct from a 40-character hexadecimal string.
SHA1(s::AbstractString) = SHA1(hex2bytes(s))

# `parse`/`tryparse` interface for SHA1 hex strings; `tryparse` converts
# malformed input (bad hex digits or wrong length) into `nothing`.
parse(::Type{SHA1}, s::AbstractString) = SHA1(s)
function tryparse(::Type{SHA1}, s::AbstractString)
    try
        return parse(SHA1, s)
    catch e
        e isa ArgumentError && return nothing
        rethrow(e)
    end
end
| 135 | |||
# Textual representations: lowercase hex for string/print, quoted constructor
# form for show.
string(hash::SHA1) = bytes2hex(hash.bytes)
print(io::IO, hash::SHA1) = bytes2hex(io, hash.bytes)
show(io::IO, hash::SHA1) = print(io, "SHA1(\"", hash, "\")")

# Order, hash, and compare SHA1 values by their raw byte tuples.
isless(a::SHA1, b::SHA1) = isless(a.bytes, b.bytes)
hash(a::SHA1, h::UInt) = hash((SHA1, a.bytes), h)
==(a::SHA1, b::SHA1) = a.bytes == b.bytes
| 143 | |||
| 144 | # fake uuid5 function (for self-assigned UUIDs) | ||
| 145 | # TODO: delete and use real uuid5 once it's in stdlib | ||
| 146 | |||
# Deterministically derive a version-5-shaped UUID from a namespace UUID and a
# string key, using Base `hash` chaining (not a real RFC 4122 SHA-1 uuid5).
function uuid5(namespace::UUID, key::String)
    acc::UInt128 = 0
    chunk = hash(namespace)
    nchunks = sizeof(acc) ÷ sizeof(chunk)
    # fill the 128-bit accumulator with successive chained hashes of the key
    for _ in 1:nchunks
        chunk = hash(key, chunk)
        acc = (acc << (8 * sizeof(chunk))) | chunk
    end
    # stamp in the UUID version (5) and variant bits
    acc &= 0xffffffffffff0fff3fffffffffffffff
    acc |= 0x00000000000050008000000000000000
    return UUID(acc)
end
| 158 | |||
# Namespace for `dummy_uuid` (self-assigned; see the fake `uuid5` above).
const ns_dummy_uuid = UUID("fe0723d6-3a44-4c41-8065-ee0f42c8ceab")

# Deterministic stand-in UUID for a project file that has no `uuid` entry,
# derived from the project file's canonical path. Memoized in LOADING_CACHE
# when a load is in progress; guarded by `require_lock`.
function dummy_uuid(project_file::String)
    @lock require_lock begin
        cache = LOADING_CACHE[]
        if cache !== nothing
            uuid = get(cache.dummy_uuid, project_file, nothing)
            uuid === nothing || return uuid
        end
        # use the real path so different spellings of the same file agree;
        # fall back to the given path when realpath fails (e.g. file missing)
        project_path = try
            realpath(project_file)
        catch ex
            ex isa IOError || rethrow()
            project_file
        end
        uuid = uuid5(ns_dummy_uuid, project_path)
        if cache !== nothing
            cache.dummy_uuid[project_file] = uuid
        end
        return uuid
    end
end
| 181 | |||
| 182 | ## package path slugs: turning UUID + SHA1 into a pair of 4-byte "slugs" ## | ||
| 183 | |||
# Alphabet for slugs: base-62 digits A-Z, a-z, 0-9.
const slug_chars = String(['A':'Z'; 'a':'z'; '0':'9'])

# Encode `x` as exactly `p` base-62 characters (least-significant digit first).
function slug(x::UInt32, p::Int)
    buf = IOBuffer(sizehint=p)
    remainder::UInt32 = x
    radix = length(slug_chars)
    for _ in 1:p
        remainder, digit = divrem(remainder, radix)
        print(buf, slug_chars[digit + 1])
    end
    return String(take!(buf))
end
| 196 | |||
# Short path component for a package, derived from the CRC32c of its UUID.
package_slug(uuid::UUID, p::Int=5) = slug(_crc32c(uuid), p)
| 201 | |||
# Short path component for a package version: CRC32c of the UUID chained with
# the tree hash bytes.
function version_slug(uuid::UUID, sha1::SHA1, p::Int=5)
    return slug(_crc32c(sha1.bytes, _crc32c(uuid)), p)
end
| 207 | |||
# A parsed TOML file together with the stat metadata (inode/mtime/size) and
# content checksum used by `get_updated_dict` to detect staleness without
# unconditionally re-reading the file.
mutable struct CachedTOMLDict
    path::String
    inode::UInt64
    mtime::Float64
    size::Int64
    hash::UInt32          # CRC32c of the raw file contents
    d::Dict{String, Any}  # the parsed TOML document
end
| 216 | |||
# Read, checksum, and parse `path`, capturing its stat metadata so later calls
# can cheaply detect whether the file changed.
function CachedTOMLDict(p::TOML.Parser, path::String)
    info = stat(path)
    raw = read(path)
    checksum = _crc32c(raw)
    TOML.reinit!(p, String(raw); filepath=path)
    parsed = TOML.parse(p)
    return CachedTOMLDict(path, info.inode, info.mtime, info.size, checksum, parsed)
end
| 232 | |||
# Return the parsed dict for `f`, re-reading the file only when stat metadata
# suggests a change, and re-parsing only when the content checksum differs.
function get_updated_dict(p::TOML.Parser, f::CachedTOMLDict)
    s = stat(f.path)
    # note, this might miss very rapid in-place updates, such that mtime is
    # identical but that is solvable by not doing in-place updates, and not
    # rapidly changing these files
    if s.inode != f.inode || s.mtime != f.mtime || f.size != s.size
        content = read(f.path)
        new_hash = _crc32c(content)
        if new_hash != f.hash
            # contents actually changed: refresh the metadata and reparse
            f.inode = s.inode
            f.mtime = s.mtime
            f.size = s.size
            f.hash = new_hash
            TOML.reinit!(p, String(content); filepath=f.path)
            return f.d = TOML.parse(p)
        end
    end
    return f.d
end
| 252 | |||
# Memoization of environment lookups, active while `LOADING_CACHE[]` is set;
# avoids repeated disk access for the same queries during a single load.
struct LoadingCache
    load_path::Vector{String}
    dummy_uuid::Dict{String, UUID}
    env_project_file::Dict{String, Union{Bool, String}}
    project_file_manifest_path::Dict{String, Union{Nothing, String}}
    require_parsed::Set{String}  # TOML files already parsed during this load
    identified_where::Dict{Tuple{PkgId, String}, Union{Nothing, Tuple{PkgId, Union{Nothing, String}}}}
    identified::Dict{String, Union{Nothing, Tuple{PkgId, Union{Nothing, String}}}}
    located::Dict{Tuple{PkgId, Union{String, Nothing}}, Union{Tuple{Union{String, Nothing}, Union{String, Nothing}}, Nothing}}
end
const LOADING_CACHE = Ref{Union{LoadingCache, Nothing}}(nothing)
LoadingCache() = LoadingCache(load_path(), Dict(), Dict(), Dict(), Set(), Dict(), Dict(), Dict())
| 265 | |||
| 266 | |||
# Process-global TOML parse cache, keyed by file path; accessed under
# `require_lock` via `parsed_toml`.
struct TOMLCache
    p::TOML.Parser
    d::Dict{String, CachedTOMLDict}
end
# Construct with the field's declared dict type. (Previously constructed with
# `Dict{String, Dict{String, Any}}()`, which only worked via an implicit
# `convert` of the empty dict and misstated the element type.)
const TOML_CACHE = TOMLCache(TOML.Parser(), Dict{String, CachedTOMLDict}())
| 272 | |||
# Parse `project_file` (or return its cached parse), memoized in `toml_cache`
# and guarded by `toml_lock` — the global TOML_CACHE/require_lock by default.
parsed_toml(project_file::AbstractString) = parsed_toml(project_file, TOML_CACHE, require_lock)
function parsed_toml(project_file::AbstractString, toml_cache::TOMLCache, toml_lock::ReentrantLock)
    lock(toml_lock) do
        cache = LOADING_CACHE[]
        dd = if !haskey(toml_cache.d, project_file)
            # first sighting of this file: read, parse, and remember it
            d = CachedTOMLDict(toml_cache.p, project_file)
            toml_cache.d[project_file] = d
            d.d
        else
            d = toml_cache.d[project_file]
            # We are in a require call and have already parsed this TOML file
            # assume that it is unchanged to avoid hitting disk
            if cache !== nothing && project_file in cache.require_parsed
                d.d
            else
                get_updated_dict(toml_cache.p, d)
            end
        end
        # record that this file's parse is fresh for the current load
        if cache !== nothing
            push!(cache.require_parsed, project_file)
        end
        return dd
    end
end
| 297 | |||
| 298 | ## package identification: determine unique identity of package to be loaded ## | ||
| 299 | |||
# Resolve `arg` to the path of its entry-point file, or `nothing` if it cannot
# be identified or located.
# Used by Pkg but not used in loading itself.
function find_package(arg)
    result = identify_package_env(arg)
    result === nothing && return nothing
    pkg, env = result
    return locate_package(pkg, env)
end
| 307 | |||
| 308 | """ | ||
| 309 | Base.identify_package_env(name::String)::Union{Tuple{PkgId, String}, Nothing} | ||
| 310 |     Base.identify_package_env(where::Union{Module,PkgId}, name::String)::Union{Tuple{PkgId, String}, Nothing} | ||
| 311 | |||
| 312 | Same as [`Base.identify_package`](@ref) except that the path to the environment where the package is identified | ||
| 313 | is also returned. | ||
| 314 | """ | ||
identify_package_env(where::Module, name::String) = identify_package_env(PkgId(where), name)
function identify_package_env(where::PkgId, name::String)
    cache = LOADING_CACHE[]
    if cache !== nothing
        pkg_env = get(cache.identified_where, (where, name), nothing)
        pkg_env === nothing || return pkg_env
    end
    pkg_env = nothing
    if where.name === name
        # a package always resolves its own name to itself (no environment)
        pkg_env = where, nothing
    elseif where.uuid === nothing
        pkg_env = identify_package_env(name) # ignore `where`
    else
        # `where` has a real UUID: consult each environment's manifest for the
        # dependency named `name` as seen from `where`
        for env in load_path()
            pkgid = manifest_deps_get(env, where, name)
            pkgid === nothing && continue # not found--keep looking
            if pkgid.uuid !== nothing
                pkg_env = pkgid, env # found in explicit environment--use it
            end
            break # found in implicit environment--return "not found"
        end
    end
    if cache !== nothing
        cache.identified_where[(where, name)] = pkg_env
    end
    return pkg_env
end
# Identify `name` as a direct dependency of the first environment in the load
# path that knows it, returning `(PkgId, env)` or `nothing`.
function identify_package_env(name::String)
    cache = LOADING_CACHE[]
    if cache !== nothing
        cached = get(cache.identified, name, nothing)
        cached === nothing || return cached
    end
    result = nothing
    for env in load_path()
        pkg = project_deps_get(env, name)
        pkg === nothing && continue
        result = pkg, env # found--return it
        break
    end
    cache === nothing || (cache.identified[name] = result)
    return result
end
| 361 | |||
| 362 | _nothing_or_first(x) = x === nothing ? nothing : first(x) | ||
| 363 | |||
| 364 | """ | ||
| 365 | Base.identify_package(name::String)::Union{PkgId, Nothing} | ||
| 366 | Base.identify_package(where::Union{Module,PkgId}, name::String)::Union{PkgId, Nothing} | ||
| 367 | |||
| 368 | Identify the package by its name from the current environment stack, returning | ||
| 369 | its `PkgId`, or `nothing` if it cannot be found. | ||
| 370 | |||
| 371 | If only the `name` argument is provided, it searches each environment in the | ||
| 372 | stack and its named direct dependencies. | ||
| 373 | |||
| 374 | The `where` argument provides the context from where to search for the | ||
| 375 | package: in this case it first checks if the name matches the context itself, | ||
| 376 | otherwise it searches all recursive dependencies (from the resolved manifest of | ||
| 377 | each environment) until it locates the context `where`, and from there | ||
| 378 | identifies the dependency with the corresponding name. | ||
| 379 | |||
| 380 | ```julia-repl | ||
| 381 | julia> Base.identify_package("Pkg") # Pkg is a dependency of the default environment | ||
| 382 | Pkg [44cfe95a-1eb2-52ea-b672-e2afdf69b78f] | ||
| 383 | |||
| 384 | julia> using LinearAlgebra | ||
| 385 | |||
| 386 | julia> Base.identify_package(LinearAlgebra, "Pkg") # Pkg is not a dependency of LinearAlgebra | ||
| 387 | ``` | ||
| 388 | """ | ||
# Convenience wrappers that drop the environment component of
# `identify_package_env`, returning only the `PkgId` (or `nothing`).
identify_package(where::Module, name::String) = _nothing_or_first(identify_package_env(where, name))
identify_package(where::PkgId, name::String) = _nothing_or_first(identify_package_env(where, name))
identify_package(name::String) = _nothing_or_first(identify_package_env(name))
| 392 | |||
# Locate the entry-point path for `pkg` by walking the load path, returning
# `(path, env)` where either component may be `nothing`. Search stops early at
# `stopenv` (unless an extension is being loaded/precompiled). Results are
# memoized in LOADING_CACHE when active.
function locate_package_env(pkg::PkgId, stopenv::Union{String, Nothing}=nothing)
    cache = LOADING_CACHE[]
    if cache !== nothing
        pathenv = get(cache.located, (pkg, stopenv), nothing)
        pathenv === nothing || return pathenv
    end
    path = nothing
    env′ = nothing
    if pkg.uuid === nothing
        # no UUID: only an implicit (directory) environment can provide it
        for env in load_path()
            env′ = env
            # look for the toplevel pkg `pkg.name` in this entry
            found = project_deps_get(env, pkg.name)
            if found !== nothing
                @assert found.name == pkg.name
                if found.uuid === nothing
                    # pkg.name is present in this directory or project file,
                    # return the path the entry point for the code, if it could be found
                    # otherwise, signal failure
                    path = implicit_manifest_uuid_path(env, pkg)
                    @goto done
                end
            end
            if !(loading_extension || precompiling_extension)
                stopenv == env && @goto done
            end
        end
    else
        for env in load_path()
            env′ = env
            path = manifest_uuid_path(env, pkg)
            # missing is used as a sentinel to stop looking further down in envs
            if path === missing
                path = nothing
                @goto done
            end
            if path !== nothing
                path = entry_path(path, pkg.name)
                @goto done
            end
            if !(loading_extension || precompiling_extension)
                stopenv == env && break
            end
        end
        # Allow loading of stdlibs if the name/uuid are given
        # e.g. if they have been explicitly added to the project/manifest
        mbypath = manifest_uuid_path(Sys.STDLIB, pkg)
        if mbypath isa String
            path = entry_path(mbypath, pkg.name)
            @goto done
        end
    end
    @label done
    if cache !== nothing
        cache.located[(pkg, stopenv)] = path, env′
    end
    return path, env′
end
| 451 | |||
| 452 | """ | ||
| 453 | Base.locate_package(pkg::PkgId)::Union{String, Nothing} | ||
| 454 | |||
| 455 | The path to the entry-point file for the package corresponding to the identifier | ||
| 456 | `pkg`, or `nothing` if not found. See also [`identify_package`](@ref). | ||
| 457 | |||
| 458 | ```julia-repl | ||
| 459 | julia> pkg = Base.identify_package("Pkg") | ||
| 460 | Pkg [44cfe95a-1eb2-52ea-b672-e2afdf69b78f] | ||
| 461 | |||
| 462 | julia> Base.locate_package(pkg) | ||
| 463 | "/path/to/julia/stdlib/v$(VERSION.major).$(VERSION.minor)/Pkg/src/Pkg.jl" | ||
| 464 | ``` | ||
| 465 | """ | ||
# Thin wrapper over `locate_package_env` that drops the environment component.
locate_package(pkg::PkgId, stopenv::Union{String, Nothing}=nothing)::Union{Nothing,String} =
    _nothing_or_first(locate_package_env(pkg, stopenv))
| 469 | |||
| 470 | """ | ||
| 471 | pathof(m::Module) | ||
| 472 | |||
| 473 | Return the path of the `m.jl` file that was used to `import` module `m`, | ||
| 474 | or `nothing` if `m` was not imported from a package. | ||
| 475 | |||
| 476 | Use [`dirname`](@ref) to get the directory part and [`basename`](@ref) | ||
| 477 | to get the file name part of the path. | ||
| 478 | """ | ||
function pathof(m::Module)
    @lock require_lock begin
        id = get(module_keys, m, nothing)
        if id === nothing
            return nothing
        end
        origin = get(pkgorigins, id, nothing)
        if origin === nothing || origin.path === nothing
            return nothing
        end
        # presumably rewrites recorded stdlib paths for this install — see fixup_stdlib_path
        return fixup_stdlib_path(origin.path)
    end
end
| 490 | |||
| 491 | """ | ||
| 492 | pkgdir(m::Module[, paths::String...]) | ||
| 493 | |||
| 494 | Return the root directory of the package that declared module `m`, | ||
| 495 | or `nothing` if `m` was not declared in a package. Optionally further | ||
| 496 | path component strings can be provided to construct a path within the | ||
| 497 | package root. | ||
| 498 | |||
| 499 | To get the root directory of the package that implements the current module | ||
| 500 | the form `pkgdir(@__MODULE__)` can be used. | ||
| 501 | |||
| 502 | ```julia-repl | ||
| 503 | julia> pkgdir(Foo) | ||
| 504 | "/path/to/Foo.jl" | ||
| 505 | |||
| 506 | julia> pkgdir(Foo, "src", "file.jl") | ||
| 507 | "/path/to/Foo.jl/src/file.jl" | ||
| 508 | ``` | ||
| 509 | |||
| 510 | !!! compat "Julia 1.7" | ||
| 511 | The optional argument `paths` requires at least Julia 1.7. | ||
| 512 | """ | ||
function pkgdir(m::Module, paths::String...)
    rootpath = pathof(moduleroot(m))
    rootpath === nothing && return nothing
    # the entry point lives at <root>/src/<Pkg>.jl, so strip two components
    return joinpath(dirname(dirname(rootpath)), paths...)
end
| 519 | |||
# Read the `version` entry of the project file governing `path`; `nothing`
# when there is no project file or it has no version field.
function get_pkgversion_from_path(path)
    project_file = locate_project_file(path)
    project_file isa String || return nothing
    v = get(parsed_toml(project_file), "version", nothing)
    return v === nothing ? nothing : VersionNumber(v::String)
end
| 531 | |||
| 532 | """ | ||
| 533 | pkgversion(m::Module) | ||
| 534 | |||
| 535 | Return the version of the package that imported module `m`, | ||
| 536 | or `nothing` if `m` was not imported from a package, or imported | ||
| 537 | from a package without a version field set. | ||
| 538 | |||
| 539 | The version is read from the package's Project.toml during package | ||
| 540 | load. | ||
| 541 | |||
| 542 | To get the version of the package that imported the current module | ||
| 543 | the form `pkgversion(@__MODULE__)` can be used. | ||
| 544 | |||
| 545 | !!! compat "Julia 1.9" | ||
| 546 | This function was introduced in Julia 1.9. | ||
| 547 | """ | ||
function pkgversion(m::Module)
    root = pkgdir(m)
    root === nothing && return nothing
    @lock require_lock begin
        v = get_pkgversion_from_path(root)
        # Cache the version on the package origin the first time it is seen
        origin = get(pkgorigins, PkgId(moduleroot(m)), nothing)
        if origin !== nothing && origin.version === nothing
            origin.version = v
        end
        return v
    end
end
| 561 | |||
| 562 | ## generic project & manifest API ## | ||
| 563 | |||
# Candidate file names, in priority order ("Julia"-prefixed variants win).
const project_names = ("JuliaProject.toml", "Project.toml")
const manifest_names = ("JuliaManifest.toml", "Manifest.toml")
const preferences_names = ("JuliaLocalPreferences.toml", "LocalPreferences.toml")
| 567 | |||
# Return the path of the project file inside directory `env`, or `true` when
# the directory has no project file. `true` (not `nothing`) is deliberate:
# callers such as `env_project_file` use it to mean "implicit environment".
function locate_project_file(env::String)
    for proj in project_names
        project_file = joinpath(env, proj)
        if isfile_casesensitive(project_file)
            return project_file
        end
    end
    return true # no project file: the directory is an implicit environment
end
| 577 | |||
| 578 | # classify the LOAD_PATH entry to be one of: | ||
| 579 | # - `false`: nonexistent / nothing to see here | ||
| 580 | # - `true`: `env` is an implicit environment | ||
| 581 | # - `path`: the path of an explicit project file | ||
function env_project_file(env::String)::Union{Bool,String}
    @lock require_lock begin
        cache = LOADING_CACHE[]
        if cache !== nothing
            project_file = get(cache.env_project_file, env, nothing)
            project_file === nothing || return project_file
        end
        if isdir(env)
            # directory entry: its project file, or `true` for an implicit env
            project_file = locate_project_file(env)
        elseif basename(env) in project_names && isfile_casesensitive(env)
            # the entry is itself a project file path
            project_file = env
        else
            project_file = false
        end
        if cache !== nothing
            cache.env_project_file[env] = project_file
        end
        return project_file
    end
end
| 602 | |||
# Look up `name` as a direct dependency (or the project itself) of environment
# `env`; `nothing` when `env` does not provide it.
function project_deps_get(env::String, name::String)::Union{Nothing,PkgId}
    project_file = env_project_file(env)
    if project_file isa String
        # explicit environment: consult the project file
        uuid = explicit_project_deps_get(project_file, name)
        return uuid === nothing ? nothing : PkgId(uuid, name)
    elseif project_file
        # implicit environment: scan the directory
        return implicit_project_deps_get(env, name)
    end
    return nothing
end
| 613 | |||
# Resolve dependency `name` as seen from package `where` (which must have a
# real UUID) within environment `env`; `nothing` means "not found here".
function manifest_deps_get(env::String, where::PkgId, name::String)::Union{Nothing,PkgId}
    uuid = where.uuid
    @assert uuid !== nothing
    project_file = env_project_file(env)
    if project_file isa String
        # first check if `where` names the Project itself
        proj = project_file_name_uuid(project_file, where.name)
        if proj == where
            # if `where` matches the project, use [deps] section as manifest, and stop searching
            pkg_uuid = explicit_project_deps_get(project_file, name)
            return PkgId(pkg_uuid, name)
        end
        d = parsed_toml(project_file)
        exts = get(d, "extensions", nothing)::Union{Dict{String, Any}, Nothing}
        if exts !== nothing
            # Check if `where` is an extension of the project
            # (extension PkgIds are derived from the parent uuid + ext name)
            if where.name in keys(exts) && where.uuid == uuid5(proj.uuid::UUID, where.name)
                # Extensions can load weak deps...
                weakdeps = get(d, "weakdeps", nothing)::Union{Dict{String, Any}, Nothing}
                if weakdeps !== nothing
                    wuuid = get(weakdeps, name, nothing)::Union{String, Nothing}
                    if wuuid !== nothing
                        return PkgId(UUID(wuuid), name)
                    end
                end
                # ... and they can load same deps as the project itself
                mby_uuid = explicit_project_deps_get(project_file, name)
                mby_uuid === nothing || return PkgId(mby_uuid, name)
            end
        end
        # look for manifest file and `where` stanza
        return explicit_manifest_deps_get(project_file, where, name)
    elseif project_file
        # if env names a directory, search it
        return implicit_manifest_deps_get(env, where, name)
    end
    return nothing
end
| 652 | |||
# Look up the source path recorded for `pkg` in environment `env`.
# `missing` is a sentinel meaning "stop searching lower environments"
# (see locate_package_env); `nothing` means "not found in this env".
function manifest_uuid_path(env::String, pkg::PkgId)::Union{Nothing,String,Missing}
    project_file = env_project_file(env)
    if project_file isa String
        proj = project_file_name_uuid(project_file, pkg.name)
        if proj == pkg
            # if `pkg` matches the project, return the project itself
            return project_file_path(project_file)
        end
        # the package may be an extension declared by this project
        mby_ext = project_file_ext_path(project_file, pkg.name)
        mby_ext === nothing || return mby_ext
        # look for manifest file and `where` stanza
        return explicit_manifest_uuid_path(project_file, pkg)
    elseif project_file
        # if env names a directory, search it
        return implicit_manifest_uuid_path(env, pkg)
    end
    return nothing
end
| 671 | |||
| 672 | |||
# Entry point for extension `extname` of the project at `project_path`:
# prefer `ext/<name>/<name>.jl`, otherwise fall back to `ext/<name>.jl`.
function find_ext_path(project_path::String, extname::String)
    nested = joinpath(project_path, "ext", extname, extname * ".jl")
    isfile(nested) && return nested
    return joinpath(project_path, "ext", extname * ".jl")
end
| 678 | |||
# Path of extension `name` if the project declares it under [extensions];
# `nothing` otherwise.
function project_file_ext_path(project_file::String, name::String)
    d = parsed_toml(project_file)
    root = project_file_path(project_file)
    exts = get(d, "extensions", nothing)::Union{Dict{String, Any}, Nothing}
    exts === nothing && return nothing
    haskey(exts, name) || return nothing
    return find_ext_path(root, name)
end
| 690 | |||
| 691 | # find project file's top-level UUID entry (or nothing) | ||
# PkgId of the project itself: its `name`/`uuid` entries, defaulting to the
# given `name` and a path-derived dummy UUID when absent.
function project_file_name_uuid(project_file::String, name::String)::PkgId
    d = parsed_toml(project_file)
    raw_uuid = get(d, "uuid", nothing)::Union{String, Nothing}
    pkg_uuid = raw_uuid === nothing ? dummy_uuid(project_file) : UUID(raw_uuid)
    return PkgId(pkg_uuid, get(d, "name", name)::String)
end
| 699 | |||
# Root directory of the project: its `path` entry (empty by default) resolved
# relative to the project file's directory.
function project_file_path(project_file::String)
    d = parsed_toml(project_file)
    return joinpath(dirname(project_file), get(d, "path", "")::String)
end
| 704 | |||
| 705 | # find project file's corresponding manifest file | ||
function project_file_manifest_path(project_file::String)::Union{Nothing,String}
    @lock require_lock begin
        cache = LOADING_CACHE[]
        if cache !== nothing
            # `missing` distinguishes "not cached yet" from a cached `nothing`
            manifest_path = get(cache.project_file_manifest_path, project_file, missing)
            manifest_path === missing || return manifest_path
        end
        dir = abspath(dirname(project_file))
        d = parsed_toml(project_file)
        # an explicit `manifest = "..."` entry overrides the default names
        explicit_manifest = get(d, "manifest", nothing)::Union{String, Nothing}
        manifest_path = nothing
        if explicit_manifest !== nothing
            manifest_file = normpath(joinpath(dir, explicit_manifest))
            if isfile_casesensitive(manifest_file)
                manifest_path = manifest_file
            end
        end
        if manifest_path === nothing
            # otherwise take the first standard manifest name that exists
            for mfst in manifest_names
                manifest_file = joinpath(dir, mfst)
                if isfile_casesensitive(manifest_file)
                    manifest_path = manifest_file
                    break
                end
            end
        end
        if cache !== nothing
            cache.project_file_manifest_path[project_file] = manifest_path
        end
        return manifest_path
    end
end
| 738 | |||
| 739 | # given a directory (implicit env from LOAD_PATH) and a name, | ||
| 740 | # check if it is an implicit package | ||
function entry_point_and_project_file_inside(dir::String, name::String)::Union{Tuple{Nothing,Nothing},Tuple{String,Nothing},Tuple{String,String}}
    entry = normpath(joinpath(dir, "src", "$name.jl"))
    isfile_casesensitive(entry) || return nothing, nothing
    # report the first project file found alongside the entry point, if any
    for proj in project_names
        candidate = normpath(joinpath(dir, proj))
        if isfile_casesensitive(candidate)
            return entry, candidate
        end
    end
    return entry, nothing
end
| 751 | |||
| 752 | # given a project directory (implicit env from LOAD_PATH) and a name, | ||
| 753 | # find an entry point for `name`, and see if it has an associated project file | ||
function entry_point_and_project_file(dir::String, name::String)::Union{Tuple{Nothing,Nothing},Tuple{String,Nothing},Tuple{String,String}}
    # bare file: <dir>/<name>.jl never has its own project file
    path = normpath(joinpath(dir, "$name.jl"))
    isfile_casesensitive(path) && return path, nothing
    # package directories: <dir>/<name>/src/<name>.jl, then <dir>/<name>.jl/src/<name>.jl
    for subdir in (joinpath(dir, name), joinpath(dir, name * ".jl"))
        path, project_file = entry_point_and_project_file_inside(subdir, name)
        path === nothing || return path, project_file
    end
    return nothing, nothing
end
| 765 | |||
| 766 | # given a path and a name, return the entry point | ||
# given a path and a name, return the entry point source file, or `nothing` if not found
function entry_path(path::String, name::String)::Union{Nothing,String}
    # `path` may already be the source file itself...
    if isfile_casesensitive(path)
        return normpath(path)
    end
    # ...or a package directory containing src/<name>.jl
    candidate = normpath(joinpath(path, "src", "$name.jl"))
    return isfile_casesensitive(candidate) ? candidate : nothing
end
| 773 | |||
| 774 | ## explicit project & manifest API ## | ||
| 775 | |||
| 776 | # find project file root or deps `name => uuid` mapping | ||
| 777 | # return `nothing` if `name` is not found | ||
# find project file root or deps `name => uuid` mapping
# return `nothing` if `name` is not found
function explicit_project_deps_get(project_file::String, name::String)::Union{Nothing,UUID}
    toml = parsed_toml(project_file)
    # the project itself may be the package being asked about
    if get(toml, "name", nothing)::Union{String, Nothing} === name
        uuid_str = get(toml, "uuid", nothing)::Union{String, Nothing}
        # projects without an explicit uuid get a path-derived dummy uuid
        return uuid_str === nothing ? dummy_uuid(project_file) : UUID(uuid_str)
    end
    # otherwise look it up in the project's [deps] table
    deps = get(toml, "deps", nothing)::Union{Dict{String, Any}, Nothing}
    deps === nothing && return nothing
    uuid_str = get(deps, name, nothing)::Union{String, Nothing}
    return uuid_str === nothing ? nothing : UUID(uuid_str)
end
| 792 | |||
# a v1 manifest has no `manifest_format` key: every top-level key is a dep name
function is_v1_format_manifest(raw_manifest::Dict{String})
    haskey(raw_manifest, "manifest_format") || return true
    mf = raw_manifest["manifest_format"]
    # guard the off-chance that an old-format manifest has a dep literally
    # named "manifest_format" — its value is then an entry dict with a "uuid"
    return mf isa Dict{String} && haskey(mf, "uuid")
end
| 805 | |||
| 806 | # returns a deps list for both old and new manifest formats | ||
# returns a deps list for both old and new manifest formats
function get_deps(raw_manifest::Dict)
    # v1 manifests are themselves the name => entries mapping
    is_v1_format_manifest(raw_manifest) && return raw_manifest
    # v2+: deps live under an optional "deps" table (absent when there are none)
    return get(Dict{String, Any}, raw_manifest, "deps")::Dict{String, Any}
end
| 815 | |||
| 816 | # find `where` stanza and return the PkgId for `name` | ||
| 817 | # return `nothing` if it did not find `where` (indicating caller should continue searching) | ||
# find `where` stanza in the manifest and return the PkgId for `name`.
# Returns `nothing` if `where` was not found (caller keeps searching LOAD_PATH).
function explicit_manifest_deps_get(project_file::String, where::PkgId, name::String)::Union{Nothing,PkgId}
    manifest_file = project_file_manifest_path(project_file)
    manifest_file === nothing && return nothing # manifest not found--keep searching LOAD_PATH
    d = get_deps(parsed_toml(manifest_file))
    found_where = false
    found_name = false
    # scan every manifest entry looking for the stanza whose uuid matches `where`
    for (dep_name, entries) in d
        entries::Vector{Any}
        for entry in entries
            entry = entry::Dict{String, Any}
            uuid = get(entry, "uuid", nothing)::Union{String, Nothing}
            uuid === nothing && continue
            if UUID(uuid) === where.uuid
                found_where = true
                # deps is either a list of names (deps = ["DepA", "DepB"]) or
                # a table of entries (deps = {"DepA" = "6ea...", "DepB" = "55d..."}
                deps = get(entry, "deps", nothing)::Union{Vector{String}, Dict{String, Any}, Nothing}
                if deps isa Vector{String}
                    # name-only form: remember whether `name` occurs and resolve
                    # its uuid from the manifest top level after the loop
                    found_name = name in deps
                    break
                elseif deps isa Dict{String, Any}
                    deps = deps::Dict{String, Any}
                    for (dep, uuid) in deps
                        uuid::String
                        if dep === name
                            return PkgId(UUID(uuid), name)
                        end
                    end
                end
            else # Check for extensions
                # `where` may be an extension of this entry's package: its uuid is
                # derived as uuid5(parent_uuid, ext_name)
                extensions = get(entry, "extensions", nothing)
                if extensions !== nothing
                    if haskey(extensions, where.name) && where.uuid == uuid5(UUID(uuid), where.name)
                        found_where = true
                        # an extension implicitly depends on its parent package
                        if name == dep_name
                            return PkgId(UUID(uuid), name)
                        end
                        exts = extensions[where.name]::Union{String, Vector{String}}
                        if (exts isa String && name == exts) || (exts isa Vector{String} && name in exts)
                            # `name` is one of the extension's triggers; resolve it
                            # through the parent's weakdeps (list or table form)
                            weakdeps = get(entry, "weakdeps", nothing)::Union{Vector{String}, Dict{String, Any}, Nothing}
                            if weakdeps !== nothing
                                if weakdeps isa Vector{String}
                                    found_name = name in weakdeps
                                    break
                                elseif weakdeps isa Dict{String, Any}
                                    weakdeps = weakdeps::Dict{String, Any}
                                    for (dep, uuid) in weakdeps
                                        uuid::String
                                        if dep === name
                                            return PkgId(UUID(uuid), name)
                                        end
                                    end
                                end
                            end
                        end
                        # `name` is not an ext, do standard lookup as if this was the parent
                        return identify_package(PkgId(UUID(uuid), dep_name), name)
                    end
                end
            end
        end
    end
    found_where || return nothing
    found_name || return PkgId(name)
    # Only reach here if deps was not a dict which mean we have a unique name for the dep
    name_deps = get(d, name, nothing)::Union{Nothing, Vector{Any}}
    if name_deps === nothing || length(name_deps) != 1
        error("expected a single entry for $(repr(name)) in $(repr(project_file))")
    end
    entry = first(name_deps::Vector{Any})::Dict{String, Any}
    uuid = get(entry, "uuid", nothing)::Union{String, Nothing}
    uuid === nothing && return nothing
    return PkgId(UUID(uuid), name)
end
| 892 | |||
| 893 | # find `uuid` stanza, return the corresponding path | ||
# find the manifest stanza for `pkg` (by uuid) and return the corresponding source path.
# Returns `nothing` when this env has no answer, `missing` to stop the search entirely.
function explicit_manifest_uuid_path(project_file::String, pkg::PkgId)::Union{Nothing,String,Missing}
    manifest_file = project_file_manifest_path(project_file)
    manifest_file === nothing && return nothing # no manifest, skip env

    d = get_deps(parsed_toml(manifest_file))
    # first pass: a regular package entry keyed directly by name
    entries = get(d, pkg.name, nothing)::Union{Nothing, Vector{Any}}
    if entries !== nothing
        for entry in entries
            entry = entry::Dict{String, Any}
            uuid = get(entry, "uuid", nothing)::Union{Nothing, String}
            uuid === nothing && continue
            if UUID(uuid) === pkg.uuid
                return explicit_manifest_entry_path(manifest_file, pkg, entry)
            end
        end
    end
    # Extensions
    # second pass: `pkg` may be an extension of some package in this manifest;
    # its uuid is uuid5(parent_uuid, ext_name), and its source lives in the
    # parent's source tree (resolved by find_ext_path)
    for (name, entries) in d
        entries = entries::Vector{Any}
        for entry in entries
            uuid = get(entry, "uuid", nothing)::Union{Nothing, String}
            extensions = get(entry, "extensions", nothing)::Union{Nothing, Dict{String, Any}}
            if extensions !== nothing && haskey(extensions, pkg.name) && uuid !== nothing && uuid5(UUID(uuid), pkg.name) == pkg.uuid
                parent_path = locate_package(PkgId(UUID(uuid), name))
                if parent_path === nothing
                    error("failed to find source of parent package: \"$name\"")
                end
                p = normpath(dirname(parent_path), "..")
                return find_ext_path(p, pkg.name)
            end
        end
    end
    return nothing
end
| 928 | |||
# resolve a manifest entry for `pkg` to the on-disk location of its source tree:
# a String path, `nothing` (not resolvable here), or `missing` (stop looking)
function explicit_manifest_entry_path(manifest_file::String, pkg::PkgId, entry::Dict{String,Any})
    # path-based (dev'd) entries: relative to the manifest's own directory
    entry_relpath = get(entry, "path", nothing)::Union{Nothing, String}
    if entry_relpath !== nothing
        return normpath(abspath(dirname(manifest_file), entry_relpath))
    end
    tree_hash = get(entry, "git-tree-sha1", nothing)::Union{Nothing, String}
    if tree_hash === nothing
        # no tree hash recorded: may be a stdlib, so try the stdlib environment
        stdlib_path = manifest_uuid_path(Sys.STDLIB, pkg)
        stdlib_path isa String && return entry_path(stdlib_path, pkg.name)
        return nothing
    end
    sha = SHA1(tree_hash)
    uuid = pkg.uuid::UUID # checked within `explicit_manifest_uuid_path`
    # Keep the 4 since it used to be the default
    for slug in (version_slug(uuid, sha), version_slug(uuid, sha, 4))
        for depot in DEPOT_PATH
            candidate = joinpath(depot, "packages", pkg.name, slug)
            ispath(candidate) && return abspath(candidate)
        end
    end
    # no depot contains the package, return missing to stop looking
    return missing
end
| 955 | |||
| 956 | ## implicit project & manifest API ## | ||
| 957 | |||
| 958 | # look for an entry point for `name` from a top-level package (no environment) | ||
| 959 | # otherwise return `nothing` to indicate the caller should keep searching | ||
# look for an entry point for `name` from a top-level package (no environment)
# otherwise return `nothing` to indicate the caller should keep searching
function implicit_project_deps_get(dir::String, name::String)::Union{Nothing,PkgId}
    entry, proj_file = entry_point_and_project_file(dir, name)
    if proj_file === nothing
        # no project file: a bare entry point is a uuid-less top-level package
        return entry === nothing ? nothing : PkgId(name)
    end
    pkgid = project_file_name_uuid(proj_file, name)
    # the project file must agree about the package name
    return pkgid.name == name ? pkgid : nothing
end
| 970 | |||
| 971 | # look for an entry-point for `name`, check that UUID matches | ||
| 972 | # if there's a project file, look up `name` in its deps and return that | ||
| 973 | # otherwise return `nothing` to indicate the caller should keep searching | ||
# look for an entry-point for `name`, check that UUID matches
# if there's a project file, look up `name` in its deps and return that
# otherwise return `nothing` to indicate the caller should keep searching
function implicit_manifest_deps_get(dir::String, where::PkgId, name::String)::Union{Nothing,PkgId}
    @assert where.uuid !== nothing
    # a project file is mandatory for a package with a uuid
    _, proj_file = entry_point_and_project_file(dir, where.name)
    proj_file === nothing && return nothing
    # verify that this project file really describes `where`
    project_file_name_uuid(proj_file, where.name) == where || return nothing
    # this is the correct project, so stop searching here regardless of the result
    dep_uuid = explicit_project_deps_get(proj_file, name)
    return PkgId(dep_uuid, name)
end
| 984 | |||
| 985 | # look for an entry-point for `pkg` and return its path if UUID matches | ||
# look for an entry-point for `pkg` and return its path if UUID matches
function implicit_manifest_uuid_path(dir::String, pkg::PkgId)::Union{Nothing,String}
    entry, proj_file = entry_point_and_project_file(dir, pkg.name)
    if proj_file === nothing
        # without a project file, only a uuid-less pkg can match the entry point
        return pkg.uuid === nothing ? entry : nothing
    end
    # with a project file, both name and uuid must match
    return project_file_name_uuid(proj_file, pkg.name) == pkg ? entry : nothing
end
| 996 | |||
| 997 | ## other code loading functionality ## | ||
| 998 | |||
# resolve `path` to a source file: keep it when absolute or already present,
# otherwise fall back to looking inside the installed `base/` sources
function find_source_file(path::AbstractString)
    if isabspath(path) || isfile(path)
        return path
    end
    candidate = joinpath(Sys.BINDIR, DATAROOTDIR, "julia", "base", path)
    isfile(candidate) || return nothing
    return normpath(candidate)
end
| 1004 | |||
# Return `(cache_subdir, entry_file_stem)` for `pkg` relative to a depot root:
# uuid-less packages cache as `compiled/vX.Y/<name>.ji`, packages with a uuid
# as `compiled/vX.Y/<name>/<slug>_*.ji` (see `find_all_in_cache_path`).
function cache_file_entry(pkg::PkgId)
    subdir = joinpath(
        "compiled",
        "v$(VERSION.major).$(VERSION.minor)",
        pkg.uuid === nothing ? "" : pkg.name)
    stem = pkg.uuid === nothing ? pkg.name : package_slug(pkg.uuid)
    return subdir, stem
end
| 1010 | |||
# collect every plausible cache file for `pkg` across all depots,
# returned newest-first so code loading tries the freshest candidate first
function find_all_in_cache_path(pkg::PkgId)
    found = String[]
    cachedir, stem = cache_file_entry(pkg)
    for depot in DEPOT_PATH
        dir = joinpath(depot, cachedir)
        isdir(dir) || continue
        for file in readdir(dir, sort = false) # no sort given we sort later
            # uuid-less packages use exactly `<stem>.ji`;
            # packages with a uuid use `<stem>_<suffix>.ji`
            matches = pkg.uuid === nothing ?
                file == stem * ".ji" :
                (startswith(file, stem * "_") && endswith(file, ".ji"))
            matches || continue
            full = joinpath(dir, file)
            isfile_casesensitive(full) && push!(found, full)
        end
    end
    if length(found) > 1
        # allocating the sort vector is less expensive than using sort!(.. by=mtime),
        # which would call the relatively slow mtime multiple times per path
        order = sortperm(mtime.(found), rev = true)
        return found[order]
    end
    return found
end
| 1035 | |||
# map between a `.ji` cache file and its sibling object-cache (pkgimage) file,
# which shares the basename but carries the platform's dynamic-library suffix
ocachefile_from_cachefile(cachefile) = chopsuffix(cachefile, ".ji") * "." * Base.Libc.dlext
cachefile_from_ocachefile(ocachefile) = chopsuffix(ocachefile, ".$(Base.Libc.dlext)") * ".ji"
| 1038 | |||
| 1039 | |||
# use an Int counter so that nested @time_imports calls all remain open
# (> 0 means import timing is active; see `_include_from_serialized`)
const TIMING_IMPORTS = Threads.Atomic{Int}(0)
| 1042 | |||
| 1043 | # these return either the array of modules loaded from the path / content given | ||
| 1044 | # or an Exception that describes why it couldn't be loaded | ||
| 1045 | # and it reconnects the Base.Docs.META | ||
# Restore `pkg` from its serialized cache (object cache when `ocachepath` is given,
# otherwise the `.ji` file at `path`), with `depmods` supplying the dependency modules.
# Returns the restored root Module on success, or an Exception describing the failure.
# Also prints per-package timing when @time_imports is active.
function _include_from_serialized(pkg::PkgId, path::String, ocachepath::Union{Nothing, String}, depmods::Vector{Any})
    assert_havelock(require_lock)
    timing_imports = TIMING_IMPORTS[] > 0
    try
        if timing_imports
            # snapshot wall clock and compile-time counters before restoring
            t_before = time_ns()
            cumulative_compile_timing(true)
            t_comp_before = cumulative_compile_time_ns()
        end

        if ocachepath !== nothing
            @debug "Loading object cache file $ocachepath for $pkg"
            sv = ccall(:jl_restore_package_image_from_file, Any, (Cstring, Any, Cint, Cstring), ocachepath, depmods, false, pkg.name)
        else
            @debug "Loading cache file $path for $pkg"
            sv = ccall(:jl_restore_incremental, Any, (Cstring, Any, Cint, Cstring), path, depmods, false, pkg.name)
        end
        if isa(sv, Exception)
            return sv
        end

        restored = register_restored_modules(sv, pkg, path)

        # find the root module matching `pkg` among the restored modules
        for M in restored
            M = M::Module
            if parentmodule(M) === M && PkgId(M) == pkg
                if timing_imports
                    elapsed = round((time_ns() - t_before) / 1e6, digits = 1)
                    comp_time, recomp_time = cumulative_compile_time_ns() .- t_comp_before
                    print(lpad(elapsed, 9), " ms ")
                    # show "Parent → Ext" when this package is an extension
                    parentid = get(EXT_PRIMED, pkg, nothing)
                    if parentid !== nothing
                        print(parentid.name, " → ")
                    end
                    print(pkg.name)
                    if comp_time > 0
                        printstyled(" ", Ryu.writefixed(Float64(100 * comp_time / (elapsed * 1e6)), 2), "% compilation time", color = Base.info_color())
                    end
                    if recomp_time > 0
                        perc = Float64(100 * recomp_time / comp_time)
                        printstyled(" (", perc < 1 ? "<1" : Ryu.writefixed(perc, 0), "% recompilation)", color = Base.warn_color())
                    end
                    println()
                end
                return M
            end
        end
        return ErrorException("Required dependency $pkg failed to load from a cache file.")

    finally
        timing_imports && cumulative_compile_timing(false)
    end
end
| 1099 | |||
# Register the modules restored from a cache file (`sv` as returned by the restore
# ccall): hook up docs, register root modules, record the cache path, and run the
# deferred `__init__` callbacks. Returns the vector of restored modules.
function register_restored_modules(sv::SimpleVector, pkg::PkgId, path::String)
    # This function is also used by PkgCacheInspector.jl
    restored = sv[1]::Vector{Any}
    for M in restored
        M = M::Module
        if isdefined(M, Base.Docs.META) && getfield(M, Base.Docs.META) !== nothing
            push!(Base.Docs.modules, M)
        end
        if parentmodule(M) === M
            register_root_module(M)
        end
    end

    # Register this cache path now - If Requires.jl is loaded, Revise may end
    # up looking at the cache path during the init callback.
    get!(PkgOrigin, pkgorigins, pkg).cachepath = path

    inits = sv[2]::Vector{Any}
    if !isempty(inits)
        unlock(require_lock) # temporarily _unlock_ during these callbacks
        try
            for (i, mod) in pairs(inits)
                run_module_init(mod, i)
            end
        finally
            lock(require_lock)
        end
    end
    return restored
end
| 1130 | |||
# Invoke `mod.__init__` via the runtime, and when @time_imports is active,
# print a timing line for the init call (including compile/recompile fractions).
function run_module_init(mod::Module, i::Int=1)
    # `i` informs ordering for the `@time_imports` report formatting
    if TIMING_IMPORTS[] == 0
        # fast path: no timing requested
        ccall(:jl_init_restored_module, Cvoid, (Any,), mod)
    else
        if isdefined(mod, :__init__)
            # tree-drawing connector: first init gets "┌", later ones "├"
            connector = i > 1 ? "├" : "┌"
            printstyled(" $connector ", color = :light_black)

            elapsedtime = time_ns()
            cumulative_compile_timing(true)
            compile_elapsedtimes = cumulative_compile_time_ns()

            ccall(:jl_init_restored_module, Cvoid, (Any,), mod)

            elapsedtime = (time_ns() - elapsedtime) / 1e6
            cumulative_compile_timing(false);
            comp_time, recomp_time = (cumulative_compile_time_ns() .- compile_elapsedtimes) ./ 1e6

            print(round(elapsedtime, digits=1), " ms $mod.__init__() ")
            if comp_time > 0
                printstyled(Ryu.writefixed(Float64(100 * comp_time / elapsedtime), 2), "% compilation time", color = Base.info_color())
            end
            if recomp_time > 0
                perc = Float64(100 * recomp_time / comp_time)
                printstyled(" (", perc < 1 ? "<1" : Ryu.writefixed(perc, 0), "% recompilation)", color = Base.warn_color())
            end
            println()
        end
    end
end
| 1162 | |||
# Notify listeners that `modkey` finished loading: first trigger any extensions
# waiting on it, then run the registered `package_callbacks`. The require lock is
# released around the callbacks so they may themselves load packages.
function run_package_callbacks(modkey::PkgId)
    run_extension_callbacks(modkey)
    assert_havelock(require_lock)
    unlock(require_lock)
    try
        for callback in package_callbacks
            invokelatest(callback, modkey)
        end
    catch
        # Try to continue loading if a callback errors
        errs = current_exceptions()
        @error "Error during package callback" exception=errs
    finally
        lock(require_lock)
    end
    nothing
end
| 1180 | |||
| 1181 | |||
| 1182 | ############## | ||
| 1183 | # Extensions # | ||
| 1184 | ############## | ||
| 1185 | |||
# Bookkeeping record for an extension waiting on its trigger packages.
mutable struct ExtensionId
    const id::PkgId
    const parentid::PkgId # just need the name, for printing
    ntriggers::Int # how many more packages must be defined until this is loaded
end

# Global registries guarded by `require_lock`:
const EXT_PRIMED = Dict{PkgId, PkgId}() # Extension -> Parent
const EXT_DORMITORY = Dict{PkgId,Vector{ExtensionId}}() # Trigger -> Extensions that can be triggered by it
const EXT_DORMITORY_FAILED = ExtensionId[] # extensions whose load errored; see `retry_load_extensions`
| 1195 | |||
# register the extensions declared by `pkg` (found via its defining environment)
# so they load once their trigger packages do; uuid-less packages have none
function insert_extension_triggers(pkg::PkgId)
    pkg.uuid === nothing && return
    located = locate_package_env(pkg)
    located === nothing && return
    path, env = located
    (path === nothing || env === nothing) && return
    insert_extension_triggers(env, pkg)
end
| 1206 | |||
# Find `pkg`'s `weakdeps`/`extensions` declarations in environment `env`
# (project file first, then manifest) and prime the extension trigger tables.
function insert_extension_triggers(env::String, pkg::PkgId)::Union{Nothing,Missing}
    project_file = env_project_file(env)
    if project_file isa String
        # Look in project for extensions to insert
        proj_pkg = project_file_name_uuid(project_file, pkg.name)
        if pkg == proj_pkg
            d_proj = parsed_toml(project_file)
            weakdeps = get(d_proj, "weakdeps", nothing)::Union{Nothing, Vector{String}, Dict{String,Any}}
            extensions = get(d_proj, "extensions", nothing)::Union{Nothing, Dict{String, Any}}
            extensions === nothing && return
            weakdeps === nothing && return
            if weakdeps isa Dict{String, Any}
                return _insert_extension_triggers(pkg, extensions, weakdeps)
            end
        end

        # Now look in manifest
        manifest_file = project_file_manifest_path(project_file)
        manifest_file === nothing && return
        d = get_deps(parsed_toml(manifest_file))
        for (dep_name, entries) in d
            entries::Vector{Any}
            for entry in entries
                entry = entry::Dict{String, Any}
                uuid = get(entry, "uuid", nothing)::Union{String, Nothing}
                uuid === nothing && continue
                if UUID(uuid) == pkg.uuid
                    weakdeps = get(entry, "weakdeps", nothing)::Union{Nothing, Vector{String}, Dict{String,Any}}
                    extensions = get(entry, "extensions", nothing)::Union{Nothing, Dict{String, Any}}
                    extensions === nothing && return
                    weakdeps === nothing && return
                    if weakdeps isa Dict{String, Any}
                        return _insert_extension_triggers(pkg, extensions, weakdeps)
                    end

                    # weakdeps is a name-only list: build the name => uuid table by
                    # resolving each weakdep against the manifest's own entries
                    # (inner dep_name/entries/entry/uuid deliberately shadow the outer ones)
                    d_weakdeps = Dict{String, Any}()
                    for (dep_name, entries) in d
                        dep_name in weakdeps || continue
                        entries::Vector{Any}
                        if length(entries) != 1
                            error("expected a single entry for $(repr(dep_name)) in $(repr(project_file))")
                        end
                        entry = first(entries)::Dict{String, Any}
                        uuid = entry["uuid"]::String
                        d_weakdeps[dep_name] = uuid
                    end
                    @assert length(d_weakdeps) == length(weakdeps)
                    return _insert_extension_triggers(pkg, extensions, d_weakdeps)
                end
            end
        end
    end
    return nothing
end
| 1261 | |||
# Prime each extension of `parent`: record it in EXT_PRIMED and enqueue it in
# EXT_DORMITORY under the parent and under each not-yet-loaded trigger.
# `ntriggers` counts parent + triggers still outstanding; it reaches 0 (and the
# extension loads) in `run_extension_callbacks` once all of them are loaded.
function _insert_extension_triggers(parent::PkgId, extensions::Dict{String, Any}, weakdeps::Dict{String, Any})
    for (ext, triggers) in extensions
        triggers = triggers::Union{String, Vector{String}}
        triggers isa String && (triggers = [triggers])
        # extension uuid is derived deterministically from the parent's uuid
        id = PkgId(uuid5(parent.uuid, ext), ext)
        if id in keys(EXT_PRIMED) || haskey(Base.loaded_modules, id)
            continue # extension is already primed or loaded, don't add it again
        end
        EXT_PRIMED[id] = parent
        gid = ExtensionId(id, parent, 1 + length(triggers))
        trigger1 = get!(Vector{ExtensionId}, EXT_DORMITORY, parent)
        push!(trigger1, gid)
        for trigger in triggers
            # TODO: Better error message if this lookup fails?
            uuid_trigger = UUID(weakdeps[trigger]::String)
            trigger_id = PkgId(uuid_trigger, trigger)
            if !haskey(Base.loaded_modules, trigger_id) || haskey(package_locks, trigger_id)
                # trigger not fully loaded yet: wait for it
                trigger1 = get!(Vector{ExtensionId}, EXT_DORMITORY, trigger_id)
                push!(trigger1, gid)
            else
                # trigger already loaded: count it as satisfied now
                gid.ntriggers -= 1
            end
        end
    end
end
| 1287 | |||
# true while an extension is being loaded (toggled in `run_extension_callbacks`)
loading_extension::Bool = false
# NOTE(review): not set anywhere in this chunk — presumably toggled during
# extension precompilation elsewhere; confirm against the rest of the file
precompiling_extension::Bool = false
# Actually load the extension `extid`; returns `true` on success, `false` when
# loading errored (the error is logged, not rethrown, so loading can continue).
function run_extension_callbacks(extid::ExtensionId)
    assert_havelock(require_lock)
    succeeded = try
        # Used by Distributed to now load extensions in the package callback
        global loading_extension = true
        _require_prelocked(extid.id)
        @debug "Extension $(extid.id.name) of $(extid.parentid.name) loaded"
        true
    catch
        # Try to continue loading if loading an extension errors
        errs = current_exceptions()
        @error "Error during loading of extension $(extid.id.name) of $(extid.parentid.name), \
        use `Base.retry_load_extensions()` to retry." exception=errs
        false
    finally
        global loading_extension = false
    end
    return succeeded
end
| 1309 | |||
# Called when `pkgid` finishes loading: decrement the trigger count of every
# extension waiting on it, and load any extension whose count reaches zero.
function run_extension_callbacks(pkgid::PkgId)
    assert_havelock(require_lock)
    # take ownership of extids that depend on this pkgid
    extids = pop!(EXT_DORMITORY, pkgid, nothing)
    extids === nothing && return
    for extid in extids
        if extid.ntriggers > 0
            # indicate pkgid is loaded
            extid.ntriggers -= 1
        end
        if extid.ntriggers < 0
            # indicate pkgid is loaded
            extid.ntriggers += 1
            succeeded = false
        else
            succeeded = true
        end
        if extid.ntriggers == 0
            # actually load extid, now that all dependencies are met,
            # and record the result
            succeeded = succeeded && run_extension_callbacks(extid)
            succeeded || push!(EXT_DORMITORY_FAILED, extid)
        end
    end
    return
end
| 1336 | |||
| 1337 | """ | ||
| 1338 | retry_load_extensions() | ||
| 1339 | |||
| 1340 | Loads all the (not yet loaded) extensions that have their extension-dependencies loaded. | ||
| 1341 | This is used in cases where the automatic loading of an extension failed | ||
| 1342 | due to some problem with the extension. Instead of restarting the Julia session, | ||
| 1343 | the extension can be fixed, and this function run. | ||
| 1344 | """ | ||
| 1345 | function retry_load_extensions() | ||
| 1346 | @lock require_lock begin | ||
| 1347 | # this copy is desired since run_extension_callbacks will release this lock | ||
| 1348 | # so this can still mutate the list to drop successful ones | ||
| 1349 | failed = copy(EXT_DORMITORY_FAILED) | ||
| 1350 | empty!(EXT_DORMITORY_FAILED) | ||
| 1351 | filter!(failed) do extid | ||
| 1352 | return !run_extension_callbacks(extid) | ||
| 1353 | end | ||
| 1354 | prepend!(EXT_DORMITORY_FAILED, failed) | ||
| 1355 | end | ||
| 1356 | return | ||
| 1357 | end | ||
| 1358 | |||
| 1359 | """ | ||
| 1360 | get_extension(parent::Module, extension::Symbol) | ||
| 1361 | |||
| 1362 | Return the module for `extension` of `parent` or return `nothing` if the extension is not loaded. | ||
| 1363 | """ | ||
| 1364 | get_extension(parent::Module, ext::Symbol) = get_extension(PkgId(parent), ext) | ||
| 1365 | function get_extension(parentid::PkgId, ext::Symbol) | ||
| 1366 | parentid.uuid === nothing && return nothing | ||
| 1367 | extid = PkgId(uuid5(parentid.uuid, string(ext)), string(ext)) | ||
| 1368 | return get(loaded_modules, extid, nothing) | ||
| 1369 | end | ||
| 1370 | |||
# End extensions

# should sync with the types of arguments of `stale_cachefile`
# layout: (pkg, build_id, source path, cache path to try) — see `isprecompiled`
const StaleCacheKey = Tuple{Base.PkgId, UInt128, String, String}
| 1375 | |||
| 1376 | """ | ||
| 1377 | Base.isprecompiled(pkg::PkgId; ignore_loaded::Bool=false) | ||
| 1378 | |||
| 1379 | Returns whether a given PkgId within the active project is precompiled. | ||
| 1380 | |||
| 1381 | By default this check observes the same approach that code loading takes | ||
| 1382 | with respect to when different versions of dependencies are currently loaded | ||
| 1383 | to that which is expected. To ignore loaded modules and answer as if in a | ||
| 1384 | fresh julia session specify `ignore_loaded=true`. | ||
| 1385 | |||
| 1386 | !!! compat "Julia 1.10" | ||
| 1387 | This function requires at least Julia 1.10. | ||
| 1388 | """ | ||
| 1389 | function isprecompiled(pkg::PkgId; | ||
| 1390 | ignore_loaded::Bool=false, | ||
| 1391 | stale_cache::Dict{StaleCacheKey,Bool}=Dict{StaleCacheKey, Bool}(), | ||
| 1392 | cachepaths::Vector{String}=Base.find_all_in_cache_path(pkg), | ||
| 1393 | sourcepath::Union{String,Nothing}=Base.locate_package(pkg) | ||
| 1394 | ) | ||
| 1395 | isnothing(sourcepath) && error("Cannot locate source for $(repr(pkg))") | ||
| 1396 | for path_to_try in cachepaths | ||
| 1397 | staledeps = stale_cachefile(sourcepath, path_to_try, ignore_loaded = true) | ||
| 1398 | if staledeps === true | ||
| 1399 | continue | ||
| 1400 | end | ||
| 1401 | staledeps, _ = staledeps::Tuple{Vector{Any}, Union{Nothing, String}} | ||
| 1402 | # finish checking staledeps module graph | ||
| 1403 | for i in 1:length(staledeps) | ||
| 1404 | dep = staledeps[i] | ||
| 1405 | dep isa Module && continue | ||
| 1406 | modpath, modkey, modbuild_id = dep::Tuple{String, PkgId, UInt128} | ||
| 1407 | modpaths = find_all_in_cache_path(modkey) | ||
| 1408 | for modpath_to_try in modpaths::Vector{String} | ||
| 1409 | stale_cache_key = (modkey, modbuild_id, modpath, modpath_to_try)::StaleCacheKey | ||
| 1410 | if get!(() -> stale_cachefile(stale_cache_key...; ignore_loaded) === true, | ||
| 1411 | stale_cache, stale_cache_key) | ||
| 1412 | continue | ||
| 1413 | end | ||
| 1414 | @goto check_next_dep | ||
| 1415 | end | ||
| 1416 | @goto check_next_path | ||
| 1417 | @label check_next_dep | ||
| 1418 | end | ||
| 1419 | try | ||
| 1420 | # update timestamp of precompilation file so that it is the first to be tried by code loading | ||
| 1421 | touch(path_to_try) | ||
| 1422 | catch ex | ||
| 1423 | # file might be read-only and then we fail to update timestamp, which is fine | ||
| 1424 | ex isa IOError || rethrow() | ||
| 1425 | end | ||
| 1426 | return true | ||
| 1427 | @label check_next_path | ||
| 1428 | end | ||
| 1429 | return false | ||
| 1430 | end | ||
| 1431 | |||
| 1432 | # loads a precompile cache file, after checking stale_cachefile tests | ||
# Load `modkey` from a precompile cache matching `build_id`, after running the
# stale_cachefile checks. Returns the Module, `nothing` if the package cannot be
# located, or an ErrorException if the restored module does not match `modkey`.
function _tryrequire_from_serialized(modkey::PkgId, build_id::UInt128)
    assert_havelock(require_lock)
    loaded = nothing
    if root_module_exists(modkey)
        # already loaded in this session
        loaded = root_module(modkey)
    else
        # start_loading returns nothing when we win the race to load this module
        loaded = start_loading(modkey)
        if loaded === nothing
            try
                modpath = locate_package(modkey)
                modpath === nothing && return nothing
                set_pkgorigin_version_path(modkey, String(modpath))
                loaded = _require_search_from_serialized(modkey, String(modpath), build_id)
            finally
                # always release the loading lock, even on failure
                end_loading(modkey, loaded)
            end
            if loaded isa Module
                insert_extension_triggers(modkey)
                run_package_callbacks(modkey)
            end
        end
    end
    if !(loaded isa Module) || PkgId(loaded) != modkey
        return ErrorException("Required dependency $modkey failed to load from a cache file.")
    end
    return loaded
end
| 1460 | |||
| 1461 | # loads a precompile cache file, ignoring stale_cachefile tests | ||
| 1462 | # assuming all depmods are already loaded and everything is valid | ||
# loads a precompile cache file, ignoring stale_cachefile tests
# assuming all depmods are already loaded and everything is valid
# Replaces each (path, key, build_id) placeholder in `depmods` with the loaded
# Module before restoring; returns the Module or an ErrorException.
function _tryrequire_from_serialized(modkey::PkgId, path::String, ocachepath::Union{Nothing, String}, sourcepath::String, depmods::Vector{Any})
    assert_havelock(require_lock)
    loaded = nothing
    if root_module_exists(modkey)
        # already loaded in this session
        loaded = root_module(modkey)
    else
        # start_loading returns nothing when we win the race to load this module
        loaded = start_loading(modkey)
        if loaded === nothing
            try
                # resolve remaining dependency placeholders to their loaded Modules
                for i in 1:length(depmods)
                    dep = depmods[i]
                    dep isa Module && continue
                    _, depkey, depbuild_id = dep::Tuple{String, PkgId, UInt128}
                    @assert root_module_exists(depkey)
                    dep = root_module(depkey)
                    depmods[i] = dep
                end
                set_pkgorigin_version_path(modkey, sourcepath)
                loaded = _include_from_serialized(modkey, path, ocachepath, depmods)
            finally
                # always release the loading lock, even on failure
                end_loading(modkey, loaded)
            end
            if loaded isa Module
                insert_extension_triggers(modkey)
                run_package_callbacks(modkey)
            end
        end
    end
    if !(loaded isa Module) || PkgId(loaded) != modkey
        return ErrorException("Required dependency $modkey failed to load from a cache file.")
    end
    return loaded
end
| 1496 | |||
| 1497 | # loads a precompile cache file, ignoring stale_cachefile tests | ||
| 1498 | # load the best available (non-stale) version of all dependent modules first | ||
| 1499 | function _tryrequire_from_serialized(pkg::PkgId, path::String, ocachepath::Union{Nothing, String}) | ||
| 1500 | assert_havelock(require_lock) | ||
| 1501 | local depmodnames | ||
| 1502 | io = open(path, "r") | ||
| 1503 | try | ||
| 1504 | iszero(isvalid_cache_header(io)) && return ArgumentError("Invalid header in cache file $path.") | ||
| 1505 | _, _, depmodnames, _, _, _, clone_targets, _ = parse_cache_header(io) | ||
| 1506 | pkgimage = !isempty(clone_targets) | ||
| 1507 | if pkgimage | ||
| 1508 | ocachepath !== nothing || return ArgumentError("Expected ocachepath to be provided") | ||
| 1509 | isfile(ocachepath) || return ArgumentError("Ocachepath $ocachepath is not a file.") | ||
| 1510 | ocachepath == ocachefile_from_cachefile(path) || return ArgumentError("$ocachepath is not the expected ocachefile") | ||
| 1511 | # TODO: Check for valid clone_targets? | ||
| 1512 | isvalid_pkgimage_crc(io, ocachepath) || return ArgumentError("Invalid checksum in cache file $ocachepath.") | ||
| 1513 | else | ||
| 1514 | @assert ocachepath === nothing | ||
| 1515 | end | ||
| 1516 | isvalid_file_crc(io) || return ArgumentError("Invalid checksum in cache file $path.") | ||
| 1517 | finally | ||
| 1518 | close(io) | ||
| 1519 | end | ||
| 1520 | ndeps = length(depmodnames) | ||
| 1521 | depmods = Vector{Any}(undef, ndeps) | ||
| 1522 | for i in 1:ndeps | ||
| 1523 | modkey, build_id = depmodnames[i] | ||
| 1524 | dep = _tryrequire_from_serialized(modkey, build_id) | ||
| 1525 | if !isa(dep, Module) | ||
| 1526 | return dep | ||
| 1527 | end | ||
| 1528 | depmods[i] = dep | ||
| 1529 | end | ||
| 1530 | # then load the file | ||
| 1531 | return _include_from_serialized(pkg, path, ocachepath, depmods) | ||
| 1532 | end | ||
| 1533 | |||
| 1534 | # returns `nothing` if require found a precompile cache for this sourcepath, but couldn't load it | ||
| 1535 | # returns the set of modules restored if the cache load succeeded | ||
| 1536 | @constprop :none function _require_search_from_serialized(pkg::PkgId, sourcepath::String, build_id::UInt128) | ||
| 1537 | assert_havelock(require_lock) | ||
| 1538 | paths = find_all_in_cache_path(pkg) | ||
| 1539 | for path_to_try in paths::Vector{String} | ||
| 1540 | staledeps = stale_cachefile(pkg, build_id, sourcepath, path_to_try) | ||
| 1541 | if staledeps === true | ||
| 1542 | continue | ||
| 1543 | end | ||
| 1544 | staledeps, ocachefile = staledeps::Tuple{Vector{Any}, Union{Nothing, String}} | ||
| 1545 | # finish checking staledeps module graph | ||
| 1546 | for i in 1:length(staledeps) | ||
| 1547 | dep = staledeps[i] | ||
| 1548 | dep isa Module && continue | ||
| 1549 | modpath, modkey, modbuild_id = dep::Tuple{String, PkgId, UInt128} | ||
| 1550 | modpaths = find_all_in_cache_path(modkey) | ||
| 1551 | for modpath_to_try in modpaths | ||
| 1552 | modstaledeps = stale_cachefile(modkey, modbuild_id, modpath, modpath_to_try) | ||
| 1553 | if modstaledeps === true | ||
| 1554 | continue | ||
| 1555 | end | ||
| 1556 | modstaledeps, modocachepath = modstaledeps::Tuple{Vector{Any}, Union{Nothing, String}} | ||
| 1557 | staledeps[i] = (modpath, modkey, modpath_to_try, modstaledeps, modocachepath) | ||
| 1558 | @goto check_next_dep | ||
| 1559 | end | ||
| 1560 | @debug "Rejecting cache file $path_to_try because required dependency $modkey with build ID $(UUID(modbuild_id)) is missing from the cache." | ||
| 1561 | @goto check_next_path | ||
| 1562 | @label check_next_dep | ||
| 1563 | end | ||
| 1564 | try | ||
| 1565 | touch(path_to_try) # update timestamp of precompilation file | ||
| 1566 | catch ex # file might be read-only and then we fail to update timestamp, which is fine | ||
| 1567 | ex isa IOError || rethrow() | ||
| 1568 | end | ||
| 1569 | # finish loading module graph into staledeps | ||
| 1570 | for i in 1:length(staledeps) | ||
| 1571 | dep = staledeps[i] | ||
| 1572 | dep isa Module && continue | ||
| 1573 | modpath, modkey, modcachepath, modstaledeps, modocachepath = dep::Tuple{String, PkgId, String, Vector{Any}, Union{Nothing, String}} | ||
| 1574 | dep = _tryrequire_from_serialized(modkey, modcachepath, modocachepath, modpath, modstaledeps) | ||
| 1575 | if !isa(dep, Module) | ||
| 1576 | @debug "Rejecting cache file $path_to_try because required dependency $modkey failed to load from cache file for $modcachepath." exception=dep | ||
| 1577 | @goto check_next_path | ||
| 1578 | end | ||
| 1579 | staledeps[i] = dep | ||
| 1580 | end | ||
| 1581 | restored = _include_from_serialized(pkg, path_to_try, ocachefile, staledeps) | ||
| 1582 | isa(restored, Module) && return restored | ||
| 1583 | @debug "Deserialization checks failed while attempting to load cache from $path_to_try" exception=restored | ||
| 1584 | continue | ||
| 1585 | @label check_next_path | ||
| 1586 | end | ||
| 1587 | return nothing | ||
| 1588 | end | ||
| 1589 | |||
| 1590 | # to synchronize multiple tasks trying to import/using something | ||
| 1591 | const package_locks = Dict{PkgId,Pair{Task,Threads.Condition}}() | ||
| 1592 | |||
| 1593 | debug_loading_deadlocks::Bool = true # Enable a slightly more expensive, but more complete algorithm that can handle simultaneous tasks. | ||
| 1594 | # This only triggers if you have multiple tasks trying to load the same package at the same time, | ||
| 1595 | # so it is unlikely to make a difference normally. | ||
| 1596 | function start_loading(modkey::PkgId) | ||
| 1597 | # handle recursive calls to require | ||
| 1598 | assert_havelock(require_lock) | ||
| 1599 | loading = get(package_locks, modkey, nothing) | ||
| 1600 | if loading !== nothing | ||
| 1601 | # load already in progress for this module on the task | ||
| 1602 | task, cond = loading | ||
| 1603 | deps = String[modkey.name] | ||
| 1604 | pkgid = modkey | ||
| 1605 | assert_havelock(cond.lock) | ||
| 1606 | if debug_loading_deadlocks && current_task() !== task | ||
| 1607 | waiters = Dict{Task,Pair{Task,PkgId}}() # invert to track waiting tasks => loading tasks | ||
| 1608 | for each in package_locks | ||
| 1609 | cond2 = each[2][2] | ||
| 1610 | assert_havelock(cond2.lock) | ||
| 1611 | for waiting in cond2.waitq | ||
| 1612 | push!(waiters, waiting => (each[2][1] => each[1])) | ||
| 1613 | end | ||
| 1614 | end | ||
| 1615 | while true | ||
| 1616 | running = get(waiters, task, nothing) | ||
| 1617 | running === nothing && break | ||
| 1618 | task, pkgid = running | ||
| 1619 | push!(deps, pkgid.name) | ||
| 1620 | task === current_task() && break | ||
| 1621 | end | ||
| 1622 | end | ||
| 1623 | if current_task() === task | ||
| 1624 | others = String[modkey.name] # repeat this to emphasize the cycle here | ||
| 1625 | for each in package_locks # list the rest of the packages being loaded too | ||
| 1626 | if each[2][1] === task | ||
| 1627 | other = each[1].name | ||
| 1628 | other == modkey.name || other == pkgid.name || push!(others, other) | ||
| 1629 | end | ||
| 1630 | end | ||
| 1631 | msg = sprint(deps, others) do io, deps, others | ||
| 1632 | print(io, "deadlock detected in loading ") | ||
| 1633 | join(io, deps, " -> ") | ||
| 1634 | print(io, " -> ") | ||
| 1635 | join(io, others, " && ") | ||
| 1636 | end | ||
| 1637 | throw(ConcurrencyViolationError(msg)) | ||
| 1638 | end | ||
| 1639 | return wait(cond) | ||
| 1640 | end | ||
| 1641 | package_locks[modkey] = current_task() => Threads.Condition(require_lock) | ||
| 1642 | return | ||
| 1643 | end | ||
| 1644 | |||
| 1645 | function end_loading(modkey::PkgId, @nospecialize loaded) | ||
| 1646 | loading = pop!(package_locks, modkey) | ||
| 1647 | notify(loading[2], loaded, all=true) | ||
| 1648 | nothing | ||
| 1649 | end | ||
| 1650 | |||
| 1651 | # to notify downstream consumers that a module was successfully loaded | ||
| 1652 | # Callbacks take the form (mod::Base.PkgId) -> nothing. | ||
| 1653 | # WARNING: This is an experimental feature and might change later, without deprecation. | ||
| 1654 | const package_callbacks = Any[] | ||
| 1655 | # to notify downstream consumers that a file has been included into a particular module | ||
| 1656 | # Callbacks take the form (mod::Module, filename::String) -> nothing | ||
| 1657 | # WARNING: This is an experimental feature and might change later, without deprecation. | ||
| 1658 | const include_callbacks = Any[] | ||
| 1659 | |||
| 1660 | # used to optionally track dependencies when requiring a module: | ||
| 1661 | const _concrete_dependencies = Pair{PkgId,UInt128}[] # these dependency versions are "set in stone", and the process should try to avoid invalidating them | ||
| 1662 | const _require_dependencies = Any[] # a list of (mod, path, mtime) tuples that are the file dependencies of the module currently being precompiled | ||
| 1663 | const _track_dependencies = Ref(false) # set this to true to track the list of file dependencies | ||
| 1664 | function _include_dependency(mod::Module, _path::AbstractString) | ||
| 1665 | prev = source_path(nothing) | ||
| 1666 | if prev === nothing | ||
| 1667 | path = abspath(_path) | ||
| 1668 | else | ||
| 1669 | path = normpath(joinpath(dirname(prev), _path)) | ||
| 1670 | end | ||
| 1671 | if _track_dependencies[] | ||
| 1672 | @lock require_lock begin | ||
| 1673 | push!(_require_dependencies, (mod, path, mtime(path))) | ||
| 1674 | end | ||
| 1675 | end | ||
| 1676 | return path, prev | ||
| 1677 | end | ||
| 1678 | |||
| 1679 | """ | ||
| 1680 | include_dependency(path::AbstractString) | ||
| 1681 | |||
| 1682 | In a module, declare that the file, directory, or symbolic link specified by `path` | ||
| 1683 | (relative or absolute) is a dependency for precompilation; that is, the module will need | ||
| 1684 | to be recompiled if the modification time of `path` changes. | ||
| 1685 | |||
| 1686 | This is only needed if your module depends on a path that is not used via [`include`](@ref). It has | ||
| 1687 | no effect outside of compilation. | ||
| 1688 | """ | ||
| 1689 | function include_dependency(path::AbstractString) | ||
| 1690 | _include_dependency(Main, path) | ||
| 1691 | return nothing | ||
| 1692 | end | ||
| 1693 | |||
| 1694 | # we throw PrecompilableError when a module doesn't want to be precompiled | ||
| 1695 | import Core: PrecompilableError | ||
| 1696 | function show(io::IO, ex::PrecompilableError) | ||
| 1697 | print(io, "Declaring __precompile__(false) is not allowed in files that are being precompiled.") | ||
| 1698 | end | ||
| 1699 | precompilableerror(ex::PrecompilableError) = true | ||
| 1700 | precompilableerror(ex::WrappedException) = precompilableerror(ex.error) | ||
| 1701 | precompilableerror(@nospecialize ex) = false | ||
| 1702 | |||
| 1703 | # Call __precompile__(false) at the top of a file to prevent it from being precompiled | ||
| 1704 | """ | ||
| 1705 | __precompile__(isprecompilable::Bool) | ||
| 1706 | |||
| 1707 | Specify whether the file calling this function is precompilable, defaulting to `true`. | ||
| 1708 | If a module or file is *not* safely precompilable, it should call `__precompile__(false)` in | ||
| 1709 | order to throw an error if Julia attempts to precompile it. | ||
| 1710 | """ | ||
| 1711 | @noinline function __precompile__(isprecompilable::Bool=true) | ||
| 1712 | if !isprecompilable && ccall(:jl_generating_output, Cint, ()) != 0 | ||
| 1713 | throw(PrecompilableError()) | ||
| 1714 | end | ||
| 1715 | nothing | ||
| 1716 | end | ||
| 1717 | |||
| 1718 | # require always works in Main scope and loads files from node 1 | ||
| 1719 | const toplevel_load = Ref(true) | ||
| 1720 | |||
| 1721 | const _require_world_age = Ref{UInt}(typemax(UInt)) | ||
| 1722 | |||
| 1723 | """ | ||
| 1724 | require(into::Module, module::Symbol) | ||
| 1725 | |||
| 1726 | This function is part of the implementation of [`using`](@ref) / [`import`](@ref), if a module is not | ||
| 1727 | already defined in `Main`. It can also be called directly to force reloading a module, | ||
| 1728 | regardless of whether it has been loaded before (for example, when interactively developing | ||
| 1729 | libraries). | ||
| 1730 | |||
| 1731 | Loads a source file, in the context of the `Main` module, on every active node, searching | ||
| 1732 | standard locations for files. `require` is considered a top-level operation, so it sets the | ||
| 1733 | current `include` path but does not use it to search for files (see help for [`include`](@ref)). | ||
| 1734 | This function is typically used to load library code, and is implicitly called by `using` to | ||
| 1735 | load packages. | ||
| 1736 | |||
| 1737 | When searching for files, `require` first looks for package code in the global array | ||
| 1738 | [`LOAD_PATH`](@ref). `require` is case-sensitive on all platforms, including those with | ||
| 1739 | case-insensitive filesystems like macOS and Windows. | ||
| 1740 | |||
| 1741 | For more details regarding code loading, see the manual sections on [modules](@ref modules) and | ||
| 1742 | [parallel computing](@ref code-availability). | ||
| 1743 | """ | ||
| 1744 | function require(into::Module, mod::Symbol) | ||
| 1745 | if _require_world_age[] != typemax(UInt) | ||
| 1746 | Base.invoke_in_world(_require_world_age[], __require, into, mod) | ||
| 1747 | else | ||
| 1748 | @invokelatest __require(into, mod) | ||
| 1749 | end | ||
| 1750 | end | ||
| 1751 | |||
| 1752 | function __require(into::Module, mod::Symbol) | ||
| 1753 | @lock require_lock begin | ||
| 1754 | LOADING_CACHE[] = LoadingCache() | ||
| 1755 | try | ||
| 1756 | uuidkey_env = identify_package_env(into, String(mod)) | ||
| 1757 | # Core.println("require($(PkgId(into)), $mod) -> $uuidkey_env") | ||
| 1758 | if uuidkey_env === nothing | ||
| 1759 | where = PkgId(into) | ||
| 1760 | if where.uuid === nothing | ||
| 1761 | hint, dots = begin | ||
| 1762 | if isdefined(into, mod) && getfield(into, mod) isa Module | ||
| 1763 | true, "." | ||
| 1764 | elseif isdefined(parentmodule(into), mod) && getfield(parentmodule(into), mod) isa Module | ||
| 1765 | true, ".." | ||
| 1766 | else | ||
| 1767 | false, "" | ||
| 1768 | end | ||
| 1769 | end | ||
| 1770 | hint_message = hint ? ", maybe you meant `import/using $(dots)$(mod)`" : "" | ||
| 1771 | start_sentence = hint ? "Otherwise, run" : "Run" | ||
| 1772 | throw(ArgumentError(""" | ||
| 1773 | Package $mod not found in current path$hint_message. | ||
| 1774 | - $start_sentence `import Pkg; Pkg.add($(repr(String(mod))))` to install the $mod package.""")) | ||
| 1775 | else | ||
| 1776 | throw(ArgumentError(""" | ||
| 1777 | Package $(where.name) does not have $mod in its dependencies: | ||
| 1778 | - You may have a partially installed environment. Try `Pkg.instantiate()` | ||
| 1779 | to ensure all packages in the environment are installed. | ||
| 1780 | - Or, if you have $(where.name) checked out for development and have | ||
| 1781 | added $mod as a dependency but haven't updated your primary | ||
| 1782 | environment's manifest file, try `Pkg.resolve()`. | ||
| 1783 | - Otherwise you may need to report an issue with $(where.name)""")) | ||
| 1784 | end | ||
| 1785 | end | ||
| 1786 | uuidkey, env = uuidkey_env | ||
| 1787 | if _track_dependencies[] | ||
| 1788 | push!(_require_dependencies, (into, binpack(uuidkey), 0.0)) | ||
| 1789 | end | ||
| 1790 | return _require_prelocked(uuidkey, env) | ||
| 1791 | finally | ||
| 1792 | LOADING_CACHE[] = nothing | ||
| 1793 | end | ||
| 1794 | end | ||
| 1795 | end | ||
| 1796 | |||
| 1797 | require(uuidkey::PkgId) = @lock require_lock _require_prelocked(uuidkey) | ||
| 1798 | |||
| 1799 | const REPL_PKGID = PkgId(UUID("3fa0cd96-eef1-5676-8a61-b3b8758bbffb"), "REPL") | ||
| 1800 | |||
| 1801 | function _require_prelocked(uuidkey::PkgId, env=nothing) | ||
| 1802 | if _require_world_age[] != typemax(UInt) | ||
| 1803 | Base.invoke_in_world(_require_world_age[], __require_prelocked, uuidkey, env) | ||
| 1804 | else | ||
| 1805 | @invokelatest __require_prelocked(uuidkey, env) | ||
| 1806 | end | ||
| 1807 | end | ||
| 1808 | |||
| 1809 | function __require_prelocked(uuidkey::PkgId, env=nothing) | ||
| 1810 | assert_havelock(require_lock) | ||
| 1811 | if !root_module_exists(uuidkey) | ||
| 1812 | newm = _require(uuidkey, env) | ||
| 1813 | if newm === nothing | ||
| 1814 | error("package `$(uuidkey.name)` did not define the expected \ | ||
| 1815 | module `$(uuidkey.name)`, check for typos in package module name") | ||
| 1816 | end | ||
| 1817 | insert_extension_triggers(uuidkey) | ||
| 1818 | # After successfully loading, notify downstream consumers | ||
| 1819 | run_package_callbacks(uuidkey) | ||
| 1820 | if uuidkey == REPL_PKGID | ||
| 1821 | REPL_MODULE_REF[] = newm | ||
| 1822 | end | ||
| 1823 | else | ||
| 1824 | newm = root_module(uuidkey) | ||
| 1825 | end | ||
| 1826 | return newm | ||
| 1827 | end | ||
| 1828 | |||
| 1829 | mutable struct PkgOrigin | ||
| 1830 | path::Union{String,Nothing} | ||
| 1831 | cachepath::Union{String,Nothing} | ||
| 1832 | version::Union{VersionNumber,Nothing} | ||
| 1833 | end | ||
| 1834 | PkgOrigin() = PkgOrigin(nothing, nothing, nothing) | ||
| 1835 | const pkgorigins = Dict{PkgId,PkgOrigin}() | ||
| 1836 | |||
| 1837 | const loaded_modules = Dict{PkgId,Module}() | ||
| 1838 | const loaded_modules_order = Vector{Module}() | ||
| 1839 | const module_keys = IdDict{Module,PkgId}() # the reverse | ||
| 1840 | |||
| 1841 | is_root_module(m::Module) = @lock require_lock haskey(module_keys, m) | ||
| 1842 | root_module_key(m::Module) = @lock require_lock module_keys[m] | ||
| 1843 | |||
| 1844 | @constprop :none function register_root_module(m::Module) | ||
| 1845 | # n.b. This is called from C after creating a new module in `Base.__toplevel__`, | ||
| 1846 | # instead of adding them to the binding table there. | ||
| 1847 | @lock require_lock begin | ||
| 1848 | key = PkgId(m, String(nameof(m))) | ||
| 1849 | if haskey(loaded_modules, key) | ||
| 1850 | oldm = loaded_modules[key] | ||
| 1851 | if oldm !== m | ||
| 1852 | if (0 != ccall(:jl_generating_output, Cint, ())) && (JLOptions().incremental != 0) | ||
| 1853 | error("Replacing module `$(key.name)`") | ||
| 1854 | else | ||
| 1855 | @warn "Replacing module `$(key.name)`" | ||
| 1856 | end | ||
| 1857 | end | ||
| 1858 | end | ||
| 1859 | push!(loaded_modules_order, m) | ||
| 1860 | loaded_modules[key] = m | ||
| 1861 | module_keys[m] = key | ||
| 1862 | end | ||
| 1863 | nothing | ||
| 1864 | end | ||
| 1865 | |||
| 1866 | register_root_module(Core) | ||
| 1867 | register_root_module(Base) | ||
| 1868 | register_root_module(Main) | ||
| 1869 | |||
| 1870 | # This is used as the current module when loading top-level modules. | ||
| 1871 | # It has the special behavior that modules evaluated in it get added | ||
| 1872 | # to the loaded_modules table instead of getting bindings. | ||
| 1873 | baremodule __toplevel__ | ||
| 1874 | using Base | ||
| 1875 | end | ||
| 1876 | |||
| 1877 | # get a top-level Module from the given key | ||
| 1878 | root_module(key::PkgId) = @lock require_lock loaded_modules[key] | ||
| 1879 | function root_module(where::Module, name::Symbol) | ||
| 1880 | key = identify_package(where, String(name)) | ||
| 1881 | key isa PkgId || throw(KeyError(name)) | ||
| 1882 | return root_module(key) | ||
| 1883 | end | ||
| 1884 | maybe_root_module(key::PkgId) = @lock require_lock get(loaded_modules, key, nothing) | ||
| 1885 | |||
| 1886 | root_module_exists(key::PkgId) = @lock require_lock haskey(loaded_modules, key) | ||
| 1887 | loaded_modules_array() = @lock require_lock copy(loaded_modules_order) | ||
| 1888 | |||
| 1889 | function unreference_module(key::PkgId) | ||
| 1890 | if haskey(loaded_modules, key) | ||
| 1891 | m = pop!(loaded_modules, key) | ||
| 1892 | # need to ensure all modules are GC rooted; will still be referenced | ||
| 1893 | # in module_keys | ||
| 1894 | end | ||
| 1895 | end | ||
| 1896 | |||
| 1897 | # whoever takes the package_locks[pkg] must call this function immediately | ||
| 1898 | function set_pkgorigin_version_path(pkg::PkgId, path::Union{String,Nothing}) | ||
| 1899 | assert_havelock(require_lock) | ||
| 1900 | pkgorigin = get!(PkgOrigin, pkgorigins, pkg) | ||
| 1901 | if path !== nothing | ||
| 1902 | # Pkg needs access to the version of packages in the sysimage. | ||
| 1903 | if Core.Compiler.generating_sysimg() | ||
| 1904 | pkgorigin.version = get_pkgversion_from_path(joinpath(dirname(path), "..")) | ||
| 1905 | end | ||
| 1906 | end | ||
| 1907 | pkgorigin.path = path | ||
| 1908 | nothing | ||
| 1909 | end | ||
| 1910 | |||
| 1911 | # A hook to allow code load to use Pkg.precompile | ||
| 1912 | const PKG_PRECOMPILE_HOOK = Ref{Function}() | ||
| 1913 | |||
| 1914 | # Returns `nothing` or the new(ish) module | ||
| 1915 | function _require(pkg::PkgId, env=nothing) | ||
| 1916 | assert_havelock(require_lock) | ||
| 1917 | loaded = start_loading(pkg) | ||
| 1918 | loaded === nothing || return loaded | ||
| 1919 | |||
| 1920 | last = toplevel_load[] | ||
| 1921 | try | ||
| 1922 | toplevel_load[] = false | ||
| 1923 | # perform the search operation to select the module file require intends to load | ||
| 1924 | path = locate_package(pkg, env) | ||
| 1925 | if path === nothing | ||
| 1926 | throw(ArgumentError(""" | ||
| 1927 | Package $pkg is required but does not seem to be installed: | ||
| 1928 | - Run `Pkg.instantiate()` to install all recorded dependencies. | ||
| 1929 | """)) | ||
| 1930 | end | ||
| 1931 | set_pkgorigin_version_path(pkg, path) | ||
| 1932 | |||
| 1933 | pkg_precompile_attempted = false # being safe to avoid getting stuck in a Pkg.precompile loop | ||
| 1934 | |||
| 1935 | # attempt to load the module file via the precompile cache locations | ||
| 1936 | if JLOptions().use_compiled_modules != 0 | ||
| 1937 | @label load_from_cache | ||
| 1938 | m = _require_search_from_serialized(pkg, path, UInt128(0)) | ||
| 1939 | if m isa Module | ||
| 1940 | return m | ||
| 1941 | end | ||
| 1942 | end | ||
| 1943 | |||
| 1944 | # if the module being required was supposed to have a particular version | ||
| 1945 | # but it was not handled by the precompile loader, complain | ||
| 1946 | for (concrete_pkg, concrete_build_id) in _concrete_dependencies | ||
| 1947 | if pkg == concrete_pkg | ||
| 1948 | @warn """Module $(pkg.name) with build ID $((UUID(concrete_build_id))) is missing from the cache. | ||
| 1949 | This may mean $pkg does not support precompilation but is imported by a module that does.""" | ||
| 1950 | if JLOptions().incremental != 0 | ||
| 1951 | # during incremental precompilation, this should be fail-fast | ||
| 1952 | throw(PrecompilableError()) | ||
| 1953 | end | ||
| 1954 | end | ||
| 1955 | end | ||
| 1956 | |||
| 1957 | if JLOptions().use_compiled_modules != 0 | ||
| 1958 | if (0 == ccall(:jl_generating_output, Cint, ())) || (JLOptions().incremental != 0) | ||
| 1959 | if !pkg_precompile_attempted && isinteractive() && isassigned(PKG_PRECOMPILE_HOOK) | ||
| 1960 | pkg_precompile_attempted = true | ||
| 1961 | unlock(require_lock) | ||
| 1962 | try | ||
| 1963 | @invokelatest PKG_PRECOMPILE_HOOK[](pkg.name, _from_loading = true) | ||
| 1964 | finally | ||
| 1965 | lock(require_lock) | ||
| 1966 | end | ||
| 1967 | @goto load_from_cache | ||
| 1968 | end | ||
| 1969 | # spawn off a new incremental pre-compile task for recursive `require` calls | ||
| 1970 | cachefile_or_module = maybe_cachefile_lock(pkg, path) do | ||
| 1971 | # double-check now that we have lock | ||
| 1972 | m = _require_search_from_serialized(pkg, path, UInt128(0)) | ||
| 1973 | m isa Module && return m | ||
| 1974 | compilecache(pkg, path) | ||
| 1975 | end | ||
| 1976 | cachefile_or_module isa Module && return cachefile_or_module::Module | ||
| 1977 | cachefile = cachefile_or_module | ||
| 1978 | if isnothing(cachefile) # maybe_cachefile_lock returns nothing if it had to wait for another process | ||
| 1979 | @goto load_from_cache # the new cachefile will have the newest mtime so will come first in the search | ||
| 1980 | elseif isa(cachefile, Exception) | ||
| 1981 | if precompilableerror(cachefile) | ||
| 1982 | verbosity = isinteractive() ? CoreLogging.Info : CoreLogging.Debug | ||
| 1983 | @logmsg verbosity "Skipping precompilation since __precompile__(false). Importing $pkg." | ||
| 1984 | else | ||
| 1985 | @warn "The call to compilecache failed to create a usable precompiled cache file for $pkg" exception=m | ||
| 1986 | end | ||
| 1987 | # fall-through to loading the file locally if not incremental | ||
| 1988 | else | ||
| 1989 | cachefile, ocachefile = cachefile::Tuple{String, Union{Nothing, String}} | ||
| 1990 | m = _tryrequire_from_serialized(pkg, cachefile, ocachefile) | ||
| 1991 | if !isa(m, Module) | ||
| 1992 | @warn "The call to compilecache failed to create a usable precompiled cache file for $pkg" exception=m | ||
| 1993 | else | ||
| 1994 | return m | ||
| 1995 | end | ||
| 1996 | end | ||
| 1997 | if JLOptions().incremental != 0 | ||
| 1998 | # during incremental precompilation, this should be fail-fast | ||
| 1999 | throw(PrecompilableError()) | ||
| 2000 | end | ||
| 2001 | end | ||
| 2002 | end | ||
| 2003 | |||
| 2004 | # just load the file normally via include | ||
| 2005 | # for unknown dependencies | ||
| 2006 | uuid = pkg.uuid | ||
| 2007 | uuid = (uuid === nothing ? (UInt64(0), UInt64(0)) : convert(NTuple{2, UInt64}, uuid)) | ||
| 2008 | old_uuid = ccall(:jl_module_uuid, NTuple{2, UInt64}, (Any,), __toplevel__) | ||
| 2009 | if uuid !== old_uuid | ||
| 2010 | ccall(:jl_set_module_uuid, Cvoid, (Any, NTuple{2, UInt64}), __toplevel__, uuid) | ||
| 2011 | end | ||
| 2012 | unlock(require_lock) | ||
| 2013 | try | ||
| 2014 | include(__toplevel__, path) | ||
| 2015 | loaded = get(loaded_modules, pkg, nothing) | ||
| 2016 | finally | ||
| 2017 | lock(require_lock) | ||
| 2018 | if uuid !== old_uuid | ||
| 2019 | ccall(:jl_set_module_uuid, Cvoid, (Any, NTuple{2, UInt64}), __toplevel__, old_uuid) | ||
| 2020 | end | ||
| 2021 | end | ||
| 2022 | finally | ||
| 2023 | toplevel_load[] = last | ||
| 2024 | end_loading(pkg, loaded) | ||
| 2025 | end | ||
| 2026 | return loaded | ||
| 2027 | end | ||
| 2028 | |||
| 2029 | # Only used from test/precompile.jl | ||
| 2030 | function _require_from_serialized(uuidkey::PkgId, path::String, ocachepath::Union{String, Nothing}) | ||
| 2031 | @lock require_lock begin | ||
| 2032 | set_pkgorigin_version_path(uuidkey, nothing) | ||
| 2033 | newm = _tryrequire_from_serialized(uuidkey, path, ocachepath) | ||
| 2034 | newm isa Module || throw(newm) | ||
| 2035 | insert_extension_triggers(uuidkey) | ||
| 2036 | # After successfully loading, notify downstream consumers | ||
| 2037 | run_package_callbacks(uuidkey) | ||
| 2038 | return newm | ||
| 2039 | end | ||
| 2040 | end | ||
| 2041 | |||
| 2042 | |||
| 2043 | |||
| 2044 | # relative-path load | ||
| 2045 | |||
| 2046 | """ | ||
| 2047 | include_string([mapexpr::Function,] m::Module, code::AbstractString, filename::AbstractString="string") | ||
| 2048 | |||
| 2049 | Like [`include`](@ref), except reads code from the given string rather than from a file. | ||
| 2050 | |||
| 2051 | The optional first argument `mapexpr` can be used to transform the included code before | ||
| 2052 | it is evaluated: for each parsed expression `expr` in `code`, the `include_string` function | ||
| 2053 | actually evaluates `mapexpr(expr)`. If it is omitted, `mapexpr` defaults to [`identity`](@ref). | ||
| 2054 | |||
| 2055 | !!! compat "Julia 1.5" | ||
| 2056 | Julia 1.5 is required for passing the `mapexpr` argument. | ||
| 2057 | """ | ||
| 2058 | function include_string(mapexpr::Function, mod::Module, code::AbstractString, | ||
| 2059 | filename::AbstractString="string") | ||
| 2060 | loc = LineNumberNode(1, Symbol(filename)) | ||
| 2061 | try | ||
| 2062 | ast = Meta.parseall(code, filename=filename) | ||
| 2063 | @assert Meta.isexpr(ast, :toplevel) | ||
| 2064 | result = nothing | ||
| 2065 | line_and_ex = Expr(:toplevel, loc, nothing) | ||
| 2066 | for ex in ast.args | ||
| 2067 | if ex isa LineNumberNode | ||
| 2068 | loc = ex | ||
| 2069 | line_and_ex.args[1] = ex | ||
| 2070 | continue | ||
| 2071 | end | ||
| 2072 | ex = mapexpr(ex) | ||
| 2073 | # Wrap things to be eval'd in a :toplevel expr to carry line | ||
| 2074 | # information as part of the expr. | ||
| 2075 | line_and_ex.args[2] = ex | ||
| 2076 | 58 (100 %) | 58 (100 %) | result = Core.eval(mod, line_and_ex)  — 58 (100 %) samples spent calling `eval` |
| 2077 | end | ||
| 2078 | return result | ||
| 2079 | catch exc | ||
| 2080 | # TODO: Now that stacktraces are more reliable we should remove | ||
| 2081 | # LoadError and expose the real error type directly. | ||
| 2082 | rethrow(LoadError(filename, loc.line, exc)) | ||
| 2083 | end | ||
| 2084 | end | ||
| 2085 | |||
| 2086 | include_string(m::Module, txt::AbstractString, fname::AbstractString="string") = | ||
| 2087 | include_string(identity, m, txt, fname) | ||
| 2088 | |||
| 2089 | function source_path(default::Union{AbstractString,Nothing}="") | ||
| 2090 | s = current_task().storage | ||
| 2091 | if s !== nothing | ||
| 2092 | s = s::IdDict{Any,Any} | ||
| 2093 | if haskey(s, :SOURCE_PATH) | ||
| 2094 | return s[:SOURCE_PATH]::Union{Nothing,String} | ||
| 2095 | end | ||
| 2096 | end | ||
| 2097 | return default | ||
| 2098 | end | ||
| 2099 | |||
| 2100 | function source_dir() | ||
| 2101 | p = source_path(nothing) | ||
| 2102 | return p === nothing ? pwd() : dirname(p) | ||
| 2103 | end | ||
| 2104 | |||
| 2105 | """ | ||
| 2106 | Base.include([mapexpr::Function,] m::Module, path::AbstractString) | ||
| 2107 | |||
| 2108 | Evaluate the contents of the input source file in the global scope of module `m`. | ||
| 2109 | Every module (except those defined with [`baremodule`](@ref)) has its own | ||
| 2110 | definition of `include` omitting the `m` argument, which evaluates the file in that module. | ||
| 2111 | Returns the result of the last evaluated expression of the input file. During including, | ||
| 2112 | a task-local include path is set to the directory containing the file. Nested calls to | ||
| 2113 | `include` will search relative to that path. This function is typically used to load source | ||
| 2114 | interactively, or to combine files in packages that are broken into multiple source files. | ||
| 2115 | |||
| 2116 | The optional first argument `mapexpr` can be used to transform the included code before | ||
| 2117 | it is evaluated: for each parsed expression `expr` in `path`, the `include` function | ||
| 2118 | actually evaluates `mapexpr(expr)`. If it is omitted, `mapexpr` defaults to [`identity`](@ref). | ||
| 2119 | |||
| 2120 | !!! compat "Julia 1.5" | ||
| 2121 | Julia 1.5 is required for passing the `mapexpr` argument. | ||
| 2122 | """ | ||
| 2123 | Base.include # defined in Base.jl | ||
| 2124 | |||
| 2125 | # Full include() implementation which is used after bootstrap | ||
| 2126 | function _include(mapexpr::Function, mod::Module, _path::AbstractString) | ||
| 2127 | @noinline # Workaround for module availability in _simplify_include_frames | ||
| 2128 | path, prev = _include_dependency(mod, _path) | ||
| 2129 | for callback in include_callbacks # to preserve order, must come before eval in include_string | ||
| 2130 | invokelatest(callback, mod, path) | ||
| 2131 | end | ||
| 2132 | code = read(path, String) | ||
| 2133 | tls = task_local_storage() | ||
| 2134 | tls[:SOURCE_PATH] = path | ||
| 2135 | try | ||
| 2136 | return include_string(mapexpr, mod, code, path) | 58 (100 %) | 58 (100 %) — samples spent calling include_string |
| 2137 | finally | ||
| 2138 | if prev === nothing | ||
| 2139 | delete!(tls, :SOURCE_PATH) | ||
| 2140 | else | ||
| 2141 | tls[:SOURCE_PATH] = prev | ||
| 2142 | end | ||
| 2143 | end | ||
| 2144 | end | ||
| 2145 | |||
| 2146 | """ | ||
| 2147 | evalfile(path::AbstractString, args::Vector{String}=String[]) | ||
| 2148 | |||
| 2149 | Load the file into an anonymous module using [`include`](@ref), evaluate all expressions, | ||
| 2150 | and return the value of the last expression. | ||
| 2151 | The optional `args` argument can be used to set the input arguments of the script (i.e. the global `ARGS` variable). | ||
| 2152 | Note that definitions (e.g. methods, globals) are evaluated in the anonymous module and do not affect the current module. | ||
| 2153 | |||
| 2154 | # Example | ||
| 2155 | |||
| 2156 | ```jldoctest | ||
| 2157 | julia> write("testfile.jl", \"\"\" | ||
| 2158 | @show ARGS | ||
| 2159 | 1 + 1 | ||
| 2160 | \"\"\"); | ||
| 2161 | |||
| 2162 | julia> x = evalfile("testfile.jl", ["ARG1", "ARG2"]); | ||
| 2163 | ARGS = ["ARG1", "ARG2"] | ||
| 2164 | |||
| 2165 | julia> x | ||
| 2166 | 2 | ||
| 2167 | |||
| 2168 | julia> rm("testfile.jl") | ||
| 2169 | ``` | ||
| 2170 | """ | ||
| 2171 | function evalfile(path::AbstractString, args::Vector{String}=String[]) | ||
| 2172 | return Core.eval(Module(:__anon__), | ||
| 2173 | Expr(:toplevel, | ||
| 2174 | :(const ARGS = $args), | ||
| 2175 | :(eval(x) = $(Expr(:core, :eval))(__anon__, x)), | ||
| 2176 | :(include(x) = $(Expr(:top, :include))(__anon__, x)), | ||
| 2177 | :(include(mapexpr::Function, x) = $(Expr(:top, :include))(mapexpr, __anon__, x)), | ||
| 2178 | :(include($path)))) | ||
| 2179 | end | ||
| 2180 | evalfile(path::AbstractString, args::Vector) = evalfile(path, String[args...]) | ||
| 2181 | |||
| 2182 | function load_path_setup_code(load_path::Bool=true) | ||
| 2183 | code = """ | ||
| 2184 | append!(empty!(Base.DEPOT_PATH), $(repr(map(abspath, DEPOT_PATH)))) | ||
| 2185 | append!(empty!(Base.DL_LOAD_PATH), $(repr(map(abspath, DL_LOAD_PATH)))) | ||
| 2186 | """ | ||
| 2187 | if load_path | ||
| 2188 | load_path = map(abspath, Base.load_path()) | ||
| 2189 | path_sep = Sys.iswindows() ? ';' : ':' | ||
| 2190 | any(path -> path_sep in path, load_path) && | ||
| 2191 | error("LOAD_PATH entries cannot contain $(repr(path_sep))") | ||
| 2192 | code *= """ | ||
| 2193 | append!(empty!(Base.LOAD_PATH), $(repr(load_path))) | ||
| 2194 | ENV["JULIA_LOAD_PATH"] = $(repr(join(load_path, Sys.iswindows() ? ';' : ':'))) | ||
| 2195 | Base.set_active_project(nothing) | ||
| 2196 | """ | ||
| 2197 | end | ||
| 2198 | return code | ||
| 2199 | end | ||
| 2200 | |||
| 2201 | # this is called in the external process that generates precompiled package files | ||
| 2202 | function include_package_for_output(pkg::PkgId, input::String, depot_path::Vector{String}, dl_load_path::Vector{String}, load_path::Vector{String}, | ||
| 2203 | concrete_deps::typeof(_concrete_dependencies), source::Union{Nothing,String}) | ||
| 2204 | append!(empty!(Base.DEPOT_PATH), depot_path) | ||
| 2205 | append!(empty!(Base.DL_LOAD_PATH), dl_load_path) | ||
| 2206 | append!(empty!(Base.LOAD_PATH), load_path) | ||
| 2207 | ENV["JULIA_LOAD_PATH"] = join(load_path, Sys.iswindows() ? ';' : ':') | ||
| 2208 | set_active_project(nothing) | ||
| 2209 | Base._track_dependencies[] = true | ||
| 2210 | get!(Base.PkgOrigin, Base.pkgorigins, pkg).path = input | ||
| 2211 | append!(empty!(Base._concrete_dependencies), concrete_deps) | ||
| 2212 | uuid_tuple = pkg.uuid === nothing ? (UInt64(0), UInt64(0)) : convert(NTuple{2, UInt64}, pkg.uuid) | ||
| 2213 | |||
| 2214 | ccall(:jl_set_module_uuid, Cvoid, (Any, NTuple{2, UInt64}), Base.__toplevel__, uuid_tuple) | ||
| 2215 | if source !== nothing | ||
| 2216 | task_local_storage()[:SOURCE_PATH] = source | ||
| 2217 | end | ||
| 2218 | |||
| 2219 | ccall(:jl_set_newly_inferred, Cvoid, (Any,), Core.Compiler.newly_inferred) | ||
| 2220 | Core.Compiler.track_newly_inferred.x = true | ||
| 2221 | try | ||
| 2222 | Base.include(Base.__toplevel__, input) | ||
| 2223 | catch ex | ||
| 2224 | precompilableerror(ex) || rethrow() | ||
| 2225 | @debug "Aborting `create_expr_cache'" exception=(ErrorException("Declaration of __precompile__(false) not allowed"), catch_backtrace()) | ||
| 2226 | exit(125) # we define status = 125 means PrecompileableError | ||
| 2227 | finally | ||
| 2228 | Core.Compiler.track_newly_inferred.x = false | ||
| 2229 | end | ||
| 2230 | end | ||
| 2231 | |||
| 2232 | const PRECOMPILE_TRACE_COMPILE = Ref{String}() | ||
| 2233 | function create_expr_cache(pkg::PkgId, input::String, output::String, output_o::Union{Nothing, String}, | ||
| 2234 | concrete_deps::typeof(_concrete_dependencies), internal_stderr::IO = stderr, internal_stdout::IO = stdout) | ||
| 2235 | @nospecialize internal_stderr internal_stdout | ||
| 2236 | rm(output, force=true) # Remove file if it exists | ||
| 2237 | output_o === nothing || rm(output_o, force=true) | ||
| 2238 | depot_path = map(abspath, DEPOT_PATH) | ||
| 2239 | dl_load_path = map(abspath, DL_LOAD_PATH) | ||
| 2240 | load_path = map(abspath, Base.load_path()) | ||
| 2241 | path_sep = Sys.iswindows() ? ';' : ':' | ||
| 2242 | any(path -> path_sep in path, load_path) && | ||
| 2243 | error("LOAD_PATH entries cannot contain $(repr(path_sep))") | ||
| 2244 | |||
| 2245 | deps_strs = String[] | ||
| 2246 | function pkg_str(_pkg::PkgId) | ||
| 2247 | if _pkg.uuid === nothing | ||
| 2248 | "Base.PkgId($(repr(_pkg.name)))" | ||
| 2249 | else | ||
| 2250 | "Base.PkgId(Base.UUID(\"$(_pkg.uuid)\"), $(repr(_pkg.name)))" | ||
| 2251 | end | ||
| 2252 | end | ||
| 2253 | for (pkg, build_id) in concrete_deps | ||
| 2254 | push!(deps_strs, "$(pkg_str(pkg)) => $(repr(build_id))") | ||
| 2255 | end | ||
| 2256 | |||
| 2257 | if output_o !== nothing | ||
| 2258 | cpu_target = get(ENV, "JULIA_CPU_TARGET", nothing) | ||
| 2259 | opt_level = Base.JLOptions().opt_level | ||
| 2260 | opts = `-O$(opt_level) --output-o $(output_o) --output-ji $(output) --output-incremental=yes` | ||
| 2261 | else | ||
| 2262 | cpu_target = nothing | ||
| 2263 | opts = `-O0 --output-ji $(output) --output-incremental=yes` | ||
| 2264 | end | ||
| 2265 | |||
| 2266 | deps_eltype = sprint(show, eltype(concrete_deps); context = :module=>nothing) | ||
| 2267 | deps = deps_eltype * "[" * join(deps_strs, ",") * "]" | ||
| 2268 | trace = isassigned(PRECOMPILE_TRACE_COMPILE) ? `--trace-compile=$(PRECOMPILE_TRACE_COMPILE[])` : `` | ||
| 2269 | io = open(pipeline(addenv(`$(julia_cmd(;cpu_target)::Cmd) $(opts) | ||
| 2270 | --startup-file=no --history-file=no --warn-overwrite=yes | ||
| 2271 | --color=$(have_color === nothing ? "auto" : have_color ? "yes" : "no") | ||
| 2272 | $trace | ||
| 2273 | -`, | ||
| 2274 | "OPENBLAS_NUM_THREADS" => 1, | ||
| 2275 | "JULIA_NUM_THREADS" => 1), | ||
| 2276 | stderr = internal_stderr, stdout = internal_stdout), | ||
| 2277 | "w", stdout) | ||
| 2278 | # write data over stdin to avoid the (unlikely) case of exceeding max command line size | ||
| 2279 | write(io.in, """ | ||
| 2280 | empty!(Base.EXT_DORMITORY) # If we have a custom sysimage with `EXT_DORMITORY` prepopulated | ||
| 2281 | Base.precompiling_extension = $(loading_extension) | ||
| 2282 | Base.include_package_for_output($(pkg_str(pkg)), $(repr(abspath(input))), $(repr(depot_path)), $(repr(dl_load_path)), | ||
| 2283 | $(repr(load_path)), $deps, $(repr(source_path(nothing)))) | ||
| 2284 | """) | ||
| 2285 | close(io.in) | ||
| 2286 | return io | ||
| 2287 | end | ||
| 2288 | |||
| 2289 | function compilecache_dir(pkg::PkgId) | ||
| 2290 | entrypath, entryfile = cache_file_entry(pkg) | ||
| 2291 | return joinpath(DEPOT_PATH[1], entrypath) | ||
| 2292 | end | ||
| 2293 | |||
| 2294 | function compilecache_path(pkg::PkgId, prefs_hash::UInt64; project::String=something(Base.active_project(), ""))::String | ||
| 2295 | entrypath, entryfile = cache_file_entry(pkg) | ||
| 2296 | cachepath = joinpath(DEPOT_PATH[1], entrypath) | ||
| 2297 | isdir(cachepath) || mkpath(cachepath) | ||
| 2298 | if pkg.uuid === nothing | ||
| 2299 | abspath(cachepath, entryfile) * ".ji" | ||
| 2300 | else | ||
| 2301 | crc = _crc32c(project) | ||
| 2302 | crc = _crc32c(unsafe_string(JLOptions().image_file), crc) | ||
| 2303 | crc = _crc32c(unsafe_string(JLOptions().julia_bin), crc) | ||
| 2304 | crc = _crc32c(ccall(:jl_cache_flags, UInt8, ()), crc) | ||
| 2305 | |||
| 2306 | cpu_target = get(ENV, "JULIA_CPU_TARGET", nothing) | ||
| 2307 | if cpu_target === nothing | ||
| 2308 | cpu_target = unsafe_string(JLOptions().cpu_target) | ||
| 2309 | end | ||
| 2310 | crc = _crc32c(cpu_target, crc) | ||
| 2311 | |||
| 2312 | crc = _crc32c(prefs_hash, crc) | ||
| 2313 | project_precompile_slug = slug(crc, 5) | ||
| 2314 | abspath(cachepath, string(entryfile, "_", project_precompile_slug, ".ji")) | ||
| 2315 | end | ||
| 2316 | end | ||
| 2317 | |||
| 2318 | """ | ||
| 2319 | Base.compilecache(module::PkgId) | ||
| 2320 | |||
| 2321 | Creates a precompiled cache file for a module and all of its dependencies. | ||
| 2322 | This can be used to reduce package load times. Cache files are stored in | ||
| 2323 | `DEPOT_PATH[1]/compiled`. See [Module initialization and precompilation](@ref) | ||
| 2324 | for important notes. | ||
| 2325 | """ | ||
| 2326 | function compilecache(pkg::PkgId, internal_stderr::IO = stderr, internal_stdout::IO = stdout) | ||
| 2327 | @nospecialize internal_stderr internal_stdout | ||
| 2328 | path = locate_package(pkg) | ||
| 2329 | path === nothing && throw(ArgumentError("$pkg not found during precompilation")) | ||
| 2330 | return compilecache(pkg, path, internal_stderr, internal_stdout) | ||
| 2331 | end | ||
| 2332 | |||
| 2333 | const MAX_NUM_PRECOMPILE_FILES = Ref(10) | ||
| 2334 | |||
| 2335 | function compilecache(pkg::PkgId, path::String, internal_stderr::IO = stderr, internal_stdout::IO = stdout, | ||
| 2336 | keep_loaded_modules::Bool = true) | ||
| 2337 | |||
| 2338 | @nospecialize internal_stderr internal_stdout | ||
| 2339 | # decide where to put the resulting cache file | ||
| 2340 | cachepath = compilecache_dir(pkg) | ||
| 2341 | |||
| 2342 | # build up the list of modules that we want the precompile process to preserve | ||
| 2343 | concrete_deps = copy(_concrete_dependencies) | ||
| 2344 | if keep_loaded_modules | ||
| 2345 | for mod in loaded_modules_array() | ||
| 2346 | if !(mod === Main || mod === Core || mod === Base) | ||
| 2347 | push!(concrete_deps, PkgId(mod) => module_build_id(mod)) | ||
| 2348 | end | ||
| 2349 | end | ||
| 2350 | end | ||
| 2351 | # run the expression and cache the result | ||
| 2352 | verbosity = isinteractive() ? CoreLogging.Info : CoreLogging.Debug | ||
| 2353 | @logmsg verbosity "Precompiling $pkg" | ||
| 2354 | |||
| 2355 | # create a temporary file in `cachepath` directory, write the cache in it, | ||
| 2356 | # write the checksum, _and then_ atomically move the file to `cachefile`. | ||
| 2357 | mkpath(cachepath) | ||
| 2358 | cache_objects = JLOptions().use_pkgimages != 0 | ||
| 2359 | tmppath, tmpio = mktemp(cachepath) | ||
| 2360 | |||
| 2361 | if cache_objects | ||
| 2362 | tmppath_o, tmpio_o = mktemp(cachepath) | ||
| 2363 | tmppath_so, tmpio_so = mktemp(cachepath) | ||
| 2364 | else | ||
| 2365 | tmppath_o = nothing | ||
| 2366 | end | ||
| 2367 | local p | ||
| 2368 | try | ||
| 2369 | close(tmpio) | ||
| 2370 | if cache_objects | ||
| 2371 | close(tmpio_o) | ||
| 2372 | close(tmpio_so) | ||
| 2373 | end | ||
| 2374 | p = create_expr_cache(pkg, path, tmppath, tmppath_o, concrete_deps, internal_stderr, internal_stdout) | ||
| 2375 | |||
| 2376 | if success(p) | ||
| 2377 | if cache_objects | ||
| 2378 | # Run linker over tmppath_o | ||
| 2379 | Linking.link_image(tmppath_o, tmppath_so) | ||
| 2380 | end | ||
| 2381 | |||
| 2382 | # Read preferences hash back from .ji file (we can't precompute because | ||
| 2383 | # we don't actually know what the list of compile-time preferences are without compiling) | ||
| 2384 | prefs_hash = preferences_hash(tmppath) | ||
| 2385 | cachefile = compilecache_path(pkg, prefs_hash) | ||
| 2386 | ocachefile = cache_objects ? ocachefile_from_cachefile(cachefile) : nothing | ||
| 2387 | |||
| 2388 | # append checksum for so to the end of the .ji file: | ||
| 2389 | crc_so = UInt32(0) | ||
| 2390 | if cache_objects | ||
| 2391 | crc_so = open(_crc32c, tmppath_so, "r") | ||
| 2392 | end | ||
| 2393 | |||
| 2394 | # append extra crc to the end of the .ji file: | ||
| 2395 | open(tmppath, "r+") do f | ||
| 2396 | if iszero(isvalid_cache_header(f)) | ||
| 2397 | error("Invalid header for $pkg in new cache file $(repr(tmppath)).") | ||
| 2398 | end | ||
| 2399 | seekend(f) | ||
| 2400 | write(f, crc_so) | ||
| 2401 | seekstart(f) | ||
| 2402 | write(f, _crc32c(f)) | ||
| 2403 | end | ||
| 2404 | |||
| 2405 | # inherit permission from the source file (and make them writable) | ||
| 2406 | chmod(tmppath, filemode(path) & 0o777 | 0o200) | ||
| 2407 | |||
| 2408 | # prune the directory with cache files | ||
| 2409 | if pkg.uuid !== nothing | ||
| 2410 | entrypath, entryfile = cache_file_entry(pkg) | ||
| 2411 | cachefiles = filter!(x -> startswith(x, entryfile * "_") && endswith(x, ".ji"), readdir(cachepath)) | ||
| 2412 | if length(cachefiles) >= MAX_NUM_PRECOMPILE_FILES[] | ||
| 2413 | idx = findmin(mtime.(joinpath.(cachepath, cachefiles)))[2] | ||
| 2414 | evicted_cachefile = joinpath(cachepath, cachefiles[idx]) | ||
| 2415 | @debug "Evicting file from cache" evicted_cachefile | ||
| 2416 | rm(evicted_cachefile; force=true) | ||
| 2417 | try | ||
| 2418 | rm(ocachefile_from_cachefile(evicted_cachefile); force=true) | ||
| 2419 | @static if Sys.isapple() | ||
| 2420 | rm(ocachefile_from_cachefile(evicted_cachefile) * ".dSYM"; force=true, recursive=true) | ||
| 2421 | end | ||
| 2422 | catch e | ||
| 2423 | e isa IOError || rethrow() | ||
| 2424 | end | ||
| 2425 | end | ||
| 2426 | end | ||
| 2427 | |||
| 2428 | if cache_objects | ||
| 2429 | try | ||
| 2430 | rename(tmppath_so, ocachefile::String; force=true) | ||
| 2431 | catch e | ||
| 2432 | e isa IOError || rethrow() | ||
| 2433 | isfile(ocachefile::String) || rethrow() | ||
| 2434 | # Windows prevents renaming a file that is in use so if there is a Julia session started | ||
| 2435 | # with a package image loaded, we cannot rename that file. | ||
| 2436 | # The code below appends a `_i` to the name of the cache file where `i` is the smallest number such that | ||
| 2437 | # that cache file does not exist. | ||
| 2438 | ocachename, ocacheext = splitext(ocachefile::String) | ||
| 2439 | old_cachefiles = Set(readdir(cachepath)) | ||
| 2440 | num = 1 | ||
| 2441 | while true | ||
| 2442 | ocachefile = ocachename * "_$num" * ocacheext | ||
| 2443 | in(basename(ocachefile), old_cachefiles) || break | ||
| 2444 | num += 1 | ||
| 2445 | end | ||
| 2446 | # TODO: Risk for a race here if some other process grabs this name before us | ||
| 2447 | cachefile = cachefile_from_ocachefile(ocachefile) | ||
| 2448 | rename(tmppath_so, ocachefile::String; force=true) | ||
| 2449 | end | ||
| 2450 | @static if Sys.isapple() | ||
| 2451 | run(`$(Linking.dsymutil()) $ocachefile`, Base.DevNull(), Base.DevNull(), Base.DevNull()) | ||
| 2452 | end | ||
| 2453 | end | ||
| 2454 | # this is atomic according to POSIX (not Win32): | ||
| 2455 | rename(tmppath, cachefile; force=true) | ||
| 2456 | return cachefile, ocachefile | ||
| 2457 | end | ||
| 2458 | finally | ||
| 2459 | rm(tmppath, force=true) | ||
| 2460 | if cache_objects | ||
| 2461 | rm(tmppath_o::String, force=true) | ||
| 2462 | rm(tmppath_so, force=true) | ||
| 2463 | end | ||
| 2464 | end | ||
| 2465 | if p.exitcode == 125 | ||
| 2466 | return PrecompilableError() | ||
| 2467 | else | ||
| 2468 | error("Failed to precompile $pkg to $(repr(tmppath)).") | ||
| 2469 | end | ||
| 2470 | end | ||
| 2471 | |||
| 2472 | function module_build_id(m::Module) | ||
| 2473 | hi, lo = ccall(:jl_module_build_id, NTuple{2,UInt64}, (Any,), m) | ||
| 2474 | return (UInt128(hi) << 64) | lo | ||
| 2475 | end | ||
| 2476 | |||
| 2477 | function isvalid_cache_header(f::IOStream) | ||
| 2478 | pkgimage = Ref{UInt8}() | ||
| 2479 | checksum = ccall(:jl_read_verify_header, UInt64, (Ptr{Cvoid}, Ptr{UInt8}, Ptr{Int64}, Ptr{Int64}), f.ios, pkgimage, Ref{Int64}(), Ref{Int64}()) # returns checksum id or zero | ||
| 2480 | |||
| 2481 | if !iszero(checksum) && pkgimage[] != 0 | ||
| 2482 | @debug "Cache header was for pkgimage" | ||
| 2483 | return UInt64(0) # We somehow read the header for a pkgimage and not a ji | ||
| 2484 | end | ||
| 2485 | return checksum | ||
| 2486 | end | ||
| 2487 | isvalid_file_crc(f::IOStream) = (_crc32c(seekstart(f), filesize(f) - 4) == read(f, UInt32)) | ||
| 2488 | |||
| 2489 | function isvalid_pkgimage_crc(f::IOStream, ocachefile::String) | ||
| 2490 | seekstart(f) # TODO: is this seekstart necessary before the seek below? | ||
| 2491 | seek(f, filesize(f) - 8) | ||
| 2492 | expected_crc_so = read(f, UInt32) | ||
| 2493 | crc_so = open(_crc32c, ocachefile, "r") | ||
| 2494 | expected_crc_so == crc_so | ||
| 2495 | end | ||
| 2496 | |||
| 2497 | struct CacheHeaderIncludes | ||
| 2498 | id::PkgId | ||
| 2499 | filename::String | ||
| 2500 | mtime::Float64 | ||
| 2501 | modpath::Vector{String} # seemingly not needed in Base, but used by Revise | ||
| 2502 | end | ||
| 2503 | |||
| 2504 | function parse_cache_header(f::IO) | ||
| 2505 | flags = read(f, UInt8) | ||
| 2506 | modules = Vector{Pair{PkgId, UInt64}}() | ||
| 2507 | while true | ||
| 2508 | n = read(f, Int32) | ||
| 2509 | n == 0 && break | ||
| 2510 | sym = String(read(f, n)) # module name | ||
| 2511 | uuid = UUID((read(f, UInt64), read(f, UInt64))) # pkg UUID | ||
| 2512 | build_id = read(f, UInt64) # build UUID (mostly just a timestamp) | ||
| 2513 | push!(modules, PkgId(uuid, sym) => build_id) | ||
| 2514 | end | ||
| 2515 | totbytes = read(f, Int64) # total bytes for file dependencies + preferences | ||
| 2516 | # read the list of requirements | ||
| 2517 | # and split the list into include and requires statements | ||
| 2518 | includes = CacheHeaderIncludes[] | ||
| 2519 | requires = Pair{PkgId, PkgId}[] | ||
| 2520 | while true | ||
| 2521 | n2 = read(f, Int32) | ||
| 2522 | totbytes -= 4 | ||
| 2523 | if n2 == 0 | ||
| 2524 | break | ||
| 2525 | end | ||
| 2526 | depname = String(read(f, n2)) | ||
| 2527 | totbytes -= n2 | ||
| 2528 | mtime = read(f, Float64) | ||
| 2529 | totbytes -= 8 | ||
| 2530 | n1 = read(f, Int32) | ||
| 2531 | totbytes -= 4 | ||
| 2532 | # map ids to keys | ||
| 2533 | modkey = (n1 == 0) ? PkgId("") : modules[n1].first | ||
| 2534 | modpath = String[] | ||
| 2535 | if n1 != 0 | ||
| 2536 | # determine the complete module path | ||
| 2537 | while true | ||
| 2538 | n1 = read(f, Int32) | ||
| 2539 | totbytes -= 4 | ||
| 2540 | if n1 == 0 | ||
| 2541 | break | ||
| 2542 | end | ||
| 2543 | push!(modpath, String(read(f, n1))) | ||
| 2544 | totbytes -= n1 | ||
| 2545 | end | ||
| 2546 | end | ||
| 2547 | if depname[1] == '\0' | ||
| 2548 | push!(requires, modkey => binunpack(depname)) | ||
| 2549 | else | ||
| 2550 | push!(includes, CacheHeaderIncludes(modkey, depname, mtime, modpath)) | ||
| 2551 | end | ||
| 2552 | end | ||
| 2553 | prefs = String[] | ||
| 2554 | while true | ||
| 2555 | n2 = read(f, Int32) | ||
| 2556 | totbytes -= 4 | ||
| 2557 | if n2 == 0 | ||
| 2558 | break | ||
| 2559 | end | ||
| 2560 | push!(prefs, String(read(f, n2))) | ||
| 2561 | totbytes -= n2 | ||
| 2562 | end | ||
| 2563 | prefs_hash = read(f, UInt64) | ||
| 2564 | totbytes -= 8 | ||
| 2565 | srctextpos = read(f, Int64) | ||
| 2566 | totbytes -= 8 | ||
| 2567 | @assert totbytes == 0 "header of cache file appears to be corrupt (totbytes == $(totbytes))" | ||
| 2568 | # read the list of modules that are required to be present during loading | ||
| 2569 | required_modules = Vector{Pair{PkgId, UInt128}}() | ||
| 2570 | while true | ||
| 2571 | n = read(f, Int32) | ||
| 2572 | n == 0 && break | ||
| 2573 | sym = String(read(f, n)) # module name | ||
| 2574 | uuid = UUID((read(f, UInt64), read(f, UInt64))) # pkg UUID | ||
| 2575 | build_id = UInt128(read(f, UInt64)) << 64 | ||
| 2576 | build_id |= read(f, UInt64) | ||
| 2577 | push!(required_modules, PkgId(uuid, sym) => build_id) | ||
| 2578 | end | ||
| 2579 | l = read(f, Int32) | ||
| 2580 | clone_targets = read(f, l) | ||
| 2581 | |||
| 2582 | return modules, (includes, requires), required_modules, srctextpos, prefs, prefs_hash, clone_targets, flags | ||
| 2583 | end | ||
| 2584 | |||
| 2585 | function parse_cache_header(cachefile::String; srcfiles_only::Bool=false) | ||
| 2586 | io = open(cachefile, "r") | ||
| 2587 | try | ||
| 2588 | iszero(isvalid_cache_header(io)) && throw(ArgumentError("Invalid header in cache file $cachefile.")) | ||
| 2589 | ret = parse_cache_header(io) | ||
| 2590 | srcfiles_only || return ret | ||
| 2591 | _, (includes, _), _, srctextpos, _... = ret | ||
| 2592 | srcfiles = srctext_files(io, srctextpos) | ||
| 2593 | delidx = Int[] | ||
| 2594 | for (i, chi) in enumerate(includes) | ||
| 2595 | chi.filename ∈ srcfiles || push!(delidx, i) | ||
| 2596 | end | ||
| 2597 | deleteat!(includes, delidx) | ||
| 2598 | return ret | ||
| 2599 | finally | ||
| 2600 | close(io) | ||
| 2601 | end | ||
| 2602 | end | ||
| 2603 | |||
| 2604 | preferences_hash(f::IO) = parse_cache_header(f)[6] | ||
| 2605 | function preferences_hash(cachefile::String) | ||
| 2606 | io = open(cachefile, "r") | ||
| 2607 | try | ||
| 2608 | if iszero(isvalid_cache_header(io)) | ||
| 2609 | throw(ArgumentError("Invalid header in cache file $cachefile.")) | ||
| 2610 | end | ||
| 2611 | return preferences_hash(io) | ||
| 2612 | finally | ||
| 2613 | close(io) | ||
| 2614 | end | ||
| 2615 | end | ||
| 2616 | |||
| 2617 | function cache_dependencies(f::IO) | ||
| 2618 | _, (includes, _), modules, _... = parse_cache_header(f) | ||
| 2619 | return modules, map(chi -> (chi.filename, chi.mtime), includes) # return just filename and mtime | ||
| 2620 | end | ||
| 2621 | |||
| 2622 | function cache_dependencies(cachefile::String) | ||
| 2623 | io = open(cachefile, "r") | ||
| 2624 | try | ||
| 2625 | iszero(isvalid_cache_header(io)) && throw(ArgumentError("Invalid header in cache file $cachefile.")) | ||
| 2626 | return cache_dependencies(io) | ||
| 2627 | finally | ||
| 2628 | close(io) | ||
| 2629 | end | ||
| 2630 | end | ||
| 2631 | |||
| 2632 | function read_dependency_src(io::IO, filename::AbstractString) | ||
| 2633 | srctextpos = parse_cache_header(io)[4] | ||
| 2634 | srctextpos == 0 && error("no source-text stored in cache file") | ||
| 2635 | seek(io, srctextpos) | ||
| 2636 | return _read_dependency_src(io, filename) | ||
| 2637 | end | ||
| 2638 | |||
| 2639 | function _read_dependency_src(io::IO, filename::AbstractString) | ||
| 2640 | while !eof(io) | ||
| 2641 | filenamelen = read(io, Int32) | ||
| 2642 | filenamelen == 0 && break | ||
| 2643 | fn = String(read(io, filenamelen)) | ||
| 2644 | len = read(io, UInt64) | ||
| 2645 | if fn == filename | ||
| 2646 | return String(read(io, len)) | ||
| 2647 | end | ||
| 2648 | seek(io, position(io) + len) | ||
| 2649 | end | ||
| 2650 | error(filename, " is not stored in the source-text cache") | ||
| 2651 | end | ||
| 2652 | |||
| 2653 | function read_dependency_src(cachefile::String, filename::AbstractString) | ||
| 2654 | io = open(cachefile, "r") | ||
| 2655 | try | ||
| 2656 | iszero(isvalid_cache_header(io)) && throw(ArgumentError("Invalid header in cache file $cachefile.")) | ||
| 2657 | return read_dependency_src(io, filename) | ||
| 2658 | finally | ||
| 2659 | close(io) | ||
| 2660 | end | ||
| 2661 | end | ||
| 2662 | |||
| 2663 | function srctext_files(f::IO, srctextpos::Int64) | ||
| 2664 | files = Set{String}() | ||
| 2665 | srctextpos == 0 && return files | ||
| 2666 | seek(f, srctextpos) | ||
| 2667 | while !eof(f) | ||
| 2668 | filenamelen = read(f, Int32) | ||
| 2669 | filenamelen == 0 && break | ||
| 2670 | fn = String(read(f, filenamelen)) | ||
| 2671 | len = read(f, UInt64) | ||
| 2672 | push!(files, fn) | ||
| 2673 | seek(f, position(f) + len) | ||
| 2674 | end | ||
| 2675 | return files | ||
| 2676 | end | ||
| 2677 | |||
| 2678 | # Test to see if this UUID is mentioned in this `Project.toml`; either as | ||
| 2679 | # the top-level UUID (e.g. that of the project itself), as a dependency, | ||
| 2680 | # or as an extra/weakdep for Preferences. | ||
| 2681 | function get_uuid_name(project::Dict{String, Any}, uuid::UUID) | ||
| 2682 | uuid_p = get(project, "uuid", nothing)::Union{Nothing, String} | ||
| 2683 | name = get(project, "name", nothing)::Union{Nothing, String} | ||
| 2684 | if name !== nothing && uuid_p !== nothing && UUID(uuid_p) == uuid | ||
| 2685 | return name | ||
| 2686 | end | ||
| 2687 | deps = get(project, "deps", nothing)::Union{Nothing, Dict{String, Any}} | ||
| 2688 | if deps !== nothing | ||
| 2689 | for (k, v) in deps | ||
| 2690 | if uuid == UUID(v::String) | ||
| 2691 | return k | ||
| 2692 | end | ||
| 2693 | end | ||
| 2694 | end | ||
| 2695 | for subkey in ("deps", "extras", "weakdeps") | ||
| 2696 | subsection = get(project, subkey, nothing)::Union{Nothing, Dict{String, Any}} | ||
| 2697 | if subsection !== nothing | ||
| 2698 | for (k, v) in subsection | ||
| 2699 | if uuid == UUID(v::String) | ||
| 2700 | return k | ||
| 2701 | end | ||
| 2702 | end | ||
| 2703 | end | ||
| 2704 | end | ||
| 2705 | return nothing | ||
| 2706 | end | ||
| 2707 | |||
| 2708 | function get_uuid_name(project_toml::String, uuid::UUID) | ||
| 2709 | project = parsed_toml(project_toml) | ||
| 2710 | return get_uuid_name(project, uuid) | ||
| 2711 | end | ||
| 2712 | |||
| 2713 | # If we've asked for a specific UUID, this function will extract the prefs | ||
| 2714 | # for that particular UUID. Otherwise, it returns all preferences. | ||
| 2715 | function filter_preferences(prefs::Dict{String, Any}, pkg_name) | ||
| 2716 | if pkg_name === nothing | ||
| 2717 | return prefs | ||
| 2718 | else | ||
| 2719 | return get(Dict{String, Any}, prefs, pkg_name)::Dict{String, Any} | ||
| 2720 | end | ||
| 2721 | end | ||
| 2722 | |||
| 2723 | function collect_preferences(project_toml::String, uuid::Union{UUID,Nothing}) | ||
| 2724 | # We'll return a list of dicts to be merged | ||
| 2725 | dicts = Dict{String, Any}[] | ||
| 2726 | |||
| 2727 | project = parsed_toml(project_toml) | ||
| 2728 | pkg_name = nothing | ||
| 2729 | if uuid !== nothing | ||
| 2730 | # If we've been given a UUID, map that to the name of the package as | ||
| 2731 | # recorded in the preferences section. If we can't find that mapping, | ||
| 2732 | # exit out, as it means there's no way preferences can be set for that | ||
| 2733 | # UUID, as we only allow actual dependencies to have preferences set. | ||
| 2734 | pkg_name = get_uuid_name(project, uuid) | ||
| 2735 | if pkg_name === nothing | ||
| 2736 | return dicts | ||
| 2737 | end | ||
| 2738 | end | ||
| 2739 | |||
| 2740 | # Look first inside of `Project.toml` to see we have preferences embedded within there | ||
| 2741 | proj_preferences = get(Dict{String, Any}, project, "preferences")::Dict{String, Any} | ||
| 2742 | push!(dicts, filter_preferences(proj_preferences, pkg_name)) | ||
| 2743 | |||
| 2744 | # Next, look for `(Julia)LocalPreferences.toml` files next to this `Project.toml` | ||
| 2745 | project_dir = dirname(project_toml) | ||
| 2746 | for name in preferences_names | ||
| 2747 | toml_path = joinpath(project_dir, name) | ||
| 2748 | if isfile(toml_path) | ||
| 2749 | prefs = parsed_toml(toml_path) | ||
| 2750 | push!(dicts, filter_preferences(prefs, pkg_name)) | ||
| 2751 | |||
| 2752 | # If we find `JuliaLocalPreferences.toml`, don't look for `LocalPreferences.toml` | ||
| 2753 | break | ||
| 2754 | end | ||
| 2755 | end | ||
| 2756 | |||
| 2757 | return dicts | ||
| 2758 | end | ||
| 2759 | |||
| 2760 | """ | ||
| 2761 | recursive_prefs_merge(base::Dict, overrides::Dict...) | ||
| 2762 | |||
| 2763 | Helper function to merge preference dicts recursively, honoring overrides in nested | ||
| 2764 | dictionaries properly. | ||
| 2765 | """ | ||
| 2766 | function recursive_prefs_merge(base::Dict{String, Any}, overrides::Dict{String, Any}...) | ||
| 2767 | new_base = Base._typeddict(base, overrides...) | ||
| 2768 | |||
| 2769 | for override in overrides | ||
| 2770 | # Clear entries are keys that should be deleted from any previous setting. | ||
| 2771 | override_clear = get(override, "__clear__", nothing) | ||
| 2772 | if override_clear isa Vector{String} | ||
| 2773 | for k in override_clear | ||
| 2774 | delete!(new_base, k) | ||
| 2775 | end | ||
| 2776 | end | ||
| 2777 | |||
| 2778 | for (k, override_k) in override | ||
| 2779 | # Note that if either the `base` mapping or the `override` mapping is _not_ a `Dict`, the `override` value simply replaces the `base` value instead of being merged recursively. | ||
| 2780 | new_base_k = get(new_base, k, nothing) | ||
| 2781 | if new_base_k isa Dict{String, Any} && override_k isa Dict{String, Any} | ||
| 2782 | new_base[k] = recursive_prefs_merge(new_base_k, override_k) | ||
| 2783 | else | ||
| 2784 | new_base[k] = override_k | ||
| 2785 | end | ||
| 2786 | end | ||
| 2787 | end | ||
| 2788 | return new_base | ||
| 2789 | end | ||
| 2790 | |||
| 2791 | function get_preferences(uuid::Union{UUID,Nothing} = nothing) | ||
| 2792 | merged_prefs = Dict{String,Any}() | ||
| 2793 | for env in reverse(load_path()) | ||
| 2794 | project_toml = env_project_file(env) | ||
| 2795 | if !isa(project_toml, String) | ||
| 2796 | continue | ||
| 2797 | end | ||
| 2798 | |||
| 2799 | # Collect all dictionaries from the current point in the load path, then merge them in | ||
| 2800 | dicts = collect_preferences(project_toml, uuid) | ||
| 2801 | merged_prefs = recursive_prefs_merge(merged_prefs, dicts...) | ||
| 2802 | end | ||
| 2803 | return merged_prefs | ||
| 2804 | end | ||
| 2805 | |||
| 2806 | function get_preferences_hash(uuid::Union{UUID, Nothing}, prefs_list::Vector{String}) | ||
| 2807 | # Start from a predictable hash point to ensure that the same preferences always | ||
| 2808 | # hash to the same value, modulo changes in how Dictionaries are hashed. | ||
| 2809 | h = UInt(0) | ||
| 2810 | uuid === nothing && return UInt64(h) | ||
| 2811 | |||
| 2812 | # Load the preferences | ||
| 2813 | prefs = get_preferences(uuid) | ||
| 2814 | |||
| 2815 | # Walk through each name that's called out as a compile-time preference | ||
| 2816 | for name in prefs_list | ||
| 2817 | prefs_value = get(prefs, name, nothing) | ||
| 2818 | if prefs_value !== nothing | ||
| 2819 | h = hash(prefs_value, h)::UInt | ||
| 2820 | end | ||
| 2821 | end | ||
| 2822 | # We always return a `UInt64` so that our serialization format is stable | ||
| 2823 | return UInt64(h) | ||
| 2824 | end | ||
| 2825 | |||
| 2826 | get_preferences_hash(m::Module, prefs_list::Vector{String}) = get_preferences_hash(PkgId(m).uuid, prefs_list) | ||
| 2827 | |||
# This is how we keep track of who is using what preferences at compile-time
const COMPILETIME_PREFERENCES = Dict{UUID,Set{String}}()

# In `Preferences.jl`, if someone calls `load_preference(@__MODULE__, key)` while we're precompiling,
# we mark that usage as a usage at compile-time and call this method, so that at the end of `.ji` generation,
# we can record the list of compile-time preferences and embed that into the `.ji` header
function record_compiletime_preference(uuid::UUID, key::String)
    # Lazily create the per-package key set, then record this key in it.
    push!(get!(Set{String}, COMPILETIME_PREFERENCES, uuid), key)
    return nothing
end
# Return the compile-time preference keys recorded for `uuid` as a freshly
# collected vector (empty when nothing was recorded). The `Module` overload
# routes through the module's package UUID; `nothing` yields an empty list.
get_compiletime_preferences(uuid::UUID) = collect(get(Vector{String}, COMPILETIME_PREFERENCES, uuid))
get_compiletime_preferences(m::Module) = get_compiletime_preferences(PkgId(m).uuid)
get_compiletime_preferences(::Nothing) = String[]
| 2842 | |||
# Ask the runtime whether the recorded pkgimage clone targets are usable here.
# Returns `nothing` when they are compatible, otherwise the rejection reason
# reported by the runtime.
function check_clone_targets(clone_targets)
    reason = ccall(:jl_check_pkgimage_clones, Any, (Ptr{Cchar},), clone_targets)
    return reason
end
| 2849 | |||
# Decoded form of the packed cache-flags byte.
# Bit layout OOICCDDP — see jl_cache_flags in staticdata_utils.c:
#   P  (bit 0)    use_pkgimages
#   DD (bits 1-2) debug_level
#   CC (bits 3-4) check_bounds
#   I  (bit 5)    inline
#   OO (bits 6-7) opt_level
struct CacheFlags
    use_pkgimages::Bool
    debug_level::Int
    check_bounds::Int
    inline::Bool
    opt_level::Int

    function CacheFlags(f::UInt8)
        pkgimages = Bool(f & 0x01)
        debug = Int((f >> 1) & 0x03)
        bounds = Int((f >> 3) & 0x03)
        inlining = Bool((f >> 5) & 0x01)
        opt = Int((f >> 6) & 0x03)
        return new(pkgimages, debug, bounds, inlining, opt)
    end
end
# Accept plain `Int` flag values, and decode the current session's flags by default.
CacheFlags(f::Int) = CacheFlags(UInt8(f))
CacheFlags() = CacheFlags(ccall(:jl_cache_flags, UInt8, ()))
| 2869 | |||
# One-line, human-readable rendering of the decoded cache flags.
function show(io::IO, cf::CacheFlags)
    print(io, "use_pkgimages = ", cf.use_pkgimages,
              ", debug_level = ", cf.debug_level,
              ", check_bounds = ", cf.check_bounds,
              ", inline = ", cf.inline,
              ", opt_level = ", cf.opt_level)
end
| 2877 | |||
# One CPU target recorded in a pkgimage, as decoded by `parse_image_target`.
struct ImageTarget
    name::String                 # target name string
    flags::Int32                 # target flags as recorded by the serializer
    ext_features::String         # extra feature string (appended after the name by `show`)
    features_en::Vector{UInt8}   # packed bitset of enabled features (bit index per `FeatureName.bit`)
    features_dis::Vector{UInt8}  # packed bitset of disabled features
end
| 2885 | |||
# Deserialize a single `ImageTarget` record from `io`, in the order written by
# the runtime's clone-target serializer: flags, feature count, the two feature
# bitsets (4*nfeature bytes each), then length-prefixed name and feature strings.
function parse_image_target(io::IO)
    flags = read(io, Int32)
    nfeature = read(io, Int32)
    enabled = read(io, 4*nfeature)
    disabled = read(io, 4*nfeature)
    # Both strings are Int32-length-prefixed; read the length, then the payload.
    name = String(read(io, read(io, Int32)))
    ext_features = String(read(io, read(io, Int32)))
    return ImageTarget(name, flags, ext_features, enabled, disabled)
end
| 2897 | |||
# Decode a serialized clone-target blob: a leading Int32 count followed by
# that many `ImageTarget` records.
function parse_image_targets(targets::Vector{UInt8})
    buf = IOBuffer(targets)
    n = read(buf, Int32)
    return ImageTarget[parse_image_target(buf) for _ in 1:n]
end
| 2907 | |||
# The clone targets of the currently running system image, decoded into `ImageTarget`s.
function current_image_targets()
    targets = @ccall jl_reflect_clone_targets()::Vector{UInt8}
    return parse_image_targets(targets)
end
| 2912 | |||
# Mirror of the runtime's CPU feature-name table entry (see `jl_reflect_feature_names`).
struct FeatureName
    name::Cstring   # feature name, owned by the runtime (read via `Base.unsafe_string`)
    bit::UInt32     # bit index into a `uint32_t` array;
    llvmver::UInt32 # 0 if it is available on the oldest LLVM version we support
end
| 2918 | |||
# Fetch the runtime's CPU feature-name table. The returned array wraps
# runtime-owned memory (`own=false`), so it must not be freed by Julia.
function feature_names()
    names_ref = Ref{Ptr{FeatureName}}()
    count_ref = Ref{Csize_t}()
    @ccall jl_reflect_feature_names(names_ref::Ptr{Ptr{FeatureName}}, count_ref::Ptr{Csize_t})::Cvoid
    if names_ref[] == C_NULL
        # If no table pointer is returned, the runtime must report a zero count.
        @assert count_ref[] == 0
        return Vector{FeatureName}(undef, 0)
    end
    return Base.unsafe_wrap(Array, names_ref[], count_ref[], own=false)
end
| 2929 | |||
# Test whether `feat`'s bit is set in the packed byte-array feature bitset.
function test_feature(features::Vector{UInt8}, feat::FeatureName)
    # Split the global bit index into a (byte, bit-within-byte) pair;
    # Julia arrays are 1-based, hence the +1.
    byte, bit = divrem(feat.bit, 8)
    return !iszero(features[byte + 1] & (one(UInt8) << bit))
end
| 2936 | |||
# Render an `ImageTarget` as `name[,ext_features]; flags=N; features_en=(a, b, ...)`.
function show(io::IO, it::ImageTarget)
    print(io, it.name)
    isempty(it.ext_features) || print(io, ",", it.ext_features)
    print(io, "; flags=", it.flags)
    # List the names of all enabled features, comma-separated.
    enabled = [Base.unsafe_string(feat.name) for feat in feature_names() if test_feature(it.features_en, feat)]
    print(io, "; features_en=(", join(enabled, ", "), ")")
    # Is feature_dis useful?
end
| 2959 | |||
# Pidfile-lock hooks. Set by FileWatching.__init__(); until then these globals
# are undefined, which `maybe_cachefile_lock` detects via `@isdefined`.
global mkpidlock_hook
global trymkpidlock_hook
global parse_pidfile_hook

# The preferences hash is only known after precompilation so just assume no preferences.
# Also ignore the active project, which means that if all other conditions are equal,
# the same package cannot be precompiled from different projects and/or different preferences at the same time.
compilecache_pidfile_path(pkg::PkgId) = compilecache_path(pkg, UInt64(0); project="") * ".pidfile"

# Age (in seconds) after which a pidfile lock is considered stale; see the
# description on `maybe_cachefile_lock` for how it is applied.
const compilecache_pidlock_stale_age = 10
| 2971 | |||
# Allows processes to wait if another process is precompiling a given source already.
# The lock file mtime will be updated when held at most every `stale_age/2` seconds, with expected
# variance of 10 seconds or more being infrequent but not unusual.
# After `stale_age` seconds beyond the mtime of the lock file, the lock file is deleted and
# precompilation will proceed if the locking process no longer exists or after `stale_age * 5`
# seconds if the process does still exist.
# If the lock is held by another host, it will conservatively wait `stale_age * 5`
# seconds since processes cannot be checked remotely
# NOTE(review): `srcpath` is not used in this body; the lock path is derived from `pkg` alone.
function maybe_cachefile_lock(f, pkg::PkgId, srcpath::String; stale_age=compilecache_pidlock_stale_age)
    if @isdefined(mkpidlock_hook) && @isdefined(trymkpidlock_hook) && @isdefined(parse_pidfile_hook)
        pidfile = compilecache_pidfile_path(pkg)
        # Try to take the lock and run `f` while holding it; `false` means some
        # other process currently holds the lock.
        cachefile = invokelatest(trymkpidlock_hook, f, pidfile; stale_age)
        if cachefile === false
            pid, hostname, age = invokelatest(parse_pidfile_hook, pidfile)
            # Only surface the wait message in interactive sessions; otherwise log at debug level.
            verbosity = isinteractive() ? CoreLogging.Info : CoreLogging.Debug
            if isempty(hostname) || hostname == gethostname()
                @logmsg verbosity "Waiting for another process (pid: $pid) to finish precompiling $pkg. Pidfile: $pidfile"
            else
                @logmsg verbosity "Waiting for another machine (hostname: $hostname, pid: $pid) to finish precompiling $pkg. Pidfile: $pidfile"
            end
            # wait until the lock is available, but don't actually acquire it
            # returning nothing indicates a process waited for another
            return invokelatest(mkpidlock_hook, Returns(nothing), pidfile; stale_age)
        end
        return cachefile
    else
        # for packages loaded before FileWatching.__init__()
        f()
    end
end
# Returns `true` if `cachefile` ("cachefile.ji") is stale relative to `modpath` ("modpath.jl");
# otherwise returns the list of dependencies to also check (plus the object cache path).
# This variant imposes no particular package identity or build_id constraint.
@constprop :none function stale_cachefile(modpath::String, cachefile::String; ignore_loaded::Bool = false)
    return stale_cachefile(PkgId(""), UInt128(0), modpath, cachefile; ignore_loaded)
end
# Full staleness check of `cachefile` against source `modpath` for package `modkey`,
# optionally pinned to an exact `build_id`. Returns `true` when the cache file must be
# rejected, otherwise `(depmods, ocachefile)` where `depmods` lists the dependencies
# still to be checked and `ocachefile` is the pkgimage path (or `nothing`).
@constprop :none function stale_cachefile(modkey::PkgId, build_id::UInt128, modpath::String, cachefile::String; ignore_loaded::Bool = false)
    io = open(cachefile, "r")
    try
        checksum = isvalid_cache_header(io)
        if iszero(checksum)
            @debug "Rejecting cache file $cachefile due to it containing an invalid cache header"
            return true # invalid cache file
        end
        modules, (includes, requires), required_modules, srctextpos, prefs, prefs_hash, clone_targets, flags = parse_cache_header(io)
        if isempty(modules)
            return true # ignore empty file
        end
        # Cache flags (opt level, bounds checking, ...) must be compatible with this session.
        if ccall(:jl_match_cache_flags, UInt8, (UInt8,), flags) == 0
            @debug """
            Rejecting cache file $cachefile for $modkey since the flags are mismatched
            current session: $(CacheFlags())
            cache file: $(CacheFlags(flags))
            """
            return true
        end
        # A nonempty clone-target blob means this cache has an associated native-code pkgimage.
        pkgimage = !isempty(clone_targets)
        if pkgimage
            ocachefile = ocachefile_from_cachefile(cachefile)
            if JLOptions().use_pkgimages == 0
                # presence of clone_targets means native code cache
                @debug "Rejecting cache file $cachefile for $modkey since it would require usage of pkgimage"
                return true
            end
            rejection_reasons = check_clone_targets(clone_targets)
            if !isnothing(rejection_reasons)
                @debug("Rejecting cache file $cachefile for $modkey:",
                    Reasons=rejection_reasons,
                    var"Image Targets"=parse_image_targets(clone_targets),
                    var"Current Targets"=current_image_targets())
                return true
            end
            if !isfile(ocachefile)
                @debug "Rejecting cache file $cachefile for $modkey since pkgimage $ocachefile was not found"
                return true
            end
        else
            ocachefile = nothing
        end
        # The first entry of `modules` is the package this cache file provides.
        id = first(modules)
        if id.first != modkey && modkey != PkgId("")
            @debug "Rejecting cache file $cachefile for $modkey since it is for $id instead"
            return true
        end
        if build_id != UInt128(0)
            # Exact-build request: high 64 bits are the header checksum, low 64 bits
            # the module's own build id; both must match.
            id_build = (UInt128(checksum) << 64) | id.second
            if id_build != build_id
                @debug "Ignoring cache file $cachefile for $modkey ($((UUID(id_build)))) since it is does not provide desired build_id ($((UUID(build_id))))"
                return true
            end
        end
        id = id.first
        modules = Dict{PkgId, UInt64}(modules)

        # Check if transitive dependencies can be fulfilled
        ndeps = length(required_modules)
        depmods = Vector{Any}(undef, ndeps)
        for i in 1:ndeps
            req_key, req_build_id = required_modules[i]
            # Module is already loaded
            if root_module_exists(req_key)
                M = root_module(req_key)
                if PkgId(M) == req_key && module_build_id(M) === req_build_id
                    depmods[i] = M
                elseif ignore_loaded
                    # Used by Pkg.precompile given that there it's ok to precompile different versions of loaded packages
                    @goto locate_branch
                else
                    @debug "Rejecting cache file $cachefile because module $req_key is already loaded and incompatible."
                    return true # Won't be able to fulfill dependency
                end
            else
                @label locate_branch
                path = locate_package(req_key)
                if path === nothing
                    @debug "Rejecting cache file $cachefile because dependency $req_key not found."
                    return true # Won't be able to fulfill dependency
                end
                depmods[i] = (path, req_key, req_build_id)
            end
        end

        # check if this file is going to provide one of our concrete dependencies
        # or if it provides a version that conflicts with our concrete dependencies
        # or neither
        skip_timecheck = false
        for (req_key, req_build_id) in _concrete_dependencies
            # NOTE: rebinds `build_id` (the parameter is no longer needed at this point).
            build_id = get(modules, req_key, UInt64(0))
            if build_id !== UInt64(0)
                build_id |= UInt128(checksum) << 64
                if build_id === req_build_id
                    skip_timecheck = true
                    break
                end
                @debug "Rejecting cache file $cachefile because it provides the wrong build_id (got $((UUID(build_id)))) for $req_key (want $(UUID(req_build_id)))"
                return true # cachefile doesn't provide the required version of the dependency
            end
        end

        # now check if this file is fresh relative to its source files
        if !skip_timecheck
            if !samefile(includes[1].filename, modpath) && !samefile(fixup_stdlib_path(includes[1].filename), modpath)
                @debug "Rejecting cache file $cachefile because it is for file $(includes[1].filename) not file $modpath"
                return true # cache file was compiled from a different path
            end
            for (modkey, req_modkey) in requires
                # verify that `require(modkey, name(req_modkey))` ==> `req_modkey`
                if identify_package(modkey, req_modkey.name) != req_modkey
                    @debug "Rejecting cache file $cachefile because uuid mapping for $modkey => $req_modkey has changed"
                    return true
                end
            end
            for chi in includes
                f, ftime_req = chi.filename, chi.mtime
                if !ispath(f)
                    _f = fixup_stdlib_path(f)
                    if isfile(_f) && startswith(_f, Sys.STDLIB)
                        # mtime is changed by extraction
                        @debug "Skipping mtime check for file $f used by $cachefile, since it is a stdlib"
                        continue
                    end
                    @debug "Rejecting stale cache file $cachefile because file $f does not exist"
                    return true
                end
                ftime = mtime(f)
                # The file is fresh if its mtime matches the recorded one under any of
                # several tolerances that compensate for filesystem/CI mtime quirks.
                is_stale = ( ftime != ftime_req ) &&
                           ( ftime != floor(ftime_req) ) && # Issue #13606, PR #13613: compensate for Docker images rounding mtimes
                           ( ftime != ceil(ftime_req) ) && # PR: #47433 Compensate for CircleCI's truncating of timestamps in its caching
                           ( ftime != trunc(ftime_req, digits=6) ) && # Issue #20837, PR #20840: compensate for GlusterFS truncating mtimes to microseconds
                           ( ftime != 1.0 ) && # PR #43090: provide compatibility with Nix mtime.
                           !( 0 < (ftime_req - ftime) < 1e-6 ) # PR #45552: Compensate for Windows tar giving mtimes that may be incorrect by up to one microsecond
                if is_stale
                    @debug "Rejecting stale cache file $cachefile (mtime $ftime_req) because file $f (mtime $ftime) has changed"
                    return true
                end
            end
        end

        if !isvalid_file_crc(io)
            @debug "Rejecting cache file $cachefile because it has an invalid checksum"
            return true
        end

        if pkgimage
            if !isvalid_pkgimage_crc(io, ocachefile::String)
                @debug "Rejecting cache file $cachefile because $ocachefile has an invalid checksum"
                return true
            end
        end

        # Compile-time preferences recorded in the cache must hash identically
        # to the currently active preferences.
        curr_prefs_hash = get_preferences_hash(id.uuid, prefs)
        if prefs_hash != curr_prefs_hash
            @debug "Rejecting cache file $cachefile because preferences hash does not match 0x$(string(prefs_hash, base=16)) != 0x$(string(curr_prefs_hash, base=16))"
            return true
        end

        return depmods, ocachefile # fresh cachefile
    finally
        close(io)
    end
end
| 3172 | |||
"""
    @__FILE__ -> String

Expand to a string with the path to the file containing the
macrocall, or an empty string if evaluated by `julia -e <expr>`.
Return `nothing` if the macro was missing parser source information.
Alternatively see [`PROGRAM_FILE`](@ref).
"""
macro __FILE__()
    file = __source__.file
    return file === nothing ? nothing : String(file::Symbol)
end
| 3185 | |||
"""
    @__DIR__ -> String

Expand to a string with the absolute path to the directory of the file
containing the macrocall.
Return the current working directory if run from a REPL or if evaluated by `julia -e <expr>`.
"""
macro __DIR__()
    __source__.file === nothing && return nothing
    dir = dirname(String(__source__.file::Symbol))
    # An empty dirname means the source had no directory component (e.g. REPL input).
    return isempty(dir) ? pwd() : abspath(dir)
end
| 3198 | |||
"""
    precompile(f, argtypes::Tuple{Vararg{Any}})

Compile the given function `f` for the argument tuple (of types) `argtypes`, but do not execute it.
"""
function precompile(@nospecialize(f), @nospecialize(argtypes::Tuple))
    # Build the full call signature type and defer to the `Type`-based method.
    return precompile(Tuple{Core.Typeof(f), argtypes...})
end
| 3207 | |||
# When set, warn about precompile statements that fail to produce code.
const ENABLE_PRECOMPILE_WARNINGS = Ref(false)

function precompile(@nospecialize(argt::Type))
    # Ask the runtime to compile the signature; nonzero means success.
    compiled = ccall(:jl_compile_hint, Int32, (Any,), argt) != 0
    if !compiled && ENABLE_PRECOMPILE_WARNINGS[]
        @warn "Inactive precompile statement" maxlog=100 form=argt _module=nothing _file=nothing _line=0
    end
    return compiled
end
| 3216 | |||
# Variants that work for `invoke`d calls for which the signature may not be sufficient
# Compile a specific MethodInstance directly in the given world; always reports success.
precompile(mi::Core.MethodInstance, world::UInt=get_world_counter()) =
    (ccall(:jl_compile_method_instance, Cvoid, (Any, Any, UInt), mi, C_NULL, world); return true)
| 3220 | |||
"""
    precompile(f, argtypes::Tuple{Vararg{Any}}, m::Method)

Precompile a specific method for the given argument types. This may be used to precompile
a different method than the one that would ordinarily be chosen by dispatch, thus
mimicking `invoke`.
"""
function precompile(@nospecialize(f), @nospecialize(argtypes::Tuple), m::Method)
    # Build the full call signature type and defer to the `(Type, Method)` form.
    return precompile(Tuple{Core.Typeof(f), argtypes...}, m)
end
| 3231 | |||
function precompile(@nospecialize(argt::Type), m::Method)
    # Intersect the requested signature with the method's own signature to
    # obtain the specialization type and static-parameter environment,
    # then compile that exact specialization.
    atype, sparams = ccall(:jl_type_intersection_with_env, Any, (Any, Any), argt, m.sig)::SimpleVector
    instance = Core.Compiler.specialize_method(m, atype, sparams)
    return precompile(instance)
end
| 3237 | |||
# Warm up the loading pipeline itself: precompile the cache-creation entry points
# so the first package precompilation does not pay their compilation cost.
precompile(include_package_for_output, (PkgId, String, Vector{String}, Vector{String}, Vector{String}, typeof(_concrete_dependencies), Nothing))
precompile(include_package_for_output, (PkgId, String, Vector{String}, Vector{String}, Vector{String}, typeof(_concrete_dependencies), String))
precompile(create_expr_cache, (PkgId, String, String, String, typeof(_concrete_dependencies), IO, IO))
precompile(create_expr_cache, (PkgId, String, String, Nothing, typeof(_concrete_dependencies), IO, IO))