Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
TuLiPa = "970f5c25-cd7d-4f04-b50d-7a4fe2af6639"
YAML = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6"
PythonCall = "6099a3de-0909-46bc-b1f4-468b9a2dfc0d"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"

[weakdeps]
OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed"
Expand Down
15 changes: 12 additions & 3 deletions src/JulES.jl
Original file line number Diff line number Diff line change
@@ -1,17 +1,25 @@
module JulES

"""
    @debugtime msg expr

Evaluate `expr`, emit a `@debug` record with timing statistics
(`elapsed_s`, `bytes`, `gctime_s`, seconds rounded to 3 digits),
and return the value of `expr`.
"""
macro debugtime(msg, expr)
    return quote
        local timed = Base.@timed $(esc(expr))
        @debug $msg elapsed_s = round(timed.time, digits=3) bytes = timed.bytes gctime_s = round(timed.gctime, digits=3)
        timed.value
    end
end

import TuLiPa

# Standard library
using Dates
using Distributed
using Logging
using Statistics

# Third-party (duplicate `using DataFrames` removed)
using Clustering
using DataFrames
using Distributions
using HDF5
using JSON
using YAML
# Used by ifm
#using ComponentArrays
#using Interpolations
Expand All @@ -25,7 +33,8 @@ using HDF5
# using OptimizationBBO
# using Zygote

# python_logger must come first so the log bridge exists for later files.
# (duplicate include of abstract_types.jl removed — including it twice
# would re-evaluate its type definitions)
include("python_logger.jl")
include("abstract_types.jl")
include("dimension_types.jl")
include("ifm.jl")
include("generic_io.jl")
Expand Down
304 changes: 157 additions & 147 deletions src/io.jl

Large diffs are not rendered by default.

53 changes: 53 additions & 0 deletions src/python_logger.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
using PythonCall
using Logging

# Lazily-initialized handle to Python's `logging` module.
const _py_logging = Ref{Py}()

"""
    py_logging() -> Py

Return the Python `logging` module, importing it on first use and
caching the handle for subsequent calls.
"""
function py_logging()
    isassigned(_py_logging) || (_py_logging[] = pyimport("logging"))
    return _py_logging[]
end

# Minimum Julia log level forwarded to Python; synced by `use_python_logging!`.
const _min_level = Ref{LogLevel}(Info)

"""
Logger that forwards Julia log records to Python's `logging` module.
"""
struct PythonLogBridge <: AbstractLogger end

function Logging.shouldlog(::PythonLogBridge, level, _module, group, id)
    return true
end

function Logging.min_enabled_level(::PythonLogBridge)
    return _min_level[]
end

function Logging.catch_exceptions(::PythonLogBridge)
    return true
end

"""
    Logging.handle_message(::PythonLogBridge, level, message, _module, group, id, filepath, line; kwargs...)

Forward a Julia log record to Python's `logging` module. The record is
emitted on the Python logger named after the originating Julia module
(falling back to `"julia"`); keyword arguments are stringified and passed
as `extra` fields.
"""
function Logging.handle_message(
    ::PythonLogBridge, level, message, _module, group, id, filepath, line;
    kwargs...
)
    log = py_logging()
    name = _module !== nothing ? string(_module) : "julia"
    # Map Julia log levels to Python numeric levels by threshold rather
    # than exact equality, so custom levels (e.g. LogLevel(1)) land in the
    # nearest standard Python bucket instead of all collapsing to
    # CRITICAL (50), as the previous `==`-chain did.
    pylevel = if level < Info
        10  # DEBUG
    elseif level < Warn
        20  # INFO
    elseif level < Error
        30  # WARNING
    elseif level == Error
        40  # ERROR
    else
        50  # CRITICAL (anything above Julia's Error)
    end
    # NOTE(review): Python's logging raises if `extra` keys collide with
    # reserved LogRecord attributes (e.g. "message", "module") — assumed
    # not to occur for the kwargs used here; confirm against callers.
    extra = pydict(Dict(string(k) => string(v) for (k, v) in kwargs))
    log.getLogger(name).log(pylevel, string(message), extra=extra)
end

"""
    use_python_logging!()

Install `PythonLogBridge` as the global Julia logger and sync the minimum
forwarded Julia log level to the Python root logger's configured level.
Returns the previous global logger (the return of `global_logger`).
"""
function use_python_logging!()
    rootlevel = pyconvert(Int, py_logging().root.level)
    # Translate Python numeric levels (DEBUG=10, INFO=20, WARNING=30, …)
    # into the corresponding Julia LogLevel threshold.
    _min_level[] = rootlevel <= 10 ? Debug :
                   rootlevel <= 20 ? Info :
                   rootlevel <= 30 ? Warn : Error
    global_logger(PythonLogBridge())
end
184 changes: 94 additions & 90 deletions src/run_jules_wrapper.jl
Original file line number Diff line number Diff line change
@@ -1,99 +1,103 @@
"""
    getdataset(config, names, filename_clearing, filename_aggregated)

Load the JulES dataset referenced by `config` from JSON files in the
configured output path. Returns a `Dict` with detailed and aggregated
data elements (each concatenated with the shared time vectors), the
detailed→aggregated storage mapping, and start storages for both levels.

Diff-merge corruption fixed: the body was duplicated (the second copy was
unreachable dead code after the first `return`); the unused local
`settings` was also removed.
"""
function getdataset(config, names, filename_clearing, filename_aggregated)
    sti_dataset = joinpath(config["main"]["outputpath"])

    clearing = JulES.JSON.parsefile(joinpath(sti_dataset, filename_clearing))
    clearing = JulES.TuLiPa.getelements(clearing)

    aggregated = JulES.JSON.parsefile(joinpath(sti_dataset, filename_aggregated))
    aggregated = JulES.TuLiPa.getelements(aggregated)

    timevectors = JulES.JSON.parsefile(joinpath(sti_dataset, names["FILENAME_DATAELEMENTS_TIMEVECTORS"]))
    timevectors = JulES.TuLiPa.getelements(timevectors, sti_dataset)

    elements = vcat(clearing, timevectors)
    elements_ppp = vcat(aggregated, timevectors)

    storage_mapping = JulES.JSON.parsefile(
        joinpath(sti_dataset, names["FILENAME_STORAGE_MAPPING"]),
        dicttype=Dict{String,String},
    )

    startmag_aggregated = JulES.JSON.parsefile(
        joinpath(sti_dataset, names["FILENAME_START_STORAGES_AGGREGATED"]),
        dicttype=Dict{String,Float64},
    )

    startmag_clearing = JulES.JSON.parsefile(
        joinpath(sti_dataset, names["FILENAME_START_STORAGES_CLEARING"]),
        dicttype=Dict{String,Float64},
    )

    return Dict(
        "elements" => elements,
        "elements_ppp" => elements_ppp,
        "detailedrescopl" => storage_mapping,
        "startmagdict" => startmag_clearing,
        "aggstartmagdict" => startmag_aggregated,
    )
end

"""
    load_ifm_dep()

Ensure the inflow-model (ifm) dependencies are installed (on the master
process only) and loaded into `Main` on every worker process.

Diff-merge corruption fixed: the body was duplicated, which would have run
the install check and the `@everywhere` loads twice. Also fixed the
`@info "…: ", join(…)` call, which logged a tuple instead of an
interpolated message.
"""
function load_ifm_dep()
    if myid() == 1
        # Install any of the ifm packages missing from the active project.
        function ensure_packages(pkgs::Vector{String})
            deps = values(Pkg.dependencies())
            not_installed = filter(pkg -> !any(d -> d.name == pkg, deps), pkgs)
            if !isempty(not_installed)
                @info "Installing missing packages: $(join(not_installed, ", "))"
                Pkg.add(not_installed)
            else
                @info "All packages already installed."
            end
        end
        ensure_packages(["OrdinaryDiffEq", "ComponentArrays", "Interpolations", "JLD2"])
    end

    # Load the packages into Main on every process (master and workers).
    @everywhere begin
        Pkg.instantiate()
        Base.eval(Main, :(using OrdinaryDiffEq))
        Base.eval(Main, :(using ComponentArrays))
        Base.eval(Main, :(using Interpolations))
        Base.eval(Main, :(using JLD2))
    end
end

"""
    run_jules(config_path, datayear, weatheryear, outputpath, JulESNames,
              filename_clearing, filename_aggregated)

Load the YAML configuration and dataset, run a serial JulES simulation,
and write every result series to an HDF5 file at `outputpath`.

Diff-merge corruption fixed: the old and new bodies were interleaved,
leaving stray parameter tokens and an unbalanced `)` (a syntax error);
the intended new implementation is kept.
"""
function run_jules(
    config_path,
    datayear,
    weatheryear,
    outputpath,
    JulESNames,
    filename_clearing,
    filename_aggregated
)
    @info "Starting JulES run" datayear = datayear weatheryear = weatheryear outputpath = outputpath workers = nworkers()

    config = YAML.load_file(config_path)

    dataset = getdataset(
        config,
        JulESNames,
        filename_clearing,
        filename_aggregated
    )

    input = JulES.DefaultJulESInput(config, dataset, datayear, weatheryear)

    if has_ifm_results(input)
        @info "Loading IFM dependencies"
        load_ifm_dep()
    else
        @debug "IFM dependency loading skipped" reason = "direct mode"
    end

    @debugtime "Run serial" data = JulES.run_serial(input)
    @debugtime "Save output" h5open(outputpath, "w") do file
        for (k, v) in data
            @debug k
            write(file, k, v)
        end
    end
end
Loading
Loading