Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
174 commits
Select commit Hold shift + click to select a range
30083a6
const SEM=StructuralEquationModels
Mar 16, 2024
a505da0
ParamTable: convert from Dict to NamedTuple
Mar 12, 2024
0d0d389
obj_grad_hess: simplify mapreduce
Mar 9, 2024
080a8ad
cov_and_mean(): use StatsBase.mean_and_cov()
alyst Mar 18, 2024
5e9db0a
fill_A_S_M(): use `@inbounds`
Mar 9, 2024
74735f3
fill_A_S_M!(): add !
Mar 10, 2024
60c61f3
==: use &&
Mar 9, 2024
ae89781
RAMMatrices: cleanup params index
Mar 10, 2024
4609c00
RAMMatrices: tiny rename for clarity
Mar 9, 2024
2739f0b
RAMMatrices: cleanup indexing params in arrays
Mar 9, 2024
613dba7
RAMConstant: move before RAMMatrices
Mar 9, 2024
436d31c
RAMConstant: simplify
Mar 9, 2024
6ce98c3
RAMMatrices: optimize F_indices init
Mar 9, 2024
80b1927
RAMMatrices ctor: dim checks
Mar 9, 2024
67e01ce
RAMMatrices: declare types for all fields
Mar 9, 2024
b3e12a0
include RAMMatrices before EnsParTable
alyst Mar 18, 2024
b3b8a8a
fix EnsParTable to Dict{RAMMatrices} convert
Mar 17, 2024
05040e9
replace no-op ctors with convert(T, obj)
Mar 17, 2024
6187f24
ParTable ctor: simplify rows code
Mar 9, 2024
07ed9a9
ParTable: full support for Iterator iface
Mar 9, 2024
c6eb013
use Fix1 instead of anonymous function
Mar 9, 2024
165c640
fix typo
Mar 9, 2024
b48a0c5
rename vars for type stability
Mar 9, 2024
0391a5e
obj!()/grad!(): avoid tmp array creation
Mar 9, 2024
b18e658
grad!(): avoid extra array copying
Mar 9, 2024
764d4a0
check_acyclic(): use istril/u()
Mar 9, 2024
0bbf5c9
grad!(SemML): reduce * ops
Mar 9, 2024
74e23e9
obj/grad/hess!(SemML): avoid extra arr copying
Mar 10, 2024
67a988e
use .+= to reduce allocs
Mar 10, 2024
28950f3
ParTable: update StenGraph-based ctor
Mar 17, 2024
9b51e59
EnsParTable: update dict-based and graph-based ctors
Mar 12, 2024
e6fde8c
start_fabin3: optimize indexing
Mar 10, 2024
4c61f10
start_fabin3(): optimize math
Mar 10, 2024
c6641d4
start_fabin3(): directly access imply.ram_matrices
Mar 12, 2024
2c9ffc9
reorder_data(): optimize
Mar 10, 2024
0aa6903
SemObservedData(): cleanup code
Mar 10, 2024
28965a0
SemSpecification base type
Mar 17, 2024
a8c8e8a
use SemSpecification in method signatures
Mar 17, 2024
cdc7618
SemSpecification: vars API
Mar 17, 2024
1fc18cc
RAMMatrices: vars API
Mar 17, 2024
b30d2a6
ParamTable: vars API
alyst Mar 18, 2024
dd3a97c
RAM imply: use vars API
Mar 17, 2024
86b7a7f
RAMSymbolic: use vars API
Mar 17, 2024
4108f7a
start_simple(): use vars API
Mar 17, 2024
1403932
starts_fabin3: use vars API
Mar 17, 2024
085b0df
remove get_colnames()
Mar 12, 2024
d66452d
remove get_n_nodes()
Mar 17, 2024
b44a9e8
n_par() -> nparams()
Mar 17, 2024
9b58bbf
MeanStructure, HessianEvaluation traits
Mar 19, 2024
692d6df
RAMMatrices: option to keep zero constants
Mar 10, 2024
7f1d171
getindex(EnsParTable, i) instead of get_group()
Mar 17, 2024
f4411a1
matrix_gradient(): refactor
Mar 10, 2024
204044a
optimize kron
Mar 10, 2024
71bb58f
ML: optimize C
Mar 10, 2024
79616dd
refactor get_partition()
Mar 10, 2024
64219f5
remove_all_missing(): optimize
Mar 10, 2024
ef81919
skipmissing_mean(): optimize
Mar 10, 2024
af8a9e3
get_observed(): refactor
Mar 10, 2024
fa1c453
fix ridge eval
Mar 17, 2024
a4dba9b
obj/grad/hess: refactor evaluation API
alyst Mar 20, 2024
c69f262
use ternary op as intended
Mar 10, 2024
2133f97
symbolic: constrain to tril before simplifying
Mar 10, 2024
a1a6f10
fix dangling whitespace
Mar 10, 2024
b2eb026
check_acyclic: notify if matrix is triangular
Mar 11, 2024
e61daf2
tests/examples: import -> using
alyst Mar 15, 2024
6cc0446
fix dangling spaces
Mar 11, 2024
15e0bd2
don't import ==
Mar 11, 2024
78193a7
don't import push!()
Mar 11, 2024
61ac92b
remove no-op push!()
Mar 23, 2024
fca7778
rename Base.sort() to sort_vars()
Mar 22, 2024
a9d55ee
sort_vars!(ParTable): cleanup
alyst Mar 18, 2024
115fc39
remove spurious "using SEM"
Mar 11, 2024
25d7b94
fix typo
Mar 12, 2024
ad51f76
add ParamsArray
Apr 3, 2024
d6e90e5
vars(RAMMatrices)
Mar 20, 2024
39e9d0e
reorder_obs_cov/mean(): cleanup
Mar 12, 2024
30343ce
param_values(ParTable)
Mar 23, 2024
b9591cc
lavaan_param_values(lav_fit, partable)
Mar 23, 2024
958d65a
test_gradient(): do tests inside
Mar 17, 2024
96aece8
test_hessian(): do tests inside
Mar 17, 2024
397cf23
compare_estimates() -> test_estimates()
Mar 23, 2024
352bf02
comp_fitmeasures() -> test_fitmeasures()
Mar 17, 2024
06df803
tests: tiny improvements
Mar 17, 2024
f52f4d5
tests: use approx op
alyst Mar 15, 2024
5a12f62
tests: fix ensembl ctor
alyst Mar 15, 2024
cc3ae11
tests: relax multithreading check
Mar 15, 2024
f621f20
tests: use ismissing()
Mar 17, 2024
137bf0c
tests: SEM module alias
alyst Mar 18, 2024
9be6615
tests: use update_se_hessian!()
alyst Mar 20, 2024
287a3ef
remove module spec
Mar 16, 2024
6ec5540
remove no-op method
Mar 17, 2024
d8dc844
start_fabin3: check mean data and model
Mar 17, 2024
76b35dd
identifier/parameters -> params
Mar 19, 2024
84bf448
identifier column -> param
alyst Mar 17, 2024
ba1283f
check_params(): opt to append missing ones
Mar 19, 2024
9c416ac
EnsParTable ctor: enforce same params in tables
Mar 19, 2024
6f37d7f
ParTable(graph): use check_params() to update params
Mar 19, 2024
bcd8551
WIP SemImplyState
Mar 17, 2024
23abc82
SemCov: stricter type checks
Mar 17, 2024
ee171f5
FIML: simplify index generation
alyst Mar 18, 2024
05ddefd
get_data(SemObserved): default implementation
alyst Mar 18, 2024
9784e14
SemObsMissing: refactor
Apr 10, 2024
fe2bfeb
remove cov_and_mean(): not used anymore
alyst Mar 18, 2024
0dfde3d
minus2ll(): cleanup method signatures
alyst Mar 18, 2024
1825014
fix chi2
Mar 20, 2024
ee0f273
fix RMSEA
Mar 19, 2024
848a00f
FIML: update
alyst Mar 20, 2024
1d0e8b5
FIML: use 5-arg mul!
Mar 20, 2024
87a93f8
WLS: use 5-arg mul!()
Mar 20, 2024
9a13e7c
ML: use 5-arg mul!()
Mar 23, 2024
c4c4c8c
declare cov matrices symmetric
Mar 23, 2024
12b31ec
tiny simplification
alyst Mar 20, 2024
c35ae53
se_hessian(): rename hessian -> method
alyst Mar 20, 2024
032abfb
se_hessian!(): optimize calc
Mar 23, 2024
b5a7ad2
H_scaling(): cleanup
alyst Mar 20, 2024
3d6be59
EM: optimizations
Mar 20, 2024
992bfda
SemObsMissing: remove outdated docstring
alyst Mar 20, 2024
5bf1369
SemObsData: remove rowwise
alyst Mar 20, 2024
2e53057
cleanup data columns reordering
alyst Mar 20, 2024
e9ab0c0
n_obs/man(data): restrict to integer
Apr 1, 2024
34b611d
SemObsCov is a type alias for SemObsData
alyst Mar 20, 2024
68c22c5
DataFrame(EnsParTable)
Mar 20, 2024
9dff90f
test: use proper partable
Mar 20, 2024
5e7d3bb
start_simple(SemEnsemble): simplify
Mar 20, 2024
74449e8
SemOptOptim: remove redundant sem_fit()
alyst Mar 21, 2024
e398e9d
SemOptNLopt: remove redundant sem_fit()
alyst Mar 21, 2024
98bf229
SemOptOptim: use evaluate!() directly
alyst Mar 21, 2024
4562514
SemOptNLopt: use evaluate!() directly
alyst Mar 21, 2024
b3d3c2c
remove unused parameters.jl
alyst Mar 21, 2024
0a2bde6
remove identifier.jl
alyst Mar 21, 2024
67873ab
rename parameter_type to relation
alyst Mar 21, 2024
3f53a74
materialize!(Symm/LowTri/UpTri)
Mar 22, 2024
0b27e8c
generic imply: keep F sparse
Mar 22, 2024
af277c7
generic imply: impose matrix constraints
Mar 22, 2024
d6d5849
neumann_series(): avoid endless loop
Mar 22, 2024
a22247b
ParamsArray: faster sparse materialize!
Mar 22, 2024
25c6432
RAM: reuse sigma array
Mar 23, 2024
28a9baf
RAM: optional sparse Sigma matrix
Apr 1, 2024
1d6ab6f
RAM: declare (I-A)^-1 up/low tri too
Mar 23, 2024
5551edc
cleanup update_partable!()
Mar 23, 2024
73cedb8
cleanup start_vals handling
Mar 24, 2024
d7ae28f
ML: refactor to minimize allocs
Mar 23, 2024
5edefab
lower/upper_bounds() API for optim
Apr 1, 2024
c0f54ee
u/l_bounds support for Optim.jl
Apr 1, 2024
a73d52d
SemOptimizer(engine = ...) ctor
Mar 12, 2024
e6b17bd
SEMNLOptExt for NLopt
Mar 12, 2024
75495af
SEMProximalOptExt for Proximal opt
Mar 12, 2024
cabd4dd
NLopt: minor tweaks
Apr 3, 2024
e9808ed
add PackageExtensionCompat
Mar 12, 2024
d968a3a
tests helper: is_extended_tests()
Mar 12, 2024
09b8621
tests: fix optimizer usage
Mar 12, 2024
dfb7944
variance_params(SEMSpec)
Mar 26, 2024
861733c
nonunique() helper function
Apr 4, 2024
74561b3
RAMMatrices ctor: dupl. vars check
Apr 4, 2024
a46f0ed
ParTable: better params unique check
Apr 4, 2024
a26a165
RAMSymbolic: calc (I-A)^{-1} once
Apr 1, 2024
db2344c
AbstractSemSingle: vars API
Apr 1, 2024
e8fb878
predict_latent_vars()
Apr 14, 2024
36b9274
lavaan_model()
Apr 1, 2024
0ba7948
BlackBoxOptim.jl backend support
Apr 1, 2024
77655a3
CommutationMatrix type
alyst Apr 14, 2024
75138e8
simplify elimination_matrix()
alyst Apr 14, 2024
3a6f596
simplify duplication_matrix()
Apr 3, 2024
5c4ac3c
FIML: optimize Jmu
Apr 3, 2024
09c93c8
fix typo
Apr 3, 2024
f281de1
SemWLS: dim checks
Apr 3, 2024
7cc2f58
EM: move code refs to docstring
Apr 10, 2024
1b30da4
EM MVN: decouple from SemObsMissing
Apr 10, 2024
0eb2690
fixup semoptimizer in ext tests
alyst Apr 14, 2024
090ef5e
test/fiml: set EM MVN rtol=1e-10
alyst Apr 14, 2024
1eb987a
MissingPattern: transpose data
Apr 17, 2024
fd71ebd
EM MVN: report rel_error if not converged
Apr 17, 2024
b93f128
EM: max_nobs_em opt to limit obs used
Apr 17, 2024
52c1a8b
EM: optimize mean handling
Apr 17, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ LazyArtifacts = "4af54fe1-eca0-43a8-85a7-787d91b784e3"
LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
NLSolversBase = "d41bc354-129a-5804-8e4c-c37616107c6c"
NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
PackageExtensionCompat = "65ce6f38-6b18-4e1d-a461-8949797d7930"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
PrettyTables = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Expand Down Expand Up @@ -42,3 +42,16 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Test"]

[weakdeps]
BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209"
NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2"
ProximalAlgorithms = "140ffc9f-1907-541a-a177-7475e0a401e9"
ProximalCore = "dc4f5ac2-75d1-4f31-931e-60435d74994b"
ProximalOperators = "a725b495-10eb-56fe-b38b-717eba820537"

[extensions]
SEMNLOptExt = "NLopt"
SEMProximalOptExt = ["ProximalCore", "ProximalAlgorithms", "ProximalOperators"]
SEMBlackBoxOptimExt = ["BlackBoxOptim", "Optimisers"]
44 changes: 44 additions & 0 deletions ext/SEMBlackBoxOptimExt/AdamMutation.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
"""
Mutation operator that moves a candidate in the direction opposite to the
gradient of the SEM objective, using the `RAdam` rule from `Optimisers.jl`.

Constructed from an `AbstractSem` model and a parameters dictionary
(see `AdamMutation_DefaultOptions` for the recognized keys).
"""
mutable struct AdamMutation{M <: AbstractSem, O, S} <: MutationOperator
    model::M                 # SEM model whose objective/gradient drives the mutation
    optim::O                 # Optimisers.jl rule (RAdam)
    opt_state::S             # optimizer state, re-initialized on degenerate steps
    params_fraction::Float64 # probability of keeping each gradient component

    function AdamMutation(model::AbstractSem, params::AbstractDict)
        optim = RAdam(params[:AdamMutation_eta], params[:AdamMutation_beta])
        params_fraction = params[:AdamMutation_params_fraction]
        # initialize optimizer state for a parameter vector of the model's size
        opt_state = Optimisers.init(optim, Vector{Float64}(undef, nparams(model)))

        new{typeof(model), typeof(optim), typeof(opt_state)}(
            model, optim, opt_state, params_fraction)
    end
end

# Compact one-line display of the operator and a slice of its optimizer state.
function Base.show(io::IO, op::AdamMutation)
    print(io, "AdamMutation(", op.optim, " state[3]=", op.opt_state[3], ")")
end

"""
Default parameters for `AdamMutation`.
"""
const AdamMutation_DefaultOptions = ParamsDict(
:AdamMutation_eta => 1E-1,
:AdamMutation_beta => (0.99, 0.999),
:AdamMutation_params_fraction => 0.25,
)

# BlackBoxOptim mutation hook: shift candidate `v` against the gradient of the
# SEM objective, with a random subset of gradient components zeroed out.
function BlackBoxOptim.apply!(m::AdamMutation, v::AbstractVector{<:Real}, target_index::Int)
    grad = similar(v)
    # evaluate objective and fill `grad` in-place at the current candidate
    obj = SEM.evaluate!(0.0, grad, nothing, m.model, v)
    # each component survives with probability `params_fraction`
    @inbounds for i in eachindex(grad)
        (rand() > m.params_fraction) && (grad[i] = 0.0)
    end

    m.opt_state, dv = Optimisers.apply!(m.optim, m.opt_state, v, grad)
    # Reset the optimizer state when the step degenerates (non-finite objective
    # or update, or vanishing state); otherwise take the descent step.
    # NOTE(review): opt_state[3][1] is assumed to be the RAdam step-size/meta
    # entry — confirm against the Optimisers.jl RAdam state layout.
    if (m.opt_state[3][1] <= 1E-20) || !isfinite(obj) || any(!isfinite, dv)
        m.opt_state = Optimisers.init(m.optim, v)
    else
        v .-= dv
    end

    return v
end
75 changes: 75 additions & 0 deletions ext/SEMBlackBoxOptimExt/BlackBoxOptim.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
############################################################################################
### connect to BlackBoxOptim.jl as backend
############################################################################################

"""
"""
struct SemOptimizerBlackBoxOptim <: SemOptimizer{:BlackBoxOptim}
lower_bound::Float64 # default lower bound
variance_lower_bound::Float64 # default variance lower bound
lower_bounds::Union{Dict{Symbol, Float64}, Nothing}

upper_bound::Float64 # default upper bound
upper_bounds::Union{Dict{Symbol, Float64}, Nothing}
end

# Keyword constructor with default box constraints; extra keywords are
# accepted and ignored so engine-generic call sites keep working.
function SemOptimizerBlackBoxOptim(;
    lower_bound::Float64 = -1000.0,
    lower_bounds::Union{AbstractDict{Symbol, Float64}, Nothing} = nothing,
    variance_lower_bound::Float64 = 0.001,
    upper_bound::Float64 = 1000.0,
    upper_bounds::Union{AbstractDict{Symbol, Float64}, Nothing} = nothing,
    kwargs...
)
    # guard clause: variances cannot have a negative lower bound
    variance_lower_bound < 0.0 &&
        throw(ArgumentError("variance_lower_bound must be non-negative"))
    return SemOptimizerBlackBoxOptim(
        lower_bound,
        variance_lower_bound,
        lower_bounds,
        upper_bound,
        upper_bounds,
    )
end

# Engine-keyed constructor: `SemOptimizer(engine = :BlackBoxOptim, ...)` forwards here.
SEM.SemOptimizer{:BlackBoxOptim}(args...; kwargs...) = SemOptimizerBlackBoxOptim(args...; kwargs...)

# NOTE(review): `SemOptimizerBlackBoxOptim` declares no `algorithm`/`options`
# fields, so both accessors below would throw a field error if called —
# confirm the intended fields or remove these methods.
SEM.algorithm(optimizer::SemOptimizerBlackBoxOptim) = optimizer.algorithm
SEM.options(optimizer::SemOptimizerBlackBoxOptim) = optimizer.options

# Wraps an `AbstractSem` model as a BlackBoxOptim minimization problem
# (scalar fitness, box-constrained continuous search space).
struct SemModelBlackBoxOptimProblem{M <: AbstractSem} <: OptimizationProblem{ScalarFitnessScheme{true}}
    model::M
    fitness_scheme::ScalarFitnessScheme{true}
    search_space::ContinuousRectSearchSpace
end

# Build the box-constrained search space for the model's parameters:
# variance parameters get `variance_lower_bound` as their default floor,
# and per-parameter dictionaries override the defaults when present.
function BlackBoxOptim.search_space(model::AbstractSem)
    opt = model.optimizer::SemOptimizerBlackBoxOptim
    varpars = Set(SEM.variance_params(model.imply.ram_matrices))
    lo = map(SEM.params(model)) do p
        fallback = p in varpars ? opt.variance_lower_bound : opt.lower_bound
        isnothing(opt.lower_bounds) ? fallback : get(opt.lower_bounds, p, fallback)
    end
    hi = map(SEM.params(model)) do p
        isnothing(opt.upper_bounds) ? opt.upper_bound : get(opt.upper_bounds, p, opt.upper_bound)
    end
    return ContinuousRectSearchSpace(lo, hi)
end

# Convenience constructor: derive the fitness scheme and search space from the model.
# NOTE(review): the `optimizer` argument is unused here — bounds are read from
# `model.optimizer` inside `search_space(model)`; confirm this is intended.
function SemModelBlackBoxOptimProblem(model::AbstractSem, optimizer::SemOptimizerBlackBoxOptim)
    SemModelBlackBoxOptimProblem(model, ScalarFitnessScheme{true}(), search_space(model))
end

# Fitness of candidate `params` = SEM objective value (no gradient/Hessian).
# Fix: dropped the spurious `return` keyword inside the short-form definition.
BlackBoxOptim.fitness(params::AbstractVector, wrapper::SemModelBlackBoxOptimProblem) =
    SEM.evaluate!(0.0, nothing, nothing, wrapper.model, params)

# sem_fit method: run BlackBoxOptim on the wrapped SEM problem and package
# the best fitness/candidate into a `SemFit`.
# NOTE(review): `start_params` is ignored — BlackBoxOptim generates its own
# initial population; confirm whether it should seed the search instead.
function SEM.sem_fit(
    optimizer::SemOptimizerBlackBoxOptim,
    model::AbstractSem,
    start_params::AbstractVector;
    MaxSteps::Integer = 50000,
    kwargs...)

    problem = SemModelBlackBoxOptimProblem(model, optimizer)
    res = bboptimize(problem; MaxSteps, kwargs...)
    return SemFit(best_fitness(res), best_candidate(res),
        nothing, model, res)
end
138 changes: 138 additions & 0 deletions ext/SEMBlackBoxOptimExt/DiffEvoFactory.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
"""
Base class for factories of optimizers for a specific problem.
"""
abstract type OptimizerFactory{P<:OptimizationProblem} end

# Return the optimization problem held by the factory.
function problem(factory::OptimizerFactory)
    return factory.problem
end

# Default settings for `BlackBoxOptim.OptController`: run-time/step limits,
# compact tracing every 5 s, no result recovery or trace saving.
const OptController_DefaultParameters = ParamsDict(
    :MaxTime => 60.0, :MaxSteps => 10^8,
    :TraceMode => :compact, :TraceInterval => 5.0,
    :RecoverResults => false, :SaveTrace => false
)

# Wrap the algorithm and problem in an `OptController`, layering user params
# over the controller defaults and BlackBoxOptim's own defaults.
function generate_opt_controller(alg::Optimizer, optim_factory::OptimizerFactory, params)
    merged_params = BlackBoxOptim.chain(
        BlackBoxOptim.DefaultParameters,
        OptController_DefaultParameters,
        params,
    )
    return BlackBoxOptim.OptController(alg, problem(optim_factory), merged_params)
end

# Verify every individual (column) of the population lies inside the problem's
# search space; throws identifying the first violating individual.
# Fix: replaced `@assert` (may be compiled out at higher optimization levels,
# unsuitable for input validation) with an explicit throw of the same
# `AssertionError`, so callers catching it are unaffected.
function check_population(factory::OptimizerFactory, popmatrix::BlackBoxOptim.PopulationMatrix)
    ssp = search_space(problem(factory))
    for i in 1:popsize(popmatrix)
        popmatrix[:, i] ∈ ssp ||
            throw(AssertionError("Individual $i is out of space: $(popmatrix[:,i])"))
    end
    return nothing
end

# Search space used to draw the initial population for optimizer `id`;
# defaults to the problem's full search space.
function initial_search_space(factory::OptimizerFactory, id::Int)
    return search_space(factory.problem)
end

# Build the initial population matrix (one column per individual) for
# optimizer `id`: either subsample the user-supplied :Population or draw
# Latin-hypercube samples; then the first one or two columns are overwritten
# with deterministic SEM starting values (FABIN3 and simple starts),
# projected back into the search space.
function initial_population_matrix(factory::OptimizerFactory, id::Int)
    #@info "Standard initial_population_matrix()"
    ini_ss = initial_search_space(factory, id)
    if !isempty(factory.initial_population)
        # user-supplied population: its dimensionality must match the problem
        numdims(factory.initial_population) == numdims(factory.problem) ||
            throw(DimensionMismatch("Dimensions of :Population ($(numdims(factory.initial_population))) "*
                "are different from the problem dimensions ($(numdims(factory.problem)))"))
        res = factory.initial_population[:, StatsBase.sample(1:popsize(factory.initial_population), factory.population_size)]
    else
        res = rand_individuals(ini_ss, factory.population_size, method=:latin_hypercube)
    end
    # bound projector keeps the seeded start values inside the search space
    prj = RandomBound(ini_ss)
    if size(res, 2) > 1
        apply!(prj, view(res, :, 1), SEM.start_fabin3(factory.problem.model))
    end
    if size(res, 2) > 2
        apply!(prj, view(res, :, 2), SEM.start_simple(factory.problem.model))
    end
    return res
end

# Convert the individuals of an archive into a freshly allocated
# population matrix (one column per individual).
function population_matrix(archive::Any)
    nrows = length(BlackBoxOptim.params(first(archive)))
    storage = Matrix{Float64}(undef, nrows, length(archive))
    return population_matrix!(storage, archive)
end

# Fill the columns of `pop` with the parameter vectors of the archive's
# individuals, stopping when either runs out; warns if `pop` has more
# columns than there are individuals (those columns stay unset).
function population_matrix!(pop::AbstractMatrix{<:Real}, archive::Any)
    npars = length(BlackBoxOptim.params(first(archive)))
    size(pop, 1) == npars ||
        throw(DimensionMismatch("Matrix rows count ($(size(pop, 1))) doesn't match the number of problem dimensions ($(npars))"))
    @inbounds for (i, indi) in enumerate(archive)
        (i <= size(pop, 2)) || break
        pop[:, i] .= BlackBoxOptim.params(indi)
    end
    if size(pop, 2) > length(archive)
        @warn "Matrix columns count ($(size(pop, 2))) is bigger than population size ($(length(archive))), last columns not set"
    end
    return pop
end

# Embedding operator that projects candidates back into the search space.
function generate_embedder(factory::OptimizerFactory, id::Int, problem::OptimizationProblem)
    return RandomBound(search_space(problem))
end

# Abstract factory for differential-evolution optimizers of problem type `P`.
abstract type DiffEvoFactory{P<:OptimizationProblem} <: OptimizerFactory{P} end

# Parent selector: radius-limited; radius defaults to 1/5 of the population size.
function generate_selector(factory::DiffEvoFactory, id::Int, problem::OptimizationProblem, population)
    radius = get(factory.params, :selector_radius, popsize(population) ÷ 5)
    return RadiusLimitedSelector(radius)
end

# Genetic-operator mixture for the DE optimizer: mutation-clock uniform
# mutation, adaptive DE rand/1/bin, and several crossover operators;
# operator frequencies self-adapt via `FAGeneticOperatorsMixture`.
function generate_modifier(factory::DiffEvoFactory, id::Int, problem::OptimizationProblem)
    ops = GeneticOperator[
        MutationClock(UniformMutation(search_space(problem)), 1/numdims(problem)),
        BlackBoxOptim.AdaptiveDiffEvoRandBin1(BlackBoxOptim.AdaptiveDiffEvoParameters(factory.params[:fdistr], factory.params[:crdistr])),
        SimplexCrossover{3}(1.05),
        SimplexCrossover{2}(1.1),
        UnimodalNormalDistributionCrossover{2}(chain(BlackBoxOptim.UNDX_DefaultOptions, factory.params)),
        UnimodalNormalDistributionCrossover{3}(chain(BlackBoxOptim.UNDX_DefaultOptions, factory.params)),
        ParentCentricCrossover{2}(chain(BlackBoxOptim.PCX_DefaultOptions, factory.params)),
        ParentCentricCrossover{3}(chain(BlackBoxOptim.PCX_DefaultOptions, factory.params))
    ]
    # SEM problems additionally get the gradient-guided Adam mutation
    if problem isa SemModelBlackBoxOptimProblem
        push!(ops, AdamMutation(problem.model, chain(AdamMutation_DefaultOptions, factory.params)))
    end
    FAGeneticOperatorsMixture(ops)
end

# Assemble the DE optimizer from its parts: fitness-tracking population,
# parent selector, operator mixture and search-space embedder.
function generate_optimizer(factory::DiffEvoFactory, id::Int, problem::OptimizationProblem, popmatrix)
    pop = FitPopulation(popmatrix, nafitness(fitness_scheme(problem)))
    selector = generate_selector(factory, id, problem, pop)
    modifier = generate_modifier(factory, id, problem)
    embedder = generate_embedder(factory, id, problem)
    return BlackBoxOptim.DiffEvoOpt(
        "AdaptiveDE/rand/1/bin/gradient",
        pop, selector, modifier, embedder,
    )
end

# Defaults for the initial population (empty matrix => generate randomly).
const Population_DefaultParameters = ParamsDict(
    :Population => BlackBoxOptim.PopulationMatrix(undef, 0, 0),
    :PopulationSize => 100,
)

# DE defaults: bimodal-Cauchy distributions for the F (:fdistr) and
# CR (:crdistr) parameters of adaptive DE, chained with population defaults.
# NOTE(review): key here is :SelectorRadius, but generate_selector() reads
# :selector_radius — one of the two spellings looks wrong; confirm.
const DE_DefaultParameters = chain(ParamsDict(
    :SelectorRadius => 0,
    :fdistr => BlackBoxOptim.BimodalCauchy(0.65, 0.1, 1.0, 0.1, clampBelow0 = false),
    :crdistr => BlackBoxOptim.BimodalCauchy(0.1, 0.1, 0.95, 0.1, clampBelow0 = false),
), Population_DefaultParameters)

# Default concrete DE factory: holds the problem, an (optionally empty)
# initial population, the population size and the chained parameters.
struct DefaultDiffEvoFactory{P<:OptimizationProblem} <: DiffEvoFactory{P}
    problem::P
    initial_population::BlackBoxOptim.PopulationMatrix
    population_size::Int
    params::ParamsDictChain
end

# Keyword-based convenience constructor; keywords become a parameters dict.
DefaultDiffEvoFactory(problem::OptimizationProblem; kwargs...) =
    DefaultDiffEvoFactory(problem, BlackBoxOptim.kwargs2dict(kwargs))

# Dict-based constructor: user params layered over `DE_DefaultParameters`.
function DefaultDiffEvoFactory(problem::OptimizationProblem, params::AbstractDict)
    params = chain(DE_DefaultParameters, params)
    DefaultDiffEvoFactory{typeof(problem)}(problem, params[:Population], params[:PopulationSize], params)
end

# Set up a BlackBoxOptim run from a factory: build and validate the initial
# population, create the optimizer, and wrap everything in an OptController.
function BlackBoxOptim.bbsetup(factory::OptimizerFactory; kwargs...)
    initial_pop = initial_population_matrix(factory, 1)
    check_population(factory, initial_pop)
    optimizer = generate_optimizer(factory, 1, problem(factory), initial_pop)
    controller_params = BlackBoxOptim.kwargs2dict(kwargs)
    return generate_opt_controller(optimizer, factory, controller_params)
end
13 changes: 13 additions & 0 deletions ext/SEMBlackBoxOptimExt/SEMBlackBoxOptimExt.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Package extension wiring BlackBoxOptim.jl (plus Optimisers.jl) into
# StructuralEquationModels as an optimization backend.
module SEMBlackBoxOptimExt

using StructuralEquationModels, BlackBoxOptim, Optimisers

# Fix: `const` alias — a non-const module-level binding is an untyped global
# (type-unstable at every use site).
const SEM = StructuralEquationModels

export SemOptimizerBlackBoxOptim

include("AdamMutation.jl")
include("DiffEvoFactory.jl")
include("SemOptimizerBlackBoxOptim.jl")

end
76 changes: 76 additions & 0 deletions ext/SEMBlackBoxOptimExt/SemOptimizerBlackBoxOptim.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
############################################################################################
### connect to BlackBoxOptim.jl as backend
############################################################################################

"""
"""
struct SemOptimizerBlackBoxOptim <: SemOptimizer{:BlackBoxOptim}
lower_bound::Float64 # default lower bound
variance_lower_bound::Float64 # default variance lower bound
lower_bounds::Union{Dict{Symbol, Float64}, Nothing}

upper_bound::Float64 # default upper bound
upper_bounds::Union{Dict{Symbol, Float64}, Nothing}
end

# Keyword constructor with default box constraints; unknown keywords are
# tolerated so engine-generic construction keeps working.
function SemOptimizerBlackBoxOptim(;
    lower_bound::Float64 = -1000.0,
    lower_bounds::Union{AbstractDict{Symbol, Float64}, Nothing} = nothing,
    variance_lower_bound::Float64 = 0.001,
    upper_bound::Float64 = 1000.0,
    upper_bounds::Union{AbstractDict{Symbol, Float64}, Nothing} = nothing,
    kwargs...
)
    # variances may not be allowed to go negative
    variance_lower_bound < 0.0 &&
        throw(ArgumentError("variance_lower_bound must be non-negative"))
    return SemOptimizerBlackBoxOptim(
        lower_bound, variance_lower_bound, lower_bounds,
        upper_bound, upper_bounds,
    )
end

# Engine-keyed constructor: `SemOptimizer(engine = :BlackBoxOptim, ...)` forwards here.
SEM.SemOptimizer{:BlackBoxOptim}(args...; kwargs...) = SemOptimizerBlackBoxOptim(args...; kwargs...)

# NOTE(review): `SemOptimizerBlackBoxOptim` declares no `algorithm`/`options`
# fields, so both accessors below would throw a field error if called —
# confirm the intended fields or remove these methods.
SEM.algorithm(optimizer::SemOptimizerBlackBoxOptim) = optimizer.algorithm
SEM.options(optimizer::SemOptimizerBlackBoxOptim) = optimizer.options

# Wraps an `AbstractSem` model as a BlackBoxOptim minimization problem
# (scalar fitness, box-constrained continuous search space).
struct SemModelBlackBoxOptimProblem{M <: AbstractSem} <: OptimizationProblem{ScalarFitnessScheme{true}}
    model::M
    fitness_scheme::ScalarFitnessScheme{true}
    search_space::ContinuousRectSearchSpace
end

# Build the box-constrained search space from the SEM bounds API, with the
# optimizer's defaults and a dedicated floor for variance parameters.
function BlackBoxOptim.search_space(model::AbstractSem)
    opt = model.optimizer::SemOptimizerBlackBoxOptim
    lo = SEM.lower_bounds(opt.lower_bounds, model,
        default = opt.lower_bound,
        variance_default = opt.variance_lower_bound)
    hi = SEM.upper_bounds(opt.upper_bounds, model, default = opt.upper_bound)
    return ContinuousRectSearchSpace(lo, hi)
end

# Convenience constructor: derive the fitness scheme and search space from the model.
# NOTE(review): the `optimizer` argument is unused here — bounds are read from
# `model.optimizer` inside `search_space(model)`; confirm this is intended.
function SemModelBlackBoxOptimProblem(model::AbstractSem, optimizer::SemOptimizerBlackBoxOptim)
    SemModelBlackBoxOptimProblem(model, ScalarFitnessScheme{true}(), search_space(model))
end

# Fitness of candidate `params` = SEM objective value (no gradient/Hessian).
# Fix: dropped the spurious `return` keyword inside the short-form definition.
BlackBoxOptim.fitness(params::AbstractVector, wrapper::SemModelBlackBoxOptimProblem) =
    SEM.evaluate!(0.0, nothing, nothing, wrapper.model, params)

# sem_fit method: run BlackBoxOptim on the wrapped SEM problem. The default
# Method uses the custom adaptive DE with gradient-guided Adam mutation;
# any other Method symbol is passed straight to BlackBoxOptim.
# NOTE(review): `start_params` is ignored — BlackBoxOptim generates its own
# initial population; confirm whether it should seed the search.
function SEM.sem_fit(
    optimizer::SemOptimizerBlackBoxOptim,
    model::AbstractSem,
    start_params::AbstractVector;
    Method::Symbol = :adaptive_de_rand_1_bin_with_gradient,
    MaxSteps::Integer = 50000,
    kwargs...)

    problem = SemModelBlackBoxOptimProblem(model, optimizer)
    if Method == :adaptive_de_rand_1_bin_with_gradient
        # custom adaptive differential evolution with mutation that moves along the gradient
        bbopt_factory = DefaultDiffEvoFactory(problem; kwargs...)
        bbopt = bbsetup(bbopt_factory; MaxSteps, kwargs...)
    else
        bbopt = bbsetup(problem; Method, MaxSteps, kwargs...)
    end
    res = bboptimize(bbopt)
    return SemFit(best_fitness(res), best_candidate(res),
        nothing, model, res)
end
12 changes: 12 additions & 0 deletions ext/SEMNLOptExt.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Package extension wiring NLopt.jl into StructuralEquationModels
# as an optimization backend.
module SEMNLOptExt

using StructuralEquationModels, NLopt

# Fix: `const` alias — a non-const module-level binding is an untyped global
# (type-unstable at every use site).
const SEM = StructuralEquationModels

export SemOptimizerNLopt, NLoptConstraint

include("diff/NLopt.jl")
include("optimizer/NLopt.jl")

end
Loading