# DSPEx Adaptive Optimization System - Implementation
This document details DSPEx’s implementation of automated adapter and module optimization.
## Core Implementation

### Universal Optimizer Framework

The `UniversalOptimizer` automatically selects an optimization strategy based on the characteristics of the variable space:
```elixir
defmodule DSPEx.Optimization.UniversalOptimizer do
  @moduledoc """
  Universal optimizer that can tune discrete and continuous variables.
  Automatically selects an optimal strategy based on problem characteristics.
  """

  alias DSPEx.Optimization.Strategies

  defstruct [
    :variable_space,
    :strategy,
    :budget,
    :parallel_evaluations,
    optimization_history: [],
    best_configuration: nil
  ]

  def new(variable_space, opts \\ []) do
    %__MODULE__{
      variable_space: variable_space,
      # An explicit :strategy option overrides automatic selection.
      strategy:
        Keyword.get_lazy(opts, :strategy, fn ->
          select_optimal_strategy(variable_space, opts)
        end),
      budget: Keyword.get(opts, :budget, 100),
      parallel_evaluations:
        Keyword.get(opts, :parallel_evaluations, System.schedulers_online())
    }
  end

  def optimize(optimizer, program, trainset, evaluation_fn) do
    case optimizer.strategy do
      :bayesian_optimization ->
        Strategies.BayesianOptimization.optimize(optimizer, program, trainset, evaluation_fn)

      :genetic_algorithm ->
        Strategies.GeneticAlgorithm.optimize(optimizer, program, trainset, evaluation_fn)

      :multi_objective ->
        Strategies.MultiObjective.optimize(optimizer, program, trainset, evaluation_fn)

      _ ->
        Strategies.RandomSearch.optimize(optimizer, program, trainset, evaluation_fn)
    end
  end
end
```
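The listing above calls `select_optimal_strategy/2` without defining it. A minimal heuristic sketch, placed inside `UniversalOptimizer`, might look like the following; it assumes each entry in `variable_space.variables` is a map with a `:type` of `:discrete` or `:continuous`, which is an assumption of this sketch rather than a documented contract:

```elixir
# Sketch of a strategy-selection heuristic (assumed variable shape:
# variable_space.variables :: %{id => %{type: :discrete | :continuous, ...}}).
defp select_optimal_strategy(variable_space, opts) do
  types = variable_space.variables |> Map.values() |> Enum.map(& &1.type)

  cond do
    # A custom multi-objective evaluator implies Pareto-based search.
    Keyword.has_key?(opts, :evaluation_metric) -> :multi_objective
    # Continuous (or mixed) spaces suit model-based search.
    :continuous in types -> :bayesian_optimization
    # Large, purely discrete spaces suit population-based search.
    length(types) > 10 -> :genetic_algorithm
    # Small discrete spaces: random search is a cheap baseline.
    true -> :random_search
  end
end
```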
### Multi-Objective Evaluation

The system evaluates configurations across multiple objectives, using Nx for the numerical work:
```elixir
defmodule DSPEx.Optimization.MultiObjectiveEvaluator do
  @moduledoc """
  Evaluates configurations across performance, cost, and latency objectives.
  Uses Pareto optimization for balanced trade-offs.
  """

  import Nx.Defn

  defstruct [:objectives]

  def new(objectives), do: %__MODULE__{objectives: objectives}

  def dspex_standard_evaluator do
    # Each objective must also carry a :function key
    # ((configuration, {program, trainset}) -> score) supplied by the caller.
    objectives = [
      %{name: :accuracy, weight: 0.4, direction: :maximize},
      %{name: :cost, weight: 0.3, direction: :minimize},
      %{name: :latency, weight: 0.2, direction: :minimize},
      %{name: :reliability, weight: 0.1, direction: :maximize}
    ]

    new(objectives)
  end

  def evaluate(evaluator, configuration, program, trainset) do
    # Evaluate all objectives in parallel; async_stream preserves order.
    objective_scores =
      evaluator.objectives
      |> Task.async_stream(
        fn objective ->
          score = objective.function.(configuration, {program, trainset})
          {objective.name, score, objective.direction}
        end,
        max_concurrency: System.schedulers_online()
      )
      |> Enum.map(fn {:ok, result} -> result end)

    weighted_score = calculate_weighted_score(objective_scores, evaluator.objectives)

    %{
      configuration: configuration,
      objective_scores: objective_scores,
      weighted_score: weighted_score
    }
  end

  # Tensors are built outside defn; minimize-direction scores are negated
  # so that a larger weighted score is always better.
  defp calculate_weighted_score(objective_scores, objectives) do
    scores =
      Nx.tensor(
        for {_name, score, direction} <- objective_scores do
          if direction == :minimize, do: -score, else: score
        end
      )

    weights = Nx.tensor(Enum.map(objectives, & &1.weight))

    scores |> weighted_sum(weights) |> Nx.to_number()
  end

  defnp weighted_sum(scores, weights) do
    Nx.sum(scores * weights)
  end
end
```
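The moduledoc promises Pareto optimization, but no Pareto logic appears in the listing. A minimal sketch of dominance checking and non-dominated filtering over the result maps returned by `evaluate/4` might look like the following; the module name and helpers are illustrative, not part of DSPEx:

```elixir
# Illustrative Pareto helpers over results from MultiObjectiveEvaluator.evaluate/4.
defmodule DSPEx.Optimization.Pareto do
  # a dominates b if a is at least as good on every objective
  # and strictly better on at least one.
  def dominates?(a, b) do
    pairs = Enum.zip(scores(a), scores(b))

    Enum.all?(pairs, fn {x, y} -> x >= y end) and
      Enum.any?(pairs, fn {x, y} -> x > y end)
  end

  # Keep only configurations no other configuration dominates (O(n^2)).
  def front(results) do
    Enum.filter(results, fn r ->
      not Enum.any?(results, fn other -> other != r and dominates?(other, r) end)
    end)
  end

  # Sign-normalize so that larger is always better.
  defp scores(result) do
    Enum.map(result.objective_scores, fn {_name, score, direction} ->
      if direction == :minimize, do: -score, else: score
    end)
  end
end
```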
### Adaptive Configuration Application

The system automatically applies optimized configurations to programs:
```elixir
defmodule DSPEx.Optimization.AdaptiveConfig do
  @moduledoc """
  Applies optimized configurations to DSPEx programs.
  Handles adapter, module, and parameter configuration.
  """

  def apply_configuration(program, configuration, variable_space) do
    Enum.reduce(configuration, program, fn {var_id, value}, acc_program ->
      variable = Map.get(variable_space.variables, var_id)
      apply_variable_configuration(acc_program, variable, value)
    end)
  end

  defp apply_variable_configuration(program, variable, value) do
    case variable.id do
      :adapter -> %{program | adapter: value}
      :module_type -> transform_module_type(program, value)
      :provider -> configure_provider(program, value)
      :temperature -> configure_parameter(program, :temperature, value)
      :top_p -> configure_parameter(program, :top_p, value)
      _ -> configure_custom_parameter(program, variable.id, value)
    end
  end

  defp transform_module_type(program, DSPEx.ChainOfThought) do
    %DSPEx.ChainOfThought{
      signature: program.signature,
      adapter: program.adapter,
      client: program.client,
      reasoning_steps: 3
    }
  end

  defp transform_module_type(program, DSPEx.ProgramOfThought) do
    %DSPEx.ProgramOfThought{
      signature: program.signature,
      adapter: program.adapter,
      client: program.client,
      code_executor: DSPEx.CodeExecutor.Python
    }
  end

  # Unknown module types leave the program unchanged.
  defp transform_module_type(program, _), do: program

  # configure_provider/2, configure_parameter/3, and configure_custom_parameter/3
  # merge values into the program's client/request settings; they are defined
  # alongside the program structs and omitted here.
end
```
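For concreteness, applying a candidate configuration (a map of variable id to value, matching the shape consumed above) could look like this; the adapter module name is hypothetical:

```elixir
# Hypothetical candidate configuration; DSPEx.Adapters.JSON is illustrative.
configuration = %{
  adapter: DSPEx.Adapters.JSON,
  module_type: DSPEx.ChainOfThought,
  temperature: 0.3
}

tuned_program =
  DSPEx.Optimization.AdaptiveConfig.apply_configuration(
    program,
    configuration,
    variable_space
  )
```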
## Usage Examples

### Basic Optimization
```elixir
# Define the optimization space
variable_space = VariableSpace.dspex_standard_space()

# Create the optimizer
optimizer = UniversalOptimizer.new(variable_space, budget: 50)

# Run the optimization
{:ok, optimized_program} =
  UniversalOptimizer.optimize(
    optimizer,
    program,
    trainset,
    &evaluation_function/2
  )
```
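The example captures `&evaluation_function/2` without defining it. A minimal sketch, assuming the function receives the configured program plus the training set and returns a score in `[0, 1]` (the `DSPEx.Program.forward/2` call and the `:inputs`/`:expected` example fields are assumptions of this sketch):

```elixir
# Illustrative metric: fraction of training examples answered correctly.
def evaluation_function(program, trainset) do
  correct =
    Enum.count(trainset, fn example ->
      case DSPEx.Program.forward(program, example.inputs) do
        {:ok, prediction} -> prediction.answer == example.expected
        _ -> false
      end
    end)

  correct / length(trainset)
end
```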
### Multi-Objective Optimization
```elixir
# Create an evaluator with custom objectives
# (each objective also needs a :function key; see MultiObjectiveEvaluator above)
evaluator = MultiObjectiveEvaluator.new([
  %{name: :accuracy, weight: 0.5, direction: :maximize},
  %{name: :cost, weight: 0.3, direction: :minimize},
  %{name: :latency, weight: 0.2, direction: :minimize}
])

optimizer = UniversalOptimizer.new(variable_space,
  strategy: :multi_objective,
  evaluation_metric: evaluator
)
```
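A multi-objective run naturally yields a set of trade-offs rather than a single winner. Assuming the strategy returns the evaluated result maps, one plausible way to pick a final configuration with the `Pareto` sketch above (`results` stands for that returned list):

```elixir
# Hypothetical post-processing: keep non-dominated configurations,
# then break ties with the weighted score.
best =
  results
  |> DSPEx.Optimization.Pareto.front()
  |> Enum.max_by(& &1.weighted_score)
```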
## Configuration

### Dependencies
```elixir
# mix.exs
defp deps do
  [
    {:nx, "~> 0.6"},          # Numerical computing
    {:jason, "~> 1.4"},       # JSON handling
    {:statistics, "~> 0.6"}   # Statistical functions
  ]
end
```
### Application Configuration
```elixir
# config/config.exs
config :dspex, :adaptive_optimization,
  default_budget: 100,
  parallel_evaluations: System.schedulers_online(),
  convergence_threshold: 0.001
```
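At runtime these defaults would presumably be read back with `Application.get_env/3`; a minimal sketch:

```elixir
# Hypothetical lookup of the configured optimization defaults.
defaults = Application.get_env(:dspex, :adaptive_optimization, [])
budget = Keyword.get(defaults, :default_budget, 100)
```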
## Benefits
- **Automated Discovery**: Finds optimal adapter/module combinations automatically
- **Cost Optimization**: Balances performance with cost considerations
- **Concurrent Evaluation**: Leverages Elixir’s concurrency for parallel optimization
- **Nx Integration**: High-performance numerical operations for optimization algorithms
- **Extensible Design**: Easy to add new variables, constraints, and strategies
This system addresses the exact needs identified by the DSPy community while leveraging Elixir’s unique strengths for building robust, concurrent optimization systems.