Neural Network Inference & Optimization: Using 'TabularData' to find optimal weights for a Titanic survival model. This example demonstrates an automated grid search that evaluates over a thousand model variations (a 5×5×5×11 weight grid) in milliseconds to discover non-linear interactions that outperform the standard gender-only baseline.
uses System.Data, System.Data.Tabular;
// Tabular model that holds the parsed passenger columns and evaluates
// RPN expressions over them.
var model := new TabularData;
// 1. Load and Parse Dataset
var csvPath := 'data/titanic.csv';
if not FileExists(csvPath) then begin
PrintLn('Error: Titanic dataset not found at ' + csvPath);
Exit;
end;
var lines := FileReadLines(csvPath);
// Parallel feature columns, one entry per parsed passenger row:
// colActual = survival label (0/1), colSex = 1 male / 0 female,
// colPclass = class mapped to {1.0, 0.5, 0.0}, colAge = age scaled to [0,1].
var colSex, colPclass, colAge, colActual: array of Float;
// Start at index 1 to skip the CSV header row (FileReadLines is 0-based).
for var i := 1 to lines.Length - 1 do begin
var line := lines[i];
if line = '' then continue;
// Mask commas inside quoted fields (the Name column contains commas)
// by rewriting them to ';' in place, so the Split(',') below only cuts
// at true field separators. DWScript strings are 1-indexed and mutable.
var inQuote := False;
for var j := 1 to line.Length do begin
if line[j] = '"' then inQuote := not inQuote;
if inQuote and (line[j] = ',') then line[j] := ';';
end;
var fields := line.Split(',');
// Need at least 6 fields to reach the Age column (index 5).
if fields.Length < 6 then continue;
// Titanic CSV layout (0-based after split): [1]=Survived, [2]=Pclass,
// [4]=Sex, [5]=Age.
colActual.Add(StrToFloatDef(fields[1], 0.0));
// Contains('female') is checked (not 'male') because 'male' is a
// substring of 'female'.
colSex.Add(if fields[4].Contains('female') then 0.0 else 1.0);
var pc := StrToIntDef(fields[2], 3);
colPclass.Add(if pc = 1 then 1.0 else if pc = 2 then 0.5 else 0.0);
// Missing ages default to 28; divide by 80 (oldest passenger) to
// normalize into [0,1].
colAge.Add(StrToFloatDef(fields[5], 28.0) / 80.0);
end;
model.AddColumn('Actual', colActual);
model.AddColumn('Sex', colSex);
model.AddColumn('PClass', colPclass);
model.AddColumn('Age', colAge);
PrintLn('<h3>Titanic Model Optimization</h3>');
PrintLn(Format('Searching for optimal interaction weights on %d passengers...', [colActual.Length]));
// 2. Automated Weight Search
// Brute-force grid search over four weights: three interaction terms in
// steps of 3.0 (range 0..12) and a bias term in steps of 0.2 (range 0..2).
// For each combination a sigmoid score column is evaluated in RPN form,
// thresholded at 0.5, and compared against the 'Actual' labels; the first
// combination reaching the highest accuracy is kept (strict '>' preserves
// earlier winners on ties).
var bestAcc := -1.0;
var bestMaleBoost := 0.0;
var bestChildBoost := 0.0;
var bestPoorFemalePenalty := 0.0;
var bestBias := 0.0;
for var stepMale := 0 to 4 do begin
var wMale := Float(stepMale) * 3.0; // Male 1st Class boost
for var stepChild := 0 to 4 do begin
var wChild := Float(stepChild) * 3.0; // Child boost
for var stepPenalty := 0 to 4 do begin
var wPenalty := Float(stepPenalty) * 3.0; // Poor Female Penalty
for var stepBias := 0 to 10 do begin
var wBias := Float(stepBias) * 0.2;
// Sigmoid of (base gender logit + residual interaction terms),
// expressed as an RPN stack program over the model's columns.
model.EvaluateNewColumn('TmpProb', [
1, 1, '"Sex"', -10.0, '*', 4.0, '+', 'dup0',
'"Sex"', '"PClass"', '*', wMale, '*', '+',
0.2, '"Age"', '-', 0, 'max', wChild, '*', '+',
'"Sex"', 0, '=', '"PClass"', 0, '=', '*', wPenalty, '*', '-',
0, 'relu', '+', wBias, '+',
-1, '*', 'exp', '+', '/'
]);
// 1.0 where the thresholded prediction matches the label, else 0.0.
model.EvaluateNewColumn('TmpMatch', [ '"TmpProb"', 0.5, '>=', '"Actual"', '=' ]);
var hits := model.EvaluateAggregate('sum', ['"TmpMatch"']);
var accuracy := (hits / colActual.Length) * 100;
if accuracy > bestAcc then begin
bestAcc := accuracy;
bestMaleBoost := wMale;
bestChildBoost := wChild;
bestPoorFemalePenalty := wPenalty;
bestBias := wBias;
end;
// Discard the scratch columns before the next grid point.
model.DropColumn('TmpProb');
model.DropColumn('TmpMatch');
end;
end;
end;
end;
PrintLn(Format('<b>Search Complete!</b> Found optimal weights: MaleBoost=%.1f, ChildBoost=%.1f, PoorPenalty=%.1f, Bias=%.1f',
[bestMaleBoost, bestChildBoost, bestPoorFemalePenalty, bestBias]));
// 3. Evaluate Final Models
// ProbA: gender-only baseline — sigmoid(-10*Sex + 5), i.e. predict
// survival for females, death for males.
model.EvaluateNewColumn('ProbA', [
1, 1, '"Sex"', -10.0, '*', 5.0, '+', -1, '*', 'exp', '+', '/'
]);
// ProbB: same sigmoid form but with the residual interaction terms using
// the best weights found by the grid search (identical RPN program to the
// search loop's 'TmpProb').
model.EvaluateNewColumn('ProbB', [
1, 1,
'"Sex"', -10.0, '*', 4.0, '+',
'dup0',
'"Sex"', '"PClass"', '*', bestMaleBoost, '*', '+',
0.2, '"Age"', '-', 0, 'max', bestChildBoost, '*', '+',
'"Sex"', 0, '=', '"PClass"', 0, '=', '*', bestPoorFemalePenalty, '*', '-',
0, 'relu', '+', bestBias, '+',
-1, '*', 'exp', '+', '/'
]);
// Per-row correctness flags (1.0 = prediction matches 'Actual').
model.EvaluateNewColumn('MatchA', [ '"ProbA"', 0.5, '>=', '"Actual"', '=' ]);
model.EvaluateNewColumn('MatchB', [ '"ProbB"', 0.5, '>=', '"Actual"', '=' ]);
// Accuracy in percent = mean of the match flags * 100.
var accA := (model.EvaluateAggregate('sum', ['"MatchA"']) / colActual.Length) * 100;
var accB := (model.EvaluateAggregate('sum', ['"MatchB"']) / colActual.Length) * 100;
PrintLn('<h4>Final Accuracy Comparison</h4>');
PrintLn(Format('<b>Baseline (Gender Only):</b> %.1f%%', [accA]));
PrintLn(Format('<b>Optimized Residual:</b> %.1f%% (Improvement: +%.1f%%)', [accB, accB - accA]));
// 4. Sample Successes
// Print up to three passengers that the baseline misclassified
// (MatchA = 0) but the optimized model got right (MatchB = 1).
PrintLn('<br><b>Outliers correctly identified by the Deep Model:</b>');
// Hoist the column materializations out of the loop: calling
// model.ColumnStrings(...) once per passenger re-built both columns on
// every iteration, making the scan accidentally quadratic.
var matchA := model.ColumnStrings('MatchA');
var matchB := model.ColumnStrings('MatchB');
var count := 0;
for var i := 0 to colActual.Length - 1 do begin
if (StrToFloat(matchA[i]) = 0.0) and (StrToFloat(matchB[i]) = 1.0) then begin
// Decode the normalized feature columns back to readable values.
var sex := if colSex[i] = 0 then 'Female' else 'Male';
var pc := if colPclass[i] = 1 then '1st' else if colPclass[i] = 0.5 then '2nd' else '3rd';
var age := Integer(colAge[i] * 80);
// NOTE(review): the '(Survived)' label assumes every corrected outlier
// is a survivor; the filter itself doesn't guarantee that — confirm
// against colActual if this listing is extended.
PrintLn(Format('Pass #%d [%s, %s, Age %d] (Survived): Corrected by Residual Model', [i+1, sex, pc, age]));
Inc(count);
if count >= 3 then break;
end;
end;
Example output: <h3>Titanic Model Optimization</h3> Searching for optimal interaction weights on 891 passengers... <b>Search Complete!</b> Found optimal weights: MaleBoost=9.0, ChildBoost=9.0, PoorPenalty=0.0, Bias=1.8 <h4>Final Accuracy Comparison</h4> <b>Baseline (Gender Only):</b> 78.7% <b>Optimized Residual:</b> 78.9% (Improvement: +0.2%) <br><b>Outliers correctly identified by the Deep Model:</b> Pass #306 [Male, 1st, Age 1] (Survived): Corrected by Residual Model Pass #446 [Male, 1st, Age 4] (Survived): Corrected by Residual Model