add: NeuralSolver
This commit is contained in:
@@ -2,10 +2,18 @@
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<RuntimeIdentifier>linux-x64</RuntimeIdentifier>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>disable</Nullable>
|
||||
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
|
||||
<Version>0.0.2</Version>
|
||||
<CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.16.0" />
|
||||
<PackageReference Include="TensorFlow.Keras" Version="0.15.0" />
|
||||
<PackageReference Include="TensorFlow.NET" Version="0.150.0" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
19
src/Board.cs
19
src/Board.cs
@@ -4,6 +4,7 @@ namespace InverseOfLife;
|
||||
|
||||
public class Board
|
||||
{
|
||||
|
||||
[SetsRequiredMembers]
|
||||
public Board(int w, int h, bool qx = false, bool qy = false, bool useTracer = false)
|
||||
{
|
||||
@@ -128,6 +129,24 @@ public class Board
|
||||
return builder.ToString();
|
||||
}
|
||||
|
||||
/// <summary>
/// Advances the board <paramref name="steps"/> generations, capturing a snapshot
/// of the live cells after each generation.
/// </summary>
/// <param name="steps">Number of generations to advance (and frames to record).</param>
/// <returns>One set of live-cell coordinates per generation, in simulation order.</returns>
public HashSet<(int, int)>[] Frames(int steps)
{
    var snapshots = new HashSet<(int, int)>[steps];
    for (int frame = 0; frame < steps; frame++)
    {
        // Step the simulation once, then record the resulting live cells.
        Evaluate();
        snapshots[frame] = CopyLives();
    }
    return snapshots;
}
|
||||
|
||||
/// <summary>
/// Returns an independent snapshot of <c>Lives</c>; later mutations of the board
/// (e.g. further <c>Evaluate()</c> calls) do not affect the returned set.
/// </summary>
private HashSet<(int, int)> CopyLives()
    // The HashSet copy constructor replaces the original manual element-by-element loop.
    => new HashSet<(int, int)>(Lives);
|
||||
|
||||
public void Play(int generations, int delay = 200)
|
||||
{
|
||||
|
||||
@@ -32,7 +32,7 @@ public class Generator
|
||||
{
|
||||
if (idx > layers * split)
|
||||
break;
|
||||
(Gene g, double x) = s.Solve(mode: mode);
|
||||
(Gene g, double x) = s.Solve(2, 10, 20, 0.2f, mode: mode);
|
||||
res[idx] = g.Restore(target.Width, target.Height, target.QuotientX, target.QuotientY);
|
||||
idx += 1;
|
||||
Console.WriteLine($"Progress: {idx}/{layers * split}");
|
||||
@@ -45,13 +45,13 @@ public class Generator
|
||||
result[i] = new Dictionary<(int, int), float>();
|
||||
foreach (Board b in res)
|
||||
{
|
||||
b.Evaluate();
|
||||
foreach ((int, int) cell in b.Lives)
|
||||
{
|
||||
if(!result[i].Keys.Contains(cell))
|
||||
result[i][cell] = 0f;
|
||||
result[i][cell] += 1f/(layers * split);
|
||||
}
|
||||
b.Evaluate();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
198
src/NeuralSolver.cs
Normal file
198
src/NeuralSolver.cs
Normal file
@@ -0,0 +1,198 @@
|
||||
using Tensorflow;
|
||||
using Tensorflow.Gradients;
|
||||
using Tensorflow.Keras.Engine;
|
||||
using Tensorflow.NumPy;
|
||||
using static Tensorflow.Binding;
|
||||
using static Tensorflow.KerasApi;
|
||||
namespace InverseOfLife;
|
||||
|
||||
/// <summary>
/// Learns an approximate inverse of the cellular automaton with a pair of CNNs:
/// a forward model (initial state -> evolved state) and a reverse model
/// (evolved state -> initial state), trained jointly with a cycle-consistency loss.
/// </summary>
public class NeuralSolver
{
    // Board geometry and simulation parameters, fixed at construction.
    private int Width { get; set; }
    private int Height { get; set; }
    private int Steps { get; set; }
    private bool QuotientX { get; set; }
    private bool QuotientY { get; set; }
    private IOptimizer Optimizer { get; set; }

    // ForwardModel approximates the automaton itself; ReverseModel approximates its inverse.
    private IModel ForwardModel { get; set; }
    private IModel ReverseModel { get; set; }

    /// <summary>Builds the CNN that predicts the evolved (final) board from an initial board.</summary>
    private void BuildForwardModel()
    {
        Tensors inputs = keras.Input(shape: new Shape(Height, Width, 1), name: "InitialState");
        Tensors hidden = keras.layers.Conv2D(32, kernel_size: 3, padding: "same", activation: keras.activations.Relu).Apply(inputs);
        hidden = keras.layers.Conv2D(32, kernel_size: 3, padding: "same", activation: keras.activations.Relu).Apply(hidden);
        Tensors outputs = keras.layers.Conv2D(1, kernel_size: 1, padding: "same", activation: keras.activations.Sigmoid).Apply(hidden);
        ForwardModel = keras.Model(inputs, outputs, name: "ForwardModel");
    }

    /// <summary>Builds the CNN that predicts an initial board from an evolved (final) board.</summary>
    private void BuildReverseModel()
    {
        Tensors inputs = keras.Input(shape: new Shape(Height, Width, 1), name: "FinalState");
        Tensors hidden = keras.layers.Conv2D(32, kernel_size: 3, padding: "same", activation: keras.activations.Relu).Apply(inputs);
        hidden = keras.layers.Conv2D(32, kernel_size: 3, padding: "same", activation: keras.activations.Relu).Apply(hidden);
        Tensors outputs = keras.layers.Conv2D(1, kernel_size: 1, padding: "same", activation: keras.activations.Sigmoid).Apply(hidden);
        ReverseModel = keras.Model(inputs, outputs, name: "ReverseModel");
    }

    /// <summary>
    /// Creates a solver for <paramref name="width"/> x <paramref name="height"/> boards evolved
    /// <paramref name="steps"/> generations, and builds both (untrained) models.
    /// </summary>
    public NeuralSolver(int width, int height, int steps, bool quotientX, bool quotientY)
    {
        Width = width;
        Height = height;
        Steps = steps;
        QuotientX = quotientX;
        QuotientY = quotientY;
        BuildForwardModel();
        BuildReverseModel();
    }

    /// <summary>Saves both models under <paramref name="basePath"/>, keyed by geometry, steps and quotient flags.</summary>
    public void SaveModel(string basePath)
    {
        ForwardModel.save($"{basePath}/FM{Width}x{Height}_{Steps}_{QuotientX}_{QuotientY}");
        ReverseModel.save($"{basePath}/RM{Width}x{Height}_{Steps}_{QuotientX}_{QuotientY}");
    }

    /// <summary>Loads both models previously written by <see cref="SaveModel"/> with matching parameters.</summary>
    public void LoadModel(string basePath)
    {
        ForwardModel = keras.models.load_model($"{basePath}/FM{Width}x{Height}_{Steps}_{QuotientX}_{QuotientY}");
        ReverseModel = keras.models.load_model($"{basePath}/RM{Width}x{Height}_{Steps}_{QuotientX}_{QuotientY}");
    }

    /// <summary>
    /// Generates supervised pairs by seeding random boards (the labels) and evolving
    /// each one <c>Steps</c> generations (the inputs).
    /// </summary>
    /// <param name="datasetSize">Number of (final, initial) board pairs to generate.</param>
    /// <returns>(final states, initial states), each shaped (N, Height, Width, 1) with 0/1 floats.</returns>
    public (NDArray, NDArray) GenerateTrainingData(int datasetSize)
    {
        Random rnd = new Random();

        float[] inputsData = new float[datasetSize * Height * Width];
        float[] labelsData = new float[datasetSize * Height * Width];

        for (int idx = 0; idx < datasetSize; idx++)
        {
            Board board = new Board(Width, Height, QuotientX, QuotientY);

            // Seed a random number of random live cells (hitting the same
            // coordinate twice just re-sets the same cell).
            int randomCells = rnd.Next(1, Width * Height / 4);
            for (int c = 0; c < randomCells; c++)
            {
                int x = rnd.Next(0, Width);
                int y = rnd.Next(0, Height);
                board.Toggle(x, y, true);
            }

            // Label = the board *before* evolution (what ReverseModel must recover).
            int offsetLabel = idx * Width * Height;
            foreach ((int x, int y) in board.Lives)
                labelsData[offsetLabel + (y * Width + x)] = 1f;

            board.Evaluate(Steps);

            // Input = the board *after* evolution.
            int offsetInput = idx * Width * Height;
            foreach ((int x, int y) in board.Lives)
                inputsData[offsetInput + (y * Width + x)] = 1f;
        }

        NDArray inputsTensor = np.array(inputsData).reshape(new Shape(datasetSize, Height, Width, 1));
        NDArray labelsTensor = np.array(labelsData).reshape(new Shape(datasetSize, Height, Width, 1));
        return (inputsTensor, labelsTensor);
    }

    /// <summary>
    /// Jointly trains both models. Total loss = forward BCE + reverse BCE + cycle BCE,
    /// where the cycle term re-runs ForwardModel on ReverseModel's prediction so the
    /// proposed initial state must actually evolve back into the observed final state.
    /// </summary>
    /// <param name="datasetSize">Number of generated training pairs.</param>
    /// <param name="batchSize">Mini-batch size (slices beyond the end are clamped by NumPy slicing).</param>
    /// <param name="epochs">Number of full passes over the dataset.</param>
    public void Train(int datasetSize = 1000, int batchSize = 8, int epochs = 10)
    {
        (NDArray trainFinal, NDArray trainInitial) = GenerateTrainingData(datasetSize);
        Optimizer = keras.optimizers.Adam(learning_rate: 0.001f);

        // The variable set is fixed after model construction, so build it once
        // instead of re-concatenating on every batch.
        var trainableVariables = ForwardModel.TrainableVariables.Concat(ReverseModel.TrainableVariables).ToList();

        for (int epoch = 0; epoch < epochs; epoch++)
        {
            for (int i = 0; i < datasetSize; i += batchSize)
            {
                NDArray initialBatch = trainInitial[$"{i}:{i + batchSize}"];
                NDArray finalBatch = trainFinal[$"{i}:{i + batchSize}"];
                using (GradientTape tape = tf.GradientTape())
                {
                    Tensors predictedFinal = ForwardModel.Apply(initialBatch);
                    Tensors predictedInitial = ReverseModel.Apply(finalBatch);
                    Tensors reconstructedFinal = ForwardModel.Apply(predictedInitial);

                    Tensor forwardLoss = keras.losses.BinaryCrossentropy().Call(finalBatch, predictedFinal);
                    Tensor reverseLoss = keras.losses.BinaryCrossentropy().Call(initialBatch, predictedInitial);
                    Tensor cycleLoss = keras.losses.BinaryCrossentropy().Call(finalBatch, reconstructedFinal);
                    Tensor totalLoss = forwardLoss + reverseLoss + cycleLoss;

                    Tensor[] gradients = tape.gradient(totalLoss, trainableVariables);
                    Optimizer.apply_gradients(zip(gradients, trainableVariables));
                    Console.WriteLine($"Epoch {epoch + 1}, Batch {i / batchSize + 1}, Loss: {totalLoss.numpy()}");
                }
            }
        }
    }

    /// <summary>
    /// Asks the reverse model for an initial board whose evolution should yield
    /// <paramref name="target"/>. Cells with predicted probability &gt; 0.5 are set alive.
    /// </summary>
    public Board Predict(Board target)
    {
        float[] inputData = new float[Height * Width];
        foreach (var (x, y) in target.Lives)
            inputData[y * Width + x] = 1f;
        NDArray input = np.array(inputData).reshape(new Shape(1, Height, Width, 1));

        Tensors pred = ReverseModel.predict(input);
        float[] predData = pred.ToArray<float>();

        Board res = new Board(Width, Height);
        for (int i = 0; i < predData.Length; i++)
        {
            // Flat index -> (x, y) in row-major order, matching the encoding above.
            int x = i % Width;
            int y = i / Width;
            if (predData[i] > 0.5f)
                res.Lives.Add((x, y));
        }
        return res;
    }

    /// <summary>
    /// Yields the outline cells of a circle via the midpoint circle algorithm.
    /// Defaults reproduce the original hard-coded circle (center (10,10), radius 7).
    /// Points on the axes/diagonals may be yielded more than once.
    /// </summary>
    /// <param name="centerX">Circle center x coordinate.</param>
    /// <param name="centerY">Circle center y coordinate.</param>
    /// <param name="radius">Circle radius in cells.</param>
    public static IEnumerable<(int, int)> Circle(int centerX = 10, int centerY = 10, int radius = 7)
    {
        int x = 0;
        int y = radius;

        // Midpoint-algorithm decision variable.
        int d = 1 - radius;

        // Mirrors one computed octant point into all eight octants.
        IEnumerable<(int, int)> PlotCirclePoints(int cx, int cy, int px, int py)
        {
            yield return (cx + px, cy + py);
            yield return (cx - px, cy + py);
            yield return (cx + px, cy - py);
            yield return (cx - px, cy - py);
            yield return (cx + py, cy + px);
            yield return (cx - py, cy + px);
            yield return (cx + py, cy - px);
            yield return (cx - py, cy - px);
        }

        foreach (var point in PlotCirclePoints(centerX, centerY, x, y))
            yield return point;

        while (x < y)
        {
            x++;
            if (d < 0)
                d += 2 * x + 1;
            else
            {
                y--;
                d += 2 * (x - y) + 1;
            }

            foreach (var point in PlotCirclePoints(centerX, centerY, x, y))
                yield return point;
        }
    }

    /// <summary>
    /// End-to-end demo: train on random 20x20 boards, evolve a circle 10 generations,
    /// then print the reverse model's guess at the circle's pre-image.
    /// </summary>
    public static void Run()
    {
        NeuralSolver solver = new NeuralSolver(20, 20, 10, false, false);
        solver.Train(1000, 8, 20);

        Board b = new Board(20, 20);
        foreach ((int, int) cell in Circle())
            b.Toggle(cell);
        b.Evaluate(10);

        Board z = solver.Predict(b);
        Console.WriteLine(z.ToString());
    }
}
|
||||
123
src/ResultData.cs
Normal file
123
src/ResultData.cs
Normal file
@@ -0,0 +1,123 @@
|
||||
namespace InverseOfLife;
|
||||
|
||||
/// <summary>
/// Serializable record of one solver run: target geometry, its gene signature,
/// the per-generation live-cell frames, and the achieved score.
/// </summary>
public class ResultData
{
    // Geometry of the target board.
    public int Width { get; set; }
    public int Height { get; set; }
    public bool QuotientX { get; set; }
    public bool QuotientY { get; set; }
    // Binary gene signature of the target (hex-encoded in the text format).
    public byte[] TargetSignature { get; set; }
    // One live-cell set per simulated generation.
    public HashSet<(int x, int y)>[] Frames { get; set; }
    public double Score { get; set; }

    /// <summary>
    /// Serializes to the line-oriented format read back by <see cref="Restore"/>:
    /// line 1 "WxH", line 2 "QuotientX QuotientY", line 3 space-separated hex signature
    /// bytes, one frame per line, score on the last line. The score is written with the
    /// invariant culture so the text round-trips regardless of the machine's locale.
    /// NOTE(review): an empty frame serializes as a blank line, which Restore discards,
    /// so empty frames do not survive a round trip; an empty signature likewise collapses
    /// its line — confirm whether either case can occur upstream.
    /// </summary>
    public override string ToString()
    {
        return $"""
        {Width}x{Height}
        {QuotientX} {QuotientY}
        {BytesToString(TargetSignature)}
        {FramesToString(Frames)}
        {Score.ToString(System.Globalization.CultureInfo.InvariantCulture)}
        """;
    }

    /// <summary>
    /// Parses text produced by <see cref="ToString"/> back into a <see cref="ResultData"/>.
    /// Blank lines are ignored; numeric fields are parsed with the invariant culture.
    /// </summary>
    /// <param name="save">The serialized text.</param>
    /// <exception cref="FormatException">A structural line does not match the expected format.</exception>
    public static ResultData Restore(string save)
    {
        // Keep only trimmed, non-empty lines; the writer may emit blank separators.
        string[] lines = save
            .Split(new[] { "\r\n", "\n" }, StringSplitOptions.None)
            .Select(line => line.Trim())
            .Where(line => line.Length > 0)
            .ToArray();

        if (lines.Length < 4)
            throw new FormatException("Input string not in expected format (less than 4 non-empty lines).");

        // Line 1: "<width>x<height>".
        var line1 = lines[0];
        string[] wh = line1.Split('x');
        if (wh.Length != 2)
            throw new FormatException($"Line1 format invalid: {line1}");
        int width = int.Parse(wh[0], System.Globalization.CultureInfo.InvariantCulture);
        int height = int.Parse(wh[1], System.Globalization.CultureInfo.InvariantCulture);

        // Line 2: "<QuotientX> <QuotientY>" as bools.
        string[] line2 = lines[1].Split(' ', StringSplitOptions.RemoveEmptyEntries);
        if (line2.Length != 2)
            throw new FormatException($"Line2 format invalid: {lines[1]}");
        bool qx = bool.Parse(line2[0]);
        bool qy = bool.Parse(line2[1]);

        // Line 3: space-separated hex bytes of the target signature.
        var line3 = lines[2];
        byte[] signature;
        if (string.IsNullOrWhiteSpace(line3))
            signature = Array.Empty<byte>();
        else
            signature = line3
                .Split(' ', StringSplitOptions.RemoveEmptyEntries)
                .Select(hex => Convert.ToByte(hex, 16))
                .ToArray();

        // Last line: the score (invariant culture, matching the writer).
        double score = double.Parse(lines[^1], System.Globalization.CultureInfo.InvariantCulture);

        // Everything between the signature and the score is one frame per line,
        // each cell written as "(x,y)".
        var framesList = new List<HashSet<(int, int)>>();
        for (int i = 3; i < lines.Length - 1; i++)
        {
            var frameSet = new HashSet<(int, int)>();
            foreach (string token in lines[i].Split(' ', StringSplitOptions.RemoveEmptyEntries))
            {
                string[] xy = token.Trim('(', ')').Split(',');
                if (xy.Length != 2)
                    throw new FormatException($"Frame cell format invalid: {token}");
                int cellX = int.Parse(xy[0], System.Globalization.CultureInfo.InvariantCulture);
                int cellY = int.Parse(xy[1], System.Globalization.CultureInfo.InvariantCulture);
                frameSet.Add((cellX, cellY));
            }
            framesList.Add(frameSet);
        }

        return new ResultData
        {
            Width = width,
            Height = height,
            QuotientX = qx,
            QuotientY = qy,
            TargetSignature = signature,
            Frames = framesList.ToArray(),
            Score = score
        };
    }

    /// <summary>Formats bytes as upper-case two-digit hex, space separated.</summary>
    private static string BytesToString(byte[] bytes)
        => string.Join(" ", bytes.Select(b => b.ToString("X2")));

    /// <summary>One line per frame, each cell as "(x,y)"; every frame line ends with '\n'.</summary>
    private static string FramesToString(HashSet<(int, int)>[] frames)
    {
        var builder = new System.Text.StringBuilder();
        foreach (HashSet<(int, int)> frame in frames)
        {
            builder.Append(string.Join(" ", frame.Select(cell => $"({cell.Item1},{cell.Item2})")));
            builder.Append('\n');
        }
        return builder.ToString();
    }
}
|
||||
@@ -36,7 +36,22 @@ public class Solver
|
||||
return res;
|
||||
}
|
||||
|
||||
/// <summary>
/// Runs the genetic search and packages the winning gene as a serializable
/// <see cref="ResultData"/>, including a full replay of its frames.
/// </summary>
/// <param name="resolution">Gene resolution forwarded to <c>Solve</c>.</param>
/// <param name="maxGeneration">Generation cap forwarded to <c>Solve</c>.</param>
/// <param name="topN">Survivor count forwarded to <c>Solve</c>.</param>
/// <param name="mutationRate">Mutation rate forwarded to <c>Solve</c>.</param>
/// <param name="mode">Scoring mode forwarded to <c>Solve</c>.</param>
[SuppressMessage("ReSharper.DPA", "DPA0002: Excessive memory allocations in SOH", MessageId = "type: Entry[System.Int32,System.Double][]; size: 14391MB")]
public ResultData SolveToData(int resolution, int maxGeneration, int topN, float mutationRate, string mode)
{
    (Gene bestGene, double bestScore) = Solve(resolution, maxGeneration, topN, mutationRate, mode);

    // Signature is derived from the target, then the winning gene is replayed on a
    // fresh board so its generation-by-generation frames can be recorded.
    byte[] signature = new Gene(1, Target).RiboseSequence;
    Board replay = bestGene.Restore(Target.Width, Target.Height, Target.QuotientX, Target.QuotientY);

    return new ResultData
    {
        Width = Target.Width,
        Height = Target.Height,
        QuotientX = Target.QuotientX,
        QuotientY = Target.QuotientY,
        TargetSignature = signature,
        Frames = replay.Frames(Steps),
        Score = bestScore,
    };
}
|
||||
|
||||
public (Gene, double) Solve(int withResolution=1, int maxGenerations=50, int topN=10, float mutationRate=0.1f, string mode="tp_only")
|
||||
{
|
||||
List<(Board, Gene)> currentGeneration = GetInitialGeneration()
|
||||
|
||||
30
summerizer.py
Normal file
30
summerizer.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import os
|
||||
|
||||
# Directory / file names that must never appear in a summary.
ignores = [
    'bin',
    'obj'
]


def find_all_proj_files(base_path):
    """Recursively collect file paths under *base_path*.

    Dot-prefixed (hidden) directories and anything named in ``ignores``
    are skipped; matching file names are skipped as well.
    """
    collected = []
    for root, dirs, files in os.walk(base_path):
        # Prune ignored and hidden directories in place so os.walk never descends into them.
        dirs[:] = [name for name in dirs if not name.startswith('.') and name not in ignores]
        collected.extend(
            os.path.join(root, name) for name in files if name not in ignores
        )
    return collected
|
||||
|
||||
|
||||
def summerizer():
    """Print the concatenated contents of every project file under this
    script's directory, each preceded by a banner line naming the file.

    The function name keeps the original (misspelled) public name so any
    existing caller continues to work.
    """
    current_dir = os.path.dirname(os.path.abspath(__file__))

    fs = find_all_proj_files(current_dir)
    res = ""
    for file in fs:
        # errors='replace' keeps the summary going even when a file is not valid
        # UTF-8 — the directory walk does not filter out binary files, and the
        # original bare open() would raise UnicodeDecodeError on them.
        with open(file, encoding='utf-8', errors='replace') as f:
            res += f"---------------------{file}-------------------------\n"
            res += f.read()
            res += "\n"
    print(res)
|
||||
|
||||
summerizer()
|
||||
Reference in New Issue
Block a user