add: expose the solver as a web service

This commit is contained in:
h z
2025-01-26 16:13:49 +00:00
parent 1e6d11ebcf
commit 4f440e7e20
3 changed files with 145 additions and 11 deletions

37
Board/__init__.py Normal file
View File

@@ -0,0 +1,37 @@
import itertools
class Board:
    """Conway's Game of Life board.

    Live cells are stored sparsely as a set of (x, y) tuples.  When
    quotient_x / quotient_y are true, neighbour lookups wrap around the
    corresponding axis (toroidal topology); otherwise out-of-range
    neighbours simply never match a live cell.
    """

    def __init__(self, width, height, quotient_x, quotient_y):
        self.width = width
        self.height = height
        self.quotient_x = quotient_x
        self.quotient_y = quotient_y
        # Set of live (x, y) cells; everything absent is dead.
        self.lives = set()

    def toggle(self, x, y):
        """Flip the state of cell (x, y): live -> dead, dead -> live."""
        self.lives ^= {(x, y)}

    def evaluate(self):
        """Advance the board one generation under the standard B3/S23 rule."""
        offsets = [d for d in itertools.product((-1, 0, 1), repeat=2)
                   if d != (0, 0)]
        survivors = set()
        for cell in itertools.product(range(self.width), range(self.height)):
            x, y = cell
            count = 0
            for dx, dy in offsets:
                nx, ny = x + dx, y + dy
                if self.quotient_x:
                    nx %= self.width
                if self.quotient_y:
                    ny %= self.height
                if (nx, ny) in self.lives:
                    count += 1
            alive = cell in self.lives
            # Born with exactly 3 neighbours, survives with 2 or 3.
            if count == 3 or (alive and count == 2):
                survivors.add(cell)
        self.lives = survivors

View File

@@ -2,21 +2,23 @@ import numpy as np
import tensorflow as tf
from tensorflow import keras
from Board import Board
import random
class NeuralSolver:
def __init__(self, width, height, quotient_x=False, quotient_y=False):
    """Create a solver for a width x height Life board.

    quotient_x / quotient_y enable toroidal wrap-around on each axis
    (passed on to Board when generating training data).  Builds both
    the forward and the reverse keras models eagerly.
    """
    # NOTE: this span was diff residue containing both the old
    # (quotientX) and the new (quotient_x) lines; only the post-commit
    # version is kept.
    self.Width = width
    self.Height = height
    self.QuotientX = quotient_x
    self.QuotientY = quotient_y
    self._build_forward_model()
    self._build_reverse_model()
def _build_forward_model(self):
inputs = keras.Input(shape=(self.Width, self.Height, 1), name="InitialState")
hidden = keras.Conv2D(32, 3, padding="same", activation="relu")(inputs)
hidden = keras.layers.Conv2D(32, 3, padding="same", activation="relu")(inputs)
hidden = keras.layers.Conv2D(32, 3, padding="same", activation="relu")(hidden)
outputs = keras.Conv2D(1, 1, padding="same", activation="sigmoid")(hidden)
outputs = keras.layers.Conv2D(1, 1, padding="same", activation="sigmoid")(hidden)
self.ForwardModel = keras.Model(inputs, outputs, name="ForwardModel")
self.ForwardModel.compile(
optimizer=keras.optimizers.Adam(learning_rate=0.001),
@@ -26,14 +28,14 @@ class NeuralSolver:
def _build_reverse_model(self):
    """Build the reverse (final state -> initial state) conv net.

    Not compiled here: train_backward composes it with the frozen
    forward model and compiles the composite instead.
    """
    # NOTE: this span was diff residue; the stale `keras.Conv2D` lines
    # are dropped in favour of the corrected `keras.layers.Conv2D` ones.
    inputs = keras.Input(shape=(self.Width, self.Height, 1), name="FinalState")
    hidden = keras.layers.Conv2D(32, 3, padding="same", activation="relu")(inputs)
    hidden = keras.layers.Conv2D(32, 3, padding="same", activation="relu")(hidden)
    outputs = keras.layers.Conv2D(1, 1, padding="same", activation="sigmoid")(hidden)
    self.ReverseModel = keras.Model(inputs, outputs, name="ReverseModel")
def train_forward(self, dataset, batch_size=8, epochs=10):
x, y = dataset
def train_forward(self, dataset_size, batch_size=8, epochs=10):
x, y = self.generate_training_data(dataset_size)
self.ForwardModel.fit(
x=x,
y=y,
@@ -42,12 +44,19 @@ class NeuralSolver:
verbose=1
)
def train_backward(self, dataset, batch_size=8, epochs=10):
x, y = dataset
def train_backward(self, dataset_size, batch_size=8, epochs=10):
x, y = self.generate_training_data(dataset_size)
self.ForwardModel.trainable = False
self.ForwardModel.compile(
optimizer=keras.optimizers.Adam(learning_rate=0.001),
loss=keras.losses.BinaryCrossentropy(),
metrics=["accuracy"],
)
reverse_inputs = self.ReverseModel.inputs
reverse_outputs = self.ReverseModel.outputs
forward_outputs = self.ForwardModel(reverse_outputs)
composite_model = keras.Model(reverse_inputs, forward_outputs, name="CompositeModel")
composite_model.compile(
optimizer=keras.optimizers.Adam(learning_rate=0.001),
loss=keras.losses.BinaryCrossentropy(),
@@ -72,3 +81,20 @@ class NeuralSolver:
b = b[None, ..., None]
preds = self.ReverseModel.predict(b)
return preds > 0.5
def generate_training_data(self, dataset_size=1000):
    """Build (initial, next) board pairs by stepping random boards once.

    Returns (x, y): float32 arrays of shape
    (dataset_size, Width, Height, 1) with 1.0 marking live cells;
    y is x advanced one Life generation.
    """
    shape = (dataset_size, self.Width, self.Height, 1)
    x = np.zeros(shape, dtype=np.float32)
    y = np.zeros(shape, dtype=np.float32)
    cell_count = self.Width * self.Height
    for sample in range(dataset_size):
        board = Board(self.Width, self.Height, self.QuotientX, self.QuotientY)
        # Random density: between 1/16 of the cells and every cell
        # toggled (re-toggling a cell turns it off again).
        for _ in range(random.randint(cell_count // 16, cell_count)):
            board.toggle(random.randint(0, self.Width - 1),
                         random.randint(0, self.Height - 1))
        for cx, cy in board.lives:
            x[sample, cx, cy, 0] = 1.0
        board.evaluate()
        for cx, cy in board.lives:
            y[sample, cx, cy, 0] = 1.0
    return x, y

71
app.py Normal file
View File

@@ -0,0 +1,71 @@
import itertools
from fastapi import FastAPI, BackgroundTasks
from pydantic import BaseModel
import numpy as np
from NeuralSolver import NeuralSolver
from typing import List, Tuple
app = FastAPI()
solver: NeuralSolver | None = None
status: str = "none"
def task():
    """Background job: train the forward model, then the reverse one.

    Mutates the module-level ``status`` string so /predict can tell
    when training has finished.
    """
    global status
    phases = (
        ("training forward", lambda: solver.train_forward(1000)),
        ("training backward", lambda: solver.train_backward(1000)),
    )
    for label, run in phases:
        status = label
        run()
    status = "trained"
class InitRequest(BaseModel):
    """Request body for /initialize: board size and wrap-around flags."""
    width: int
    height: int
    # Toroidal wrap-around per axis; forwarded to NeuralSolver.
    quotientX: bool = False
    quotientY: bool = False
@app.post("/initialize")
def initialize(request: InitRequest, background_tasks: BackgroundTasks):
    """Create the solver and start training it in the background."""
    global solver
    global status
    if status != "none":
        return {"status": "instance already existed"}
    # Mark as busy *before* returning: the background task only runs
    # after the response is sent, so without this a second request
    # arriving in the meantime would pass the guard and build a second
    # solver.
    status = "initializing"
    solver = NeuralSolver(request.width, request.height, request.quotientX, request.quotientY)
    background_tasks.add_task(task)
    return {"status": "initializing"}
class BoardRequest(BaseModel):
    """Request body for /predict: live cells and step direction."""
    # Live cell coordinates as (x, y) pairs.
    lives: List[Tuple[int, int]]
    # "forward" steps the board ahead; any other value currently runs
    # the reverse model (see predict).
    direction: str
@app.post("/predict")
def predict(request: BoardRequest):
    """Run the trained solver one step forward or in reverse.

    Returns the predicted live cells, or a status message while the
    models are still training.
    """
    global solver
    global status
    if status != "trained":
        return {"status": "not trained yet"}
    # Previously anything other than "forward" silently ran the reverse
    # model; reject genuinely unknown directions instead.
    if request.direction not in ("forward", "reverse", "backward"):
        return {"status": "unknown direction"}
    inputs = np.zeros((1, solver.Width, solver.Height, 1))
    for (x, y) in request.lives:
        inputs[0, x, y, 0] = 1.0
    if request.direction == "forward":
        res = solver.predict_forward(inputs)
    else:
        res = solver.predict_reverse(inputs)
    # Sorted list rather than a set: deterministic ordering and directly
    # JSON-serializable.
    lives = sorted(
        (x, y)
        for (x, y) in itertools.product(range(solver.Width), range(solver.Height))
        if res[0, x, y, 0]
    )
    return {"prediction": lives}
@app.post("/finish")
def finish():
    """Discard the solver so a fresh one can be initialized."""
    global solver
    global status
    solver = None
    status = "none"
    # Return a status payload like the other endpoints (previously this
    # implicitly returned None, i.e. JSON null).
    return {"status": "finished"}