Compare commits
1 Commits
26b64f8c15
...
acb1e2260f
| Author | SHA1 | Date | |
|---|---|---|---|
| acb1e2260f |
117
api/backup.py
117
api/backup.py
@@ -1,17 +1,19 @@
|
|||||||
import shutil
|
import shutil
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
import tempfile
|
||||||
from flask import Blueprint, send_file, jsonify
|
import zipfile
|
||||||
|
from flask import Blueprint, send_file, jsonify, request
|
||||||
import os
|
import os
|
||||||
from api import require_auth
|
from api import require_auth
|
||||||
from db import get_db
|
from db import get_db
|
||||||
from db.models.Markdown import Markdown
|
from db.models.Markdown import Markdown
|
||||||
from db.models.Path import Path
|
from db.models.Path import Path
|
||||||
|
import threading
|
||||||
import logging
|
import logging
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
backup_bp = Blueprint('backup', __name__, url_prefix='/api/backup')
|
backup_bp = Blueprint('backup', __name__, url_prefix='/api/backup')
|
||||||
|
backup_lock = threading.Lock()
|
||||||
@backup_bp.route('/', methods=['GET'])
|
@backup_bp.route('/', methods=['GET'])
|
||||||
@require_auth(roles=['admin'])
|
@require_auth(roles=['admin'])
|
||||||
def get_backup():
|
def get_backup():
|
||||||
@@ -64,3 +66,112 @@ def traverse(path_id, paths):
|
|||||||
for child in children:
|
for child in children:
|
||||||
traverse(child.id, paths)
|
traverse(child.id, paths)
|
||||||
cd_back()
|
cd_back()
|
||||||
|
|
||||||
|
|
||||||
|
@backup_bp.route('/load', methods=['POST'])
@require_auth(roles=['admin'])
def load_backup():
    """Restore a backup from an uploaded zip archive and merge it into the database.

    Expects a multipart upload under the ``file`` key containing a zip of the
    backup directory tree. Only one restore may run at a time, guarded by
    ``backup_lock``.

    Returns:
        200 with a success payload on completion,
        400 on a missing or invalid upload,
        429 when another restore is already in progress,
        500 on any other failure.
    """
    # Non-blocking acquire: reject concurrent restores instead of queueing them.
    if not backup_lock.acquire(blocking=False):
        return jsonify({"error": "Another backup restore is in progress. Please try again later."}), 429

    temp_dir = None
    try:
        if 'file' not in request.files:
            return jsonify({"error": "No file provided"}), 400
        uploaded_file = request.files['file']

        temp_dir = tempfile.mkdtemp()
        zip_path = os.path.join(temp_dir, "backup.zip")
        uploaded_file.save(zip_path)

        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            # Guard against "zip slip": reject any archive member whose
            # resolved path would escape the extraction directory.
            safe_root = os.path.realpath(temp_dir)
            for member in zip_ref.namelist():
                target = os.path.realpath(os.path.join(temp_dir, member))
                if target != safe_root and not target.startswith(safe_root + os.sep):
                    return jsonify({"error": "Invalid backup format"}), 400
            zip_ref.extractall(temp_dir)

        with get_db() as session:
            path_mapping = {}
            restore_tree(temp_dir, None, session, path_mapping)
            session.commit()

        return jsonify({"success": True, "message": "Backup restored and merged successfully"})

    except zipfile.BadZipFile:
        # A non-zip upload is a client error, not a server failure.
        return jsonify({"error": "Invalid backup format"}), 400
    except Exception as e:
        logger.error(f"Failed to load backup: {e}")
        return jsonify({"error": f"Failed to load backup: {e}"}), 500
    finally:
        # Clean up the scratch directory on every path — the original leaked
        # it whenever an error occurred before the success-only rmtree.
        if temp_dir is not None:
            shutil.rmtree(temp_dir, ignore_errors=True)
        backup_lock.release()
|
||||||
|
|
||||||
|
|
||||||
|
def _read_meta(meta_path):
    """Parse a ``key: value`` metadata file into a dict.

    Lines without a ``": "`` separator (blank or malformed lines) are
    skipped instead of raising ValueError, which the original unpacking
    did. Returns an empty dict when the file does not exist.
    """
    meta = {}
    if os.path.exists(meta_path):
        with open(meta_path, "r", encoding="utf-8") as meta_file:
            for line in meta_file:
                if ": " not in line:
                    continue  # tolerate blank/malformed lines
                key, value = line.strip().split(": ", 1)
                meta[key] = value
    return meta


def restore_tree(dir_path, parent_id, session, path_mapping):
    """Recursively merge an extracted backup directory tree into the database.

    Each directory becomes (or merges into) a ``Path`` row; each ``*.md``
    file becomes a ``Markdown`` row with metadata read from the sibling
    ``<name>.mdmeta`` file. ``path_mapping`` records directory → path-id.

    Args:
        dir_path: Filesystem directory to import.
        parent_id: Id of the parent ``Path`` row, or None for the backup root.
        session: Active database session (caller commits).
        path_mapping: Dict populated with ``{dir_path: new_path_id}``.
    """
    dir_name = os.path.basename(dir_path)

    existing_path = session.query(Path).filter_by(parent_id=parent_id, name=dir_name).first()
    if parent_id is None:
        # The backup's top-level directory maps onto the fixed root path (id 1).
        new_path_id = 1
    elif existing_path:
        # Merge into the existing folder rather than creating a duplicate.
        new_path_id = existing_path.id
    else:
        # Folder-level metadata (currently just "order") lives in ".meta".
        folder_meta = _read_meta(os.path.join(dir_path, ".meta"))
        new_path = Path(name=dir_name, parent_id=parent_id, order=folder_meta.get("order", ''))
        session.add(new_path)
        session.flush()  # assigns new_path.id before documents/children reference it
        new_path_id = new_path.id

    path_mapping[dir_path] = new_path_id

    for entry in os.listdir(dir_path):
        if not entry.endswith(".md"):
            continue
        entry_path = os.path.join(dir_path, entry)
        md_title = entry[:-3]

        # Per-document metadata lives next to the file as "<name>.mdmeta".
        doc_meta = _read_meta(entry_path + "meta")
        created_at = datetime.now()
        if "created_at" in doc_meta:
            created_at = datetime.strptime(doc_meta["created_at"], "%Y-%m-%d %H:%M:%S")

        with open(entry_path, "r", encoding="utf-8") as md_file:
            content = md_file.read()

        # Suffix the title if it collides with a document already in this folder.
        unique_title = get_unique_markdown_title(session, md_title, new_path_id)
        new_md = Markdown(title=unique_title, content=content, path_id=new_path_id,
                          created_at=created_at, order=doc_meta.get("order", ''),
                          shortcut=doc_meta.get("shortcut", ""))
        session.add(new_md)

    # Recurse into sub-folders after this folder's own documents are staged.
    for entry in os.listdir(dir_path):
        entry_path = os.path.join(dir_path, entry)
        if os.path.isdir(entry_path):
            restore_tree(entry_path, new_path_id, session, path_mapping)
|
||||||
|
|
||||||
|
|
||||||
|
def get_unique_markdown_title(session, title, path_id):
    """Return *title*, suffixed with ".bp" as many times as needed so it does
    not collide with any markdown title already stored under *path_id*.
    """
    rows = session.query(Markdown.title).filter_by(path_id=path_id).all()
    taken = {row.title for row in rows}
    candidate = title
    while candidate in taken:
        candidate = candidate + ".bp"
    return candidate
|
||||||
58
api/tree.py
Normal file
58
api/tree.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
from flask import Blueprint, request, jsonify
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
import api
|
||||||
|
from api import require_auth, etag_response
|
||||||
|
from db import get_db
|
||||||
|
from db.models.Markdown import Markdown
|
||||||
|
from db.models.Path import Path
|
||||||
|
from api import limiter
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
tree_bp = Blueprint('tree', __name__, url_prefix='/api/tree')
|
||||||
|
|
||||||
|
|
||||||
|
def build_tree(db: Session, parent_id: int = None):
    """Build the nested markdown/folder tree under *parent_id*.

    Returns a list of dicts: markdown entries first, then folder ("path")
    entries. Each folder carries its recursively built ``children`` and an
    ``index: True`` flag when one of its direct children is a markdown
    document titled "index".
    """
    path_nodes = db.query(Path).filter(Path.parent_id == parent_id).all()
    md_nodes = db.query(Markdown.id, Markdown.title, Markdown.order,
                        Markdown.shortcut).filter(Markdown.path_id == parent_id).all()

    markdown_entries = [
        {
            "type": "markdown",
            "id": node.id,
            "title": node.title,
            "order": node.order,
            "shortcut": node.shortcut,
        }
        for node in md_nodes
    ]

    folder_entries = [
        {
            "type": "path",
            "id": node.id,
            "name": node.name,
            "order": node.order,
            "children": build_tree(db, node.id),
        }
        for node in path_nodes
    ]

    # Flag folders that directly contain an "index" document.
    # (dict.get replaces the original `"title" in child.keys() and ...` check;
    # folder children have no "title" key, so .get returns None for them.)
    for folder in folder_entries:
        if any(child.get("title") == "index" for child in folder["children"]):
            folder["index"] = True

    return markdown_entries + folder_entries
|
||||||
|
|
||||||
|
@tree_bp.route('/', methods=['GET'])
@limiter.limit(api.get_rate_limit)
@etag_response
def get_tree():
    """Return the full content tree rooted at the fixed root path (id 1).

    Response is a single "path" node for Root with its recursively built
    children and an ``index`` flag mirroring the per-folder flag that
    ``build_tree`` sets.
    """
    with get_db() as session:
        children = build_tree(session, 1)
        return jsonify(
            {
                "type": "path",
                "id": 1,
                "name": "Root",
                # dict.get replaces `"title" in child.keys() and ...`;
                # folder children lack "title", so .get returns None for them.
                "index": any(child.get("title") == "index" for child in children),
                "children": children,
            }
        ), 200
|
||||||
9
app.py
9
app.py
@@ -1,4 +1,3 @@
|
|||||||
# app.py
|
|
||||||
from pprint import pprint
|
from pprint import pprint
|
||||||
from logging_handlers.DatabaseLogHandler import DatabaseLogHandler
|
from logging_handlers.DatabaseLogHandler import DatabaseLogHandler
|
||||||
from api import limiter
|
from api import limiter
|
||||||
@@ -26,13 +25,17 @@ except Exception as e:
|
|||||||
|
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
app.secret_key = env_provider.SESSION_SECRET_KEY
|
app.secret_key = env_provider.SESSION_SECRET_KEY
|
||||||
CORS(app, resources={r"/api/*": {"origins": [
|
CORS(app, resources={
|
||||||
|
r"/api/*": {
|
||||||
|
"origins": [
|
||||||
env_provider.KC_HOST,
|
env_provider.KC_HOST,
|
||||||
env_provider.FRONTEND_HOST,
|
env_provider.FRONTEND_HOST,
|
||||||
r"https?://localhost:\d+",
|
r"https?://localhost:\d+",
|
||||||
r"https?://127\.0\.0\.1:\d+",
|
r"https?://127\.0\.0\.1:\d+",
|
||||||
r"https?://localhost"
|
r"https?://localhost"
|
||||||
]}},
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
expose_headers=['Content-Disposition']
|
expose_headers=['Content-Disposition']
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
#db/models/Resource.py
|
|
||||||
from sqlalchemy import Column, Text, LargeBinary, String
|
from sqlalchemy import Column, Text, LargeBinary, String
|
||||||
from db.models import Base
|
from db.models import Base
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user