diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..dfe0770 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Auto detect text files and perform LF normalization +* text=auto diff --git a/.gitignore b/.gitignore index c16fe5a..2c521e9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ -# lol -.vscode -ShareMii-main/ \ No newline at end of file +storage +*copy.* +.logs/* +__pycache__/ +*.pyc +venv/ diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..5e56ca7 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.languageServer": "Pylance", + "python.analysis.diagnosticSeverityOverrides": { + "reportMissingModuleSource": "none", + "reportShadowedImports": "none" + } +} \ No newline at end of file diff --git a/API.md b/API.md new file mode 100644 index 0000000..f39cc20 --- /dev/null +++ b/API.md @@ -0,0 +1,129 @@ +# ShareMii Upload API + +Public FastAPI API for sharing Mii files with optional metadata and up to 5 compressed preview images. + +## Features + +- Public endpoints (no API token) +- `.ltd` upload only +- Mii file size limit: `20 KB` +- Optional metadata on upload: + - `name` + - `author` + - `description` +- Optional image uploads (`images`, up to 5 files) +- Uploaded images are compressed to JPEG and stored in `storage/images` +- MySQL persistence for Mii metadata and image paths +- 6-character manual-friendly share codes (`file_id`) +- In-memory rate limiting by client hash for upload and download traffic + +## Endpoints + +- `GET /` + - Service info and rate-limit config. +- `POST /upload` + - Upload `.ltd` file with optional metadata and images. +- `GET /files/{file_id}` + - Download an `.ltd` file. +- `GET /images/{image_name}` + - Download one stored compressed image. +- `GET /miis/{file_id}` + - Retrieve metadata + image URLs for one Mii. 
+ +## Database + +This project expects MySQL credentials in `config.json`: + +```json +{ + "mysql": { + "host": "localhost", + "port": 3306, + "user": "root", + "password": "password", + "database": "sharemii_upload" + } +} +``` + +On startup, the API initializes tables from `src/utils/sql/init_db.sql`. + +## Run + +```bash +pip install -r requirements.txt +python main.py +``` + +Default URLs: + +```text +FastAPI: http://127.0.0.1:3000 +Django (ShareMii UI): http://127.0.0.1:8000 +``` + +## Curl + +### 1) Upload only the Mii file + +```bash +curl -X POST "http://127.0.0.1:3000/upload" \ + -F "file=@sample.ltd" +``` + +### 2) Upload with metadata and up to 5 images + +```bash +curl -X POST "http://127.0.0.1:3000/upload" \ + -F "file=@sample.ltd" \ + -F "name=My Mii" \ + -F "author=MyUsername" \ + -F "description=Created on Switch" \ + -F "images=@preview1.png" \ + -F "images=@preview2.jpg" +``` + +Example response: + +```json +{ + "file_id": "A7K2Q9", + "name": "My Mii", + "author": "MyUsername", + "description": "Created on Switch", + "original_filename": "sample.ltd", + "stored_filename": "A7K2Q9.ltd", + "size_bytes": 436, + "download_url": "/files/A7K2Q9", + "images": [ + "/images/A7K2Q9_1.jpg", + "/images/A7K2Q9_2.jpg" + ] +} +``` + +### 3) Download Mii file by code + +```bash +curl "http://127.0.0.1:3000/files/A7K2Q9" --output downloaded.ltd +``` + +### 4) Get metadata by code + +```bash +curl "http://127.0.0.1:3000/miis/A7K2Q9" +``` + +### 5) Download one preview image + +```bash +curl "http://127.0.0.1:3000/images/A7K2Q9_1.jpg" --output preview.jpg +``` + +## Notes + +- Rate limits are memory-based and reset when the server restarts. +- Invalid `.ltd` upload extension returns `400`. +- Invalid share code (`file_id`) format returns `400`. +- Missing file/image/metadata entries return `404`. +- Upload/download over rate budget returns `429`. 
diff --git a/README.md b/README.md index 3c85978..9c4a56f 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Dump your save data using **Checkpoint** or **JKSV** on your Switch, (or your ga https://sharemii.qwkuns.me/ -![Preview](assets/preview.png) +![Preview](src/webui/static/webui/assets/preview.png) *Currently, `.ltd` files are compatible with the original python version of [ShareMii](https://github.com/Star-F0rce/ShareMii), created by [@Star-F0rce](https://github.com/Star-F0rce). As much as possible, we want to keep them compatible with newer versions of `.ltd` files.* diff --git a/config.json b/config.json new file mode 100644 index 0000000..41a428c --- /dev/null +++ b/config.json @@ -0,0 +1,35 @@ +{ + "api": { + "host": "0.0.0.0", + "port": 3000, + "base_url": "", + "allowed_extension": ".ltd", + "max_file_size": 20480, + "max_images": 5, + "max_single_image_upload_size": 10485760, + "upload_window_seconds": 600, + "upload_byte_limit": 25600, + "download_window_seconds": 60, + "download_byte_limit": 256000, + "file_id_length": 6, + "file_id_alphabet": "23456789ABCDEFGHJKLMNPQRSTUVWXYZ", + "file_id_max_attempts": 50 + }, + "django": { + "host": "127.0.0.1", + "port": 8000, + "debug": true, + "secret_key": "dev-only-change-me", + "allowed_hosts": [ + "127.0.0.1", + "localhost" + ] + }, + "mysql": { + "host": "localhost", + "port": 3306, + "user": "root", + "password": "password", + "database": "sharemii_upload" + } +} diff --git a/db.sqlite3 b/db.sqlite3 new file mode 100644 index 0000000..01c9c74 Binary files /dev/null and b/db.sqlite3 differ diff --git a/main.py b/main.py new file mode 100644 index 0000000..4174cc5 --- /dev/null +++ b/main.py @@ -0,0 +1,41 @@ +import os +import signal +import subprocess +import sys + +from src.app.main import app +from src.utils.config import config + + +if __name__ == "__main__": + import uvicorn + + api_config = config.get("api", {}) + django_config = config.get("django", {}) + + api_host = str(api_config.get("host", 
"0.0.0.0")) + api_port = int(api_config.get("port", 3000)) + django_host = str(django_config.get("host", "127.0.0.1")) + django_port = int(django_config.get("port", 8000)) + + django_env = os.environ.copy() + django_env["DJANGO_SETTINGS_MODULE"] = "src.sharemii_server.settings" + django_cmd = [ + sys.executable, + "manage.py", + "runserver", + f"{django_host}:{django_port}", + "--noreload", + ] + + django_process = subprocess.Popen(django_cmd, env=django_env) + + try: + uvicorn.run(app, host=api_host, port=api_port) + finally: + if django_process.poll() is None: + django_process.send_signal(signal.SIGTERM) + try: + django_process.wait(timeout=5) + except subprocess.TimeoutExpired: + django_process.kill() diff --git a/manage.py b/manage.py new file mode 100644 index 0000000..02393ff --- /dev/null +++ b/manage.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +import os +import sys + + +def main() -> None: + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "src.sharemii_server.settings") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable?" 
+ ) from exc + + execute_from_command_line(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..0256774 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +fastapi +uvicorn +django +requests +python-multipart +mysql-connector-python +Pillow +colorama diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/__init__.py b/src/app/__init__.py new file mode 100644 index 0000000..ab06acf --- /dev/null +++ b/src/app/__init__.py @@ -0,0 +1 @@ +from src.app.main import app diff --git a/src/app/main.py b/src/app/main.py new file mode 100644 index 0000000..267ba44 --- /dev/null +++ b/src/app/main.py @@ -0,0 +1,25 @@ +from fastapi import FastAPI + +from src.app.routers.download import router as download_router +from src.app.routers.index import router as index_router +from src.app.routers.miis import router as miis_router +from src.app.routers.upload import router as upload_router +from src.app.settings import ensure_directories +from src.utils.sql import init as init_db + +app = FastAPI(title="ShareMii Upload API") + + +@app.on_event("startup") +def startup() -> None: + ensure_directories() + try: + init_db() + except Exception as exc: + raise RuntimeError("Database initialization failed.") from exc + + +app.include_router(index_router) +app.include_router(upload_router) +app.include_router(download_router) +app.include_router(miis_router) diff --git a/src/app/rate_limit.py b/src/app/rate_limit.py new file mode 100644 index 0000000..b01e553 --- /dev/null +++ b/src/app/rate_limit.py @@ -0,0 +1,62 @@ +import hashlib +import time +from collections import defaultdict, deque +from threading import Lock + +from fastapi import HTTPException, Request + + +class ByteRateLimiter: + def __init__(self) -> None: + self._events: dict[str, deque[tuple[float, int]]] = defaultdict(deque) + self._lock = Lock() + + def allow(self, key: str, cost: int, 
window_seconds: int, byte_limit: int) -> int | None: + now = time.monotonic() + + with self._lock: + events = self._events[key] + while events and now - events[0][0] >= window_seconds: + events.popleft() + + used_bytes = sum(size for _, size in events) + if used_bytes + cost > byte_limit: + if not events: + return window_seconds + return max(1, int(window_seconds - (now - events[0][0]))) + + events.append((now, cost)) + return None + + +def get_client_key(request: Request) -> str: + forwarded_for = request.headers.get("x-forwarded-for", "").split(",")[0].strip() + client_host = forwarded_for or (request.client.host if request.client else "unknown") + return hashlib.sha256(client_host.encode("utf-8")).hexdigest() + + +def enforce_rate_limit( + limiter: ByteRateLimiter, + request: Request, + *, + cost: int, + window_seconds: int, + byte_limit: int, + detail: str, +) -> None: + retry_after = limiter.allow( + key=get_client_key(request), + cost=cost, + window_seconds=window_seconds, + byte_limit=byte_limit, + ) + if retry_after is not None: + raise HTTPException( + status_code=429, + detail=detail, + headers={"Retry-After": str(retry_after)}, + ) + + +upload_limiter = ByteRateLimiter() +download_limiter = ByteRateLimiter() diff --git a/src/app/routers/__init__.py b/src/app/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/routers/download.py b/src/app/routers/download.py new file mode 100644 index 0000000..27fc101 --- /dev/null +++ b/src/app/routers/download.py @@ -0,0 +1,58 @@ +from fastapi import APIRouter, HTTPException, Request +from fastapi.responses import FileResponse + +from src.app.rate_limit import download_limiter, enforce_rate_limit +from src.app.services.files import image_path, is_valid_file_id, is_valid_image_name, ltd_path +from src.app.settings import DOWNLOAD_BYTE_LIMIT, DOWNLOAD_WINDOW_SECONDS + +router = APIRouter() + + +@router.get("/files/{file_id}") +def download_file(file_id: str, request: Request) -> 
FileResponse: + if not is_valid_file_id(file_id): + raise HTTPException(status_code=400, detail="Invalid file identifier.") + + stored_path = ltd_path(file_id) + if not stored_path.is_file(): + raise HTTPException(status_code=404, detail="File not found.") + + enforce_rate_limit( + download_limiter, + request, + cost=stored_path.stat().st_size, + window_seconds=DOWNLOAD_WINDOW_SECONDS, + byte_limit=DOWNLOAD_BYTE_LIMIT, + detail="Download rate limit exceeded for this client.", + ) + + return FileResponse( + path=stored_path, + filename=stored_path.name, + media_type="application/octet-stream", + ) + + +@router.get("/images/{image_name}") +def download_image(image_name: str, request: Request) -> FileResponse: + if not is_valid_image_name(image_name): + raise HTTPException(status_code=400, detail="Invalid image name.") + + stored_image = image_path(image_name) + if not stored_image.is_file(): + raise HTTPException(status_code=404, detail="Image not found.") + + enforce_rate_limit( + download_limiter, + request, + cost=stored_image.stat().st_size, + window_seconds=DOWNLOAD_WINDOW_SECONDS, + byte_limit=DOWNLOAD_BYTE_LIMIT, + detail="Download rate limit exceeded for this client.", + ) + + return FileResponse( + path=stored_image, + filename=stored_image.name, + media_type="image/jpeg", + ) diff --git a/src/app/routers/index.py b/src/app/routers/index.py new file mode 100644 index 0000000..71682e7 --- /dev/null +++ b/src/app/routers/index.py @@ -0,0 +1,29 @@ +from fastapi import APIRouter + +from src.app.settings import ( + ALLOWED_EXTENSION, + DOWNLOAD_BYTE_LIMIT, + DOWNLOAD_WINDOW_SECONDS, + MAX_FILE_SIZE, + UPLOAD_BYTE_LIMIT, + UPLOAD_WINDOW_SECONDS, +) + +router = APIRouter() + + +@router.get("/") +def index() -> dict: + return { + "service": "ShareMii Upload API", + "allowed_extension": ALLOWED_EXTENSION, + "max_file_size_bytes": MAX_FILE_SIZE, + "upload_limit": { + "window_seconds": UPLOAD_WINDOW_SECONDS, + "byte_limit": UPLOAD_BYTE_LIMIT, + }, + "download_limit": { 
+ "window_seconds": DOWNLOAD_WINDOW_SECONDS, + "byte_limit": DOWNLOAD_BYTE_LIMIT, + }, + } diff --git a/src/app/routers/miis.py b/src/app/routers/miis.py new file mode 100644 index 0000000..435463f --- /dev/null +++ b/src/app/routers/miis.py @@ -0,0 +1,31 @@ +import mysql.connector +from fastapi import APIRouter, HTTPException + +from src.app.services.files import build_download_url, build_image_url, is_valid_file_id +from src.utils.sql import get_mii + +router = APIRouter() + + +@router.get("/miis/{file_id}") +def get_mii_details(file_id: str) -> dict: + if not is_valid_file_id(file_id): + raise HTTPException(status_code=400, detail="Invalid file identifier.") + + try: + mii = get_mii(file_id) + except mysql.connector.Error as exc: + raise HTTPException(status_code=500, detail="Could not fetch Mii metadata.") from exc + + if not mii: + raise HTTPException(status_code=404, detail="Mii metadata not found.") + + return { + "file_id": mii["unique_id"], + "name": mii["name"], + "author": mii["author"], + "description": mii["description"], + "created_at": mii["created_at"], + "download_url": build_download_url(mii["unique_id"]), + "images": [build_image_url(image_name) for image_name in mii["images"]], + } diff --git a/src/app/routers/upload.py b/src/app/routers/upload.py new file mode 100644 index 0000000..eea030d --- /dev/null +++ b/src/app/routers/upload.py @@ -0,0 +1,99 @@ +import mysql.connector +from fastapi import APIRouter, File, Form, HTTPException, Request, UploadFile + +from src.app.rate_limit import enforce_rate_limit, upload_limiter +from src.app.services.files import ( + build_download_url, + build_image_url, + compress_image_to_jpeg, + generate_unique_file_id, + image_path, + ltd_path, + validate_filename, +) +from src.app.settings import ( + MAX_FILE_SIZE, + MAX_IMAGES, + MAX_SINGLE_IMAGE_UPLOAD_SIZE, + UPLOAD_BYTE_LIMIT, + UPLOAD_WINDOW_SECONDS, +) +from src.utils.sql import insert_mii, insert_mii_images + +router = APIRouter() + + 
+@router.post("/upload") +async def upload_file( + request: Request, + file: UploadFile = File(...), + name: str | None = Form(default=None), + author: str | None = Form(default=None), + description: str | None = Form(default=None), + images: list[UploadFile] | None = File(default=None), +) -> dict: + original_filename = validate_filename(file.filename) + content = await file.read(MAX_FILE_SIZE + 1) + + if len(content) > MAX_FILE_SIZE: + raise HTTPException( + status_code=413, + detail=f"File is too large. Maximum size is {MAX_FILE_SIZE} bytes.", + ) + + image_files = images or [] + if len(image_files) > MAX_IMAGES: + raise HTTPException(status_code=400, detail=f"Maximum {MAX_IMAGES} images allowed.") + + enforce_rate_limit( + upload_limiter, + request, + cost=len(content), + window_seconds=UPLOAD_WINDOW_SECONDS, + byte_limit=UPLOAD_BYTE_LIMIT, + detail="Upload rate limit exceeded for this client.", + ) + + raw_images: list[bytes] = [] + for image_file in image_files: + image_raw = await image_file.read(MAX_SINGLE_IMAGE_UPLOAD_SIZE + 1) + if len(image_raw) > MAX_SINGLE_IMAGE_UPLOAD_SIZE: + raise HTTPException( + status_code=413, + detail="One of the images is too large.", + ) + raw_images.append(image_raw) + + file_id = generate_unique_file_id() + stored_path = ltd_path(file_id) + stored_image_names: list[str] = [] + + try: + stored_path.write_bytes(content) + + for index, raw_image in enumerate(raw_images, start=1): + compressed_image = compress_image_to_jpeg(raw_image) + image_name = f"{file_id}_{index}.jpg" + image_file_path = image_path(image_name) + image_file_path.write_bytes(compressed_image) + stored_image_names.append(image_name) + + insert_mii(file_id, name=name, author=author, description=description) + insert_mii_images(file_id, stored_image_names) + except mysql.connector.Error as exc: + stored_path.unlink(missing_ok=True) + for image_name in stored_image_names: + image_path(image_name).unlink(missing_ok=True) + raise HTTPException(status_code=500, 
detail="Could not save Mii metadata.") from exc + + return { + "file_id": file_id, + "name": name, + "author": author, + "description": description, + "original_filename": original_filename, + "stored_filename": stored_path.name, + "size_bytes": len(content), + "download_url": build_download_url(file_id), + "images": [build_image_url(image_name) for image_name in stored_image_names], + } diff --git a/src/app/services/files.py b/src/app/services/files.py new file mode 100644 index 0000000..e104ee6 --- /dev/null +++ b/src/app/services/files.py @@ -0,0 +1,83 @@ +import io +import secrets +from pathlib import Path + +from PIL import Image, UnidentifiedImageError +from fastapi import HTTPException + +from src.app.settings import ( + ALLOWED_EXTENSION, + BASE_URL, + FILE_ID_ALPHABET, + FILE_ID_LENGTH, + FILE_ID_MAX_ATTEMPTS, + IMAGES_DIR, + STORAGE_DIR, +) + + +def validate_filename(filename: str | None) -> str: + if not filename: + raise HTTPException(status_code=400, detail="A filename is required.") + + safe_name = Path(filename).name + if safe_name != filename: + raise HTTPException(status_code=400, detail="Invalid filename.") + + if Path(safe_name).suffix.lower() != ALLOWED_EXTENSION: + raise HTTPException( + status_code=400, + detail=f"Only {ALLOWED_EXTENSION} files are allowed.", + ) + + return safe_name + + +def build_download_url(file_id: str) -> str: + configured_base_url = BASE_URL.rstrip("/") + if configured_base_url: + return f"{configured_base_url}/files/{file_id}" + return f"/files/{file_id}" + + +def build_image_url(image_name: str) -> str: + configured_base_url = BASE_URL.rstrip("/") + if configured_base_url: + return f"{configured_base_url}/images/{image_name}" + return f"/images/{image_name}" + + +def is_valid_file_id(file_id: str) -> bool: + return len(file_id) == FILE_ID_LENGTH and all(char in FILE_ID_ALPHABET for char in file_id) + + +def is_valid_image_name(image_name: str) -> bool: + safe_name = Path(image_name).name + return safe_name == 
image_name and image_name.endswith(".jpg") + + +def generate_unique_file_id() -> str: + for _ in range(FILE_ID_MAX_ATTEMPTS): + file_id = "".join(secrets.choice(FILE_ID_ALPHABET) for _ in range(FILE_ID_LENGTH)) + if not (STORAGE_DIR / f"{file_id}{ALLOWED_EXTENSION}").exists(): + return file_id + raise HTTPException(status_code=503, detail="Could not allocate a unique file id.") + + +def compress_image_to_jpeg(raw_bytes: bytes) -> bytes: + try: + with Image.open(io.BytesIO(raw_bytes)) as img: + img = img.convert("RGB") + output = io.BytesIO() + img.save(output, format="JPEG", optimize=True, quality=70) + return output.getvalue() + except (UnidentifiedImageError, OSError) as exc: + raise HTTPException(status_code=400, detail="Invalid image file uploaded.") from exc + + +def ltd_path(file_id: str): + return STORAGE_DIR / f"{file_id}{ALLOWED_EXTENSION}" + + +def image_path(image_name: str): + return IMAGES_DIR / image_name diff --git a/src/app/settings.py b/src/app/settings.py new file mode 100644 index 0000000..bdbf0aa --- /dev/null +++ b/src/app/settings.py @@ -0,0 +1,25 @@ +from pathlib import Path + +from src.utils.config import config + +api_config = config.get("api", {}) + +MAX_FILE_SIZE = int(api_config.get("max_file_size", 20 * 1024)) +ALLOWED_EXTENSION = str(api_config.get("allowed_extension", ".ltd")) +BASE_URL = str(api_config.get("base_url", "")) +STORAGE_DIR = Path("storage") +IMAGES_DIR = STORAGE_DIR / "images" +MAX_IMAGES = int(api_config.get("max_images", 5)) +MAX_SINGLE_IMAGE_UPLOAD_SIZE = int(api_config.get("max_single_image_upload_size", 10 * 1024 * 1024)) +UPLOAD_WINDOW_SECONDS = int(api_config.get("upload_window_seconds", 600)) +UPLOAD_BYTE_LIMIT = int(api_config.get("upload_byte_limit", 25 * 1024)) +DOWNLOAD_WINDOW_SECONDS = int(api_config.get("download_window_seconds", 60)) +DOWNLOAD_BYTE_LIMIT = int(api_config.get("download_byte_limit", 250 * 1024)) +FILE_ID_LENGTH = int(api_config.get("file_id_length", 6)) +FILE_ID_ALPHABET = 
str(api_config.get("file_id_alphabet", "23456789ABCDEFGHJKLMNPQRSTUVWXYZ")) +FILE_ID_MAX_ATTEMPTS = int(api_config.get("file_id_max_attempts", 50)) + + +def ensure_directories() -> None: + STORAGE_DIR.mkdir(parents=True, exist_ok=True) + IMAGES_DIR.mkdir(parents=True, exist_ok=True) diff --git a/src/sharemii_server/__init__.py b/src/sharemii_server/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sharemii_server/asgi.py b/src/sharemii_server/asgi.py new file mode 100644 index 0000000..bbb5097 --- /dev/null +++ b/src/sharemii_server/asgi.py @@ -0,0 +1,7 @@ +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "src.sharemii_server.settings") + +application = get_asgi_application() diff --git a/src/sharemii_server/settings.py b/src/sharemii_server/settings.py new file mode 100644 index 0000000..49ac78c --- /dev/null +++ b/src/sharemii_server/settings.py @@ -0,0 +1,70 @@ +from pathlib import Path + +from src.utils.config import config + +BASE_DIR = Path(__file__).resolve().parents[2] + +django_config = config.get("django", {}) + +SECRET_KEY = str(django_config.get("secret_key", "dev-only-change-me")) +DEBUG = bool(django_config.get("debug", True)) +ALLOWED_HOSTS = list(django_config.get("allowed_hosts", ["127.0.0.1", "localhost"])) + +INSTALLED_APPS = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "src.webapi", +] + +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +] + +ROOT_URLCONF = "src.sharemii_server.urls" + 
+TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [BASE_DIR / "src" / "webui" / "templates"], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "src.sharemii_server.wsgi.application" +ASGI_APPLICATION = "src.sharemii_server.asgi.application" + +DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": BASE_DIR / "db.sqlite3", + } +} + +AUTH_PASSWORD_VALIDATORS = [] + +LANGUAGE_CODE = "en-us" +TIME_ZONE = "UTC" +USE_I18N = True +USE_TZ = True + +STATIC_URL = "/static/" +STATICFILES_DIRS = [BASE_DIR / "src" / "webui" / "static"] + +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" diff --git a/src/sharemii_server/urls.py b/src/sharemii_server/urls.py new file mode 100644 index 0000000..5ffeb95 --- /dev/null +++ b/src/sharemii_server/urls.py @@ -0,0 +1,7 @@ +from django.contrib import admin +from django.urls import include, path + +urlpatterns = [ + path("admin/", admin.site.urls), + path("", include("src.webapi.urls")), +] diff --git a/src/sharemii_server/wsgi.py b/src/sharemii_server/wsgi.py new file mode 100644 index 0000000..de8b307 --- /dev/null +++ b/src/sharemii_server/wsgi.py @@ -0,0 +1,7 @@ +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "src.sharemii_server.settings") + +application = get_wsgi_application() diff --git a/src/utils/__init__.py b/src/utils/__init__.py new file mode 100644 index 0000000..36f4974 --- /dev/null +++ b/src/utils/__init__.py @@ -0,0 +1,2 @@ +from .config import config +from .logger import log diff --git a/src/utils/config.py b/src/utils/config.py new file mode 100644 index 0000000..0fbf14e --- /dev/null +++ b/src/utils/config.py @@ -0,0 +1,15 @@ +import json +from functools import lru_cache +from 
pathlib import Path + +PROJECT_ROOT = Path(__file__).resolve().parents[2] +CONFIG_PATH = PROJECT_ROOT / "config.json" + + +@lru_cache(maxsize=1) +def get_config() -> dict: + with CONFIG_PATH.open(encoding="utf-8") as config_file: + return json.load(config_file) + + +config = get_config() diff --git a/src/utils/logger.py b/src/utils/logger.py new file mode 100644 index 0000000..010bf8a --- /dev/null +++ b/src/utils/logger.py @@ -0,0 +1,147 @@ +# https://gist.github.com/Lenochxd/9d1346d83cec1b7d6c577828de7db206 + +import os +import traceback +from datetime import datetime +from colorama import init, Fore + +# Initialize colorama +init(autoreset=True) + +class Logger: + def __init__(self, from_updater=False): + """ + Initializes the Logger instance. + + This method creates a log directory if it doesn't exist and sets the log file path. + """ + self.log_dir = ".logs" + if not os.path.exists(self.log_dir): + os.makedirs(self.log_dir) + date_str = datetime.now().date().isoformat() + self.log_file = f"{self.log_dir}/{date_str}-bishokus.log" + self.debug_enabled = True + + def _write_log(self, level, message): + """ + Writes a log message to the log file with a given level and message. + + Returns: + str: The formatted log message. + """ + timestamp = datetime.now().strftime("%H:%M:%S") + log_message = f"[{level} @ {timestamp}] - {message}" + + date_str = datetime.now().date().isoformat() + self.log_file = f"{self.log_dir}/{date_str}-bishokus.log" + with open(self.log_file, 'a', encoding='utf-8') as file: + file.write(log_message + '\n') + + return log_message + + def set_log_file(self, log_file: str): + """ + Sets the log file path to the specified file. + + Args: + log_file (str): The path to the log file. + """ + self.log_file = log_file + + def enable_debug(self): + """ + Enables the printing of debug messages. + """ + self.debug_enabled = True + + def disable_debug(self): + """ + Disables the printing of debug messages. 
+ """ + self.debug_enabled = False + + def info(self, message): + """ + Use this method for general information that highlights the progress of the application. + """ + log_message = self._write_log("INFO", message) + print(Fore.GREEN + log_message) + + def success(self, message): + """ + Use this method to indicate successful completion of an operation. + """ + log_message = self._write_log("SUCCESS", message) + print(Fore.GREEN + log_message) + + def notice(self, message): + """ + Use this method to indicate a redundant but correct action, such as trying to turn on something that is already on. + """ + log_message = self._write_log("NOTICE", message) + print(Fore.GREEN + log_message) + + def warning(self, message): + """ + Use this method to indicate a potential problem or important situation that should be noted. + """ + log_message = self._write_log("WARNING", message) + print(Fore.YELLOW + log_message) + + def debug(self, message): + """ + Use this method for detailed information, typically of interest only when diagnosing problems. + """ + log_message = self._write_log("DEBUG", message) + if self.debug_enabled: + print(Fore.BLUE + log_message) + + def error(self, message): + """ + Use this method to indicate a significant problem that has occurred. + """ + log_message = self._write_log("ERROR", message) + print(Fore.RED + log_message) + + def exception(self, exception, message=None, expected=False, log_traceback=True, print_log=True): + """ + Logs an exception message along with the traceback. + + Use this method to log exceptions that occur during the execution of the program. + + Args: + exception (Exception): The exception instance to log. + message (str, optional): Additional message to log with the exception. + expected (bool, optional): Indicates if the exception was expected (caught using try-except). Defaults to True. + log_traceback (bool, optional): Indicates if the traceback details should be logged. Defaults to True. 
+ print_log (bool, optional): Indicates if the log message should be printed. Defaults to True. + """ + exception_title = f"{type(exception).__name__}:\n {str(exception)}\n" + if log_traceback: + exception_message = ''.join(traceback.format_exception(type(exception), exception, exception.__traceback__)) + else: + exception_message = str(exception) + exception_type = "EXPECTED EXCEPTION" if expected else "UNEXPECTED EXCEPTION" + if message: + log_message = self._write_log(exception_type, f"{message} - {exception_title}\n{exception_message}") + else: + log_message = self._write_log(exception_type, f"{exception_title}\n{exception_message}") + if print_log: + print(Fore.MAGENTA + log_message) + + +# Instantiate Logger at the module level +log = Logger() + + +if __name__ == '__main__': + # Usage example: + log.info('hello world') + log.success('task completed successfully') + log.warning('this is a warning') + log.debug('this is a debug message') + log.error('this is an error') + try: + 1 / 0 # This will raise a ZeroDivisionError + except ZeroDivisionError as e: + log.exception(e, "An error occurred while performing division") diff --git a/src/utils/sql/__init__.py b/src/utils/sql/__init__.py new file mode 100644 index 0000000..3a6f5e8 --- /dev/null +++ b/src/utils/sql/__init__.py @@ -0,0 +1,178 @@ +# https://github.com/Bishoko/Bishokus/blob/main/utils/sql/__init__.py + +import mysql.connector +import os +from typing import Any, cast +from src.utils.config import config +from src.utils.logger import log + +db_config = config["mysql"] + + +def get_db_connection(): + """ + Establishes and returns a connection to the MySQL database. + + This function attempts to connect to the database using the configuration + specified in the 'db_config' dictionary. If the initial connection fails + due to a ProgrammingError (which might occur if the database doesn't exist), + it will create the database and then attempt to connect again. 
+ + Returns: + mysql.connector.connection.MySQLConnection: A connection object to the MySQL database. + + Raises: + mysql.connector.Error: If there's an error connecting to the database + that isn't resolved by creating the database. + + Note: + This function relies on the 'db_config' dictionary being properly + populated with the necessary connection parameters. + """ + db_cfg = dict(db_config) + try: + return mysql.connector.connect(**db_cfg) + except mysql.connector.errors.ProgrammingError: + + database = db_cfg['database'] + del db_cfg['database'] + + conn = mysql.connector.connect(**db_cfg) + cursor = conn.cursor() + + create_database(cursor, database) + + db_cfg['database'] = database + return mysql.connector.connect(**db_cfg) + + +def create_database(cursor, db_name): + """ + Creates a database if it does not exist. + + :param cursor: MySQL cursor object + :param db_name: Name of the database to create + """ + try: + cursor.execute(f"CREATE DATABASE IF NOT EXISTS {db_name} DEFAULT CHARACTER SET 'utf8'") + log.info(f"Database {db_name} created or already exists.") + except mysql.connector.Error as err: + log.exception(err, f"Failed to create database {db_name}") + exit(1) + +def execute_sql_file(cursor, sql_file_path, replacements=None): + """ + Executes a SQL file with caching. 
+ + :param cursor: MySQL cursor object + :param sql_file_path: Path to the SQL file + :param replacements: Dictionary of replacements for %s placeholders + """ + cache = {} + + def read_file(path): + if path not in cache: + with open(path, 'r') as file: + cache[path] = file.read() + return cache[path] + + def clear_cache(): + cache.clear() + + sql_commands = read_file(sql_file_path).split(';') + for command in sql_commands: + command = command.strip() + if command: + if replacements: + cursor.execute(command, replacements) + else: + cursor.execute(command) + + # Clear cache if file has been modified + if os.path.getmtime(sql_file_path) > cache.get(sql_file_path + '_mtime', 0): + clear_cache() + cache[sql_file_path + '_mtime'] = os.path.getmtime(sql_file_path) + +def init(): + """ + Initializes the database connection, executes SQL file, and performs initial setup. + """ + + conn = get_db_connection() + cursor = conn.cursor() + + execute_sql_file(cursor, 'src/utils/sql/init_db.sql') + + conn.commit() + conn.close() + + +def insert_mii(unique_id, name=None, author=None, description=None): + conn = get_db_connection() + cursor = conn.cursor() + cursor.execute( + """ + INSERT INTO miis (unique_id, name, author, description) + VALUES (%s, %s, %s, %s) + """, + (unique_id, name, author, description), + ) + conn.commit() + conn.close() + + +def insert_mii_images(unique_id, image_paths): + if not image_paths: + return + + conn = get_db_connection() + cursor = conn.cursor() + cursor.executemany( + """ + INSERT INTO mii_images (mii_unique_id, image_path) + VALUES (%s, %s) + """, + [(unique_id, image_path) for image_path in image_paths], + ) + conn.commit() + conn.close() + + +def get_mii(unique_id): + conn = get_db_connection() + cursor = conn.cursor(dictionary=True) + + cursor.execute( + """ + SELECT unique_id, name, author, description, created_at + FROM miis + WHERE unique_id = %s + """, + (unique_id,), + ) + mii_row = cast(dict[str, Any] | None, cursor.fetchone()) + if 
not mii_row:
+        # Unknown share code: release the connection and signal absence with None.
+        conn.close()
+        return None
+
+    cursor.execute(
+        """
+        SELECT image_path
+        FROM mii_images
+        WHERE mii_unique_id = %s
+        ORDER BY id ASC
+        """,
+        (unique_id,),
+    )
+    image_rows = cast(list[dict[str, Any]], cursor.fetchall())
+    images = [str(row["image_path"]) for row in image_rows]
+    conn.close()
+
+    # Shape consumed by the /miis/{file_id} endpoint (metadata + image paths).
+    return {
+        "unique_id": mii_row["unique_id"],
+        "name": mii_row["name"],
+        "author": mii_row["author"],
+        "description": mii_row["description"],
+        "created_at": mii_row["created_at"],
+        "images": images,
+    }
diff --git a/src/utils/sql/init_db.sql b/src/utils/sql/init_db.sql
new file mode 100644
index 0000000..2d774e8
--- /dev/null
+++ b/src/utils/sql/init_db.sql
@@ -0,0 +1,16 @@
+CREATE TABLE IF NOT EXISTS miis (
+    id INT AUTO_INCREMENT PRIMARY KEY,
+    unique_id VARCHAR(16) NOT NULL UNIQUE,
+    name VARCHAR(255),
+    author VARCHAR(255),
+    description TEXT,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE IF NOT EXISTS mii_images (
+    id INT AUTO_INCREMENT PRIMARY KEY,
+    mii_unique_id VARCHAR(16) NOT NULL,
+    image_path VARCHAR(255) NOT NULL,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    FOREIGN KEY (mii_unique_id) REFERENCES miis(unique_id) ON DELETE CASCADE
+);
diff --git a/src/webapi/__init__.py b/src/webapi/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/webapi/apps.py b/src/webapi/apps.py
new file mode 100644
index 0000000..b61fd87
--- /dev/null
+++ b/src/webapi/apps.py
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class WebapiConfig(AppConfig):
+    default_auto_field = "django.db.models.BigAutoField"
+    name = "src.webapi"
diff --git a/src/webapi/urls.py b/src/webapi/urls.py
new file mode 100644
index 0000000..a1eb2ad
--- /dev/null
+++ b/src/webapi/urls.py
@@ -0,0 +1,7 @@
+from django.urls import path
+
+from src.webapi.views import index
+
+urlpatterns = [
+    path("", index, name="index"),
+]
diff --git a/src/webapi/views.py b/src/webapi/views.py
new file mode 100644
index 0000000..2508e11
---
/dev/null +++ b/src/webapi/views.py @@ -0,0 +1,5 @@ +from django.shortcuts import render + + +def index(request): + return render(request, "webui/index.html") diff --git a/CNAME b/src/webui/CNAME similarity index 100% rename from CNAME rename to src/webui/CNAME diff --git a/app.js b/src/webui/static/webui/app.js similarity index 100% rename from app.js rename to src/webui/static/webui/app.js diff --git a/assets/preview.png b/src/webui/static/webui/assets/preview.png similarity index 100% rename from assets/preview.png rename to src/webui/static/webui/assets/preview.png diff --git a/config.js b/src/webui/static/webui/config.js similarity index 100% rename from config.js rename to src/webui/static/webui/config.js diff --git a/favicon.ico b/src/webui/static/webui/favicon.ico similarity index 100% rename from favicon.ico rename to src/webui/static/webui/favicon.ico diff --git a/index.css b/src/webui/static/webui/index.css similarity index 100% rename from index.css rename to src/webui/static/webui/index.css diff --git a/index.html b/src/webui/templates/webui/index.html similarity index 96% rename from index.html rename to src/webui/templates/webui/index.html index 8e6ecf0..0f30a24 100644 --- a/index.html +++ b/src/webui/templates/webui/index.html @@ -2,6 +2,7 @@ + {% load static %} ShareMii @@ -24,7 +25,8 @@ - + + @@ -114,8 +116,8 @@

About ShareMii

- - + + \ No newline at end of file diff --git a/testing.html b/testing.html new file mode 100644 index 0000000..e1620d3 --- /dev/null +++ b/testing.html @@ -0,0 +1,89 @@ + + + + + ShareMii API Tester + + +

ShareMii API Tester

+ +
+

Upload Mii

+
+

+

+

+

+

+ +
+

+
+  
+

Get Mii Metadata

+
+ + +
+

+
+  
+

Download Mii File

+
+ + +
+ +
+

View Image

+
+ + +
+
+ + + +