From bb4e72e91b79a60120738b0014dbfda3b2e6b1f4 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 08:33:23 +0000 Subject: [PATCH 1/4] Initial plan From 5c5336c6828ed44e45d3f1e92992de4f3d062a0a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 08:42:07 +0000 Subject: [PATCH 2/4] feat: build complete Faltric MVP scaffold MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - blockchain/: Solidity ERC-20 EnergyToken (ENRG) + TradeEngine escrow, Hardhat config, deploy script, and test suite - backend/: Express + Socket.io API server with JWT auth, MongoDB models (User, Installation, MeterReading, ChatMessage), REST routes for auth, installations, meter readings, and AI predictions; 15-min meter poller that mints ENRG tokens via ethers.js - frontend/: React + Vite + Tailwind SPA with AuthContext (JWT), Web3Context (MetaMask/ethers.js), Mapbox grid map with color-coded energy node markers, P2P exchange UI with TradeModal, AI prediction dashboard, Socket.io chat, and admin approval dashboard - ai-service/: FastAPI Python service with scikit-learn regression model, weather-weighted scoring (S = Σ W_i * V_i), and reportlab PDF reports - docker-compose.yml: orchestrates all 4 services + MongoDB + Redis - Dockerfiles for backend (Node), ai-service (Python), frontend (Nginx) - .gitignore covering node_modules, build artifacts, .env files Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .gitignore | 64 +++++ ai-service/.env.example | 3 + ai-service/Dockerfile | 7 + ai-service/main.py | 117 +++++++++ ai-service/models/recommendation.py | 248 ++++++++++++++++++ ai-service/requirements.txt | 8 + backend/.env.example | 25 ++ backend/Dockerfile | 7 + backend/package.json | 31 +++ backend/src/config/db.js | 40 +++ backend/src/index.js | 130 +++++++++ backend/src/middleware/auth.js 
| 35 +++ backend/src/models/ChatMessage.js | 28 ++ backend/src/models/Installation.js | 65 +++++ backend/src/models/MeterReading.js | 43 +++ backend/src/models/User.js | 53 ++++ backend/src/routes/auth.js | 119 +++++++++ backend/src/routes/installations.js | 173 ++++++++++++ backend/src/routes/meter.js | 114 ++++++++ backend/src/routes/predict.js | 68 +++++ backend/src/services/meterPoller.js | 95 +++++++ blockchain/contracts/EnergyToken.sol | 35 +++ blockchain/contracts/TradeEngine.sol | 115 ++++++++ blockchain/hardhat.config.js | 29 ++ blockchain/package.json | 19 ++ blockchain/scripts/deploy.js | 37 +++ blockchain/test/contracts.test.js | 122 +++++++++ docker-compose.yml | 98 +++++++ frontend/Dockerfile | 22 ++ frontend/index.html | 14 + frontend/nginx.conf | 30 +++ frontend/package.json | 33 +++ frontend/postcss.config.js | 6 + frontend/src/App.jsx | 97 +++++++ .../src/components/Admin/AdminDashboard.jsx | 178 +++++++++++++ frontend/src/components/Auth/Login.jsx | 101 +++++++ frontend/src/components/Connect/Connect.jsx | 139 ++++++++++ frontend/src/components/Exchange/Exchange.jsx | 132 ++++++++++ .../src/components/Exchange/TradeModal.jsx | 85 ++++++ frontend/src/components/GridMap/GridMap.jsx | 156 +++++++++++ frontend/src/components/GridMap/NodePopup.jsx | 51 ++++ frontend/src/components/Layout/Navbar.jsx | 52 ++++ frontend/src/components/Layout/Sidebar.jsx | 59 +++++ frontend/src/components/Predict/Predict.jsx | 123 +++++++++ frontend/src/contexts/AuthContext.jsx | 112 ++++++++ frontend/src/contexts/Web3Context.jsx | 140 ++++++++++ frontend/src/hooks/useWeb3.js | 11 + frontend/src/index.css | 32 +++ frontend/src/main.jsx | 10 + frontend/tailwind.config.js | 24 ++ frontend/vite.config.js | 20 ++ 51 files changed, 3555 insertions(+) create mode 100644 .gitignore create mode 100644 ai-service/.env.example create mode 100644 ai-service/Dockerfile create mode 100644 ai-service/main.py create mode 100644 ai-service/models/recommendation.py create mode 100644 
ai-service/requirements.txt create mode 100644 backend/.env.example create mode 100644 backend/Dockerfile create mode 100644 backend/package.json create mode 100644 backend/src/config/db.js create mode 100644 backend/src/index.js create mode 100644 backend/src/middleware/auth.js create mode 100644 backend/src/models/ChatMessage.js create mode 100644 backend/src/models/Installation.js create mode 100644 backend/src/models/MeterReading.js create mode 100644 backend/src/models/User.js create mode 100644 backend/src/routes/auth.js create mode 100644 backend/src/routes/installations.js create mode 100644 backend/src/routes/meter.js create mode 100644 backend/src/routes/predict.js create mode 100644 backend/src/services/meterPoller.js create mode 100644 blockchain/contracts/EnergyToken.sol create mode 100644 blockchain/contracts/TradeEngine.sol create mode 100644 blockchain/hardhat.config.js create mode 100644 blockchain/package.json create mode 100644 blockchain/scripts/deploy.js create mode 100644 blockchain/test/contracts.test.js create mode 100644 docker-compose.yml create mode 100644 frontend/Dockerfile create mode 100644 frontend/index.html create mode 100644 frontend/nginx.conf create mode 100644 frontend/package.json create mode 100644 frontend/postcss.config.js create mode 100644 frontend/src/App.jsx create mode 100644 frontend/src/components/Admin/AdminDashboard.jsx create mode 100644 frontend/src/components/Auth/Login.jsx create mode 100644 frontend/src/components/Connect/Connect.jsx create mode 100644 frontend/src/components/Exchange/Exchange.jsx create mode 100644 frontend/src/components/Exchange/TradeModal.jsx create mode 100644 frontend/src/components/GridMap/GridMap.jsx create mode 100644 frontend/src/components/GridMap/NodePopup.jsx create mode 100644 frontend/src/components/Layout/Navbar.jsx create mode 100644 frontend/src/components/Layout/Sidebar.jsx create mode 100644 frontend/src/components/Predict/Predict.jsx create mode 100644 
frontend/src/contexts/AuthContext.jsx create mode 100644 frontend/src/contexts/Web3Context.jsx create mode 100644 frontend/src/hooks/useWeb3.js create mode 100644 frontend/src/index.css create mode 100644 frontend/src/main.jsx create mode 100644 frontend/tailwind.config.js create mode 100644 frontend/vite.config.js diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2e379a6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,64 @@ +# Dependencies +node_modules/ +__pycache__/ +*.pyc +*.pyo +.venv/ +venv/ +env/ + +# Build outputs +dist/ +build/ +artifacts/ +cache/ +typechain/ +typechain-types/ + +# Environment files +.env +.env.local +.env.*.local + +# Logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +logs/ + +# OS +.DS_Store +Thumbs.db + +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# Coverage +coverage/ +.nyc_output/ +htmlcov/ + +# Hardhat +blockchain/artifacts/ +blockchain/cache/ +blockchain/ignition/deployments/ + +# Frontend +frontend/dist/ + +# Python +*.egg-info/ +dist/ +.pytest_cache/ + +# Docker +.dockerignore + +# Misc +*.zip +*.tar.gz +deployments.json diff --git a/ai-service/.env.example b/ai-service/.env.example new file mode 100644 index 0000000..32fb374 --- /dev/null +++ b/ai-service/.env.example @@ -0,0 +1,3 @@ +# AI Service Environment Variables +PORT=8000 +REPORTS_DIR=/tmp/faltric_reports diff --git a/ai-service/Dockerfile b/ai-service/Dockerfile new file mode 100644 index 0000000..57f886f --- /dev/null +++ b/ai-service/Dockerfile @@ -0,0 +1,7 @@ +FROM python:3.11-slim +WORKDIR /app +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt +COPY . . 
+EXPOSE 8000 +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/ai-service/main.py b/ai-service/main.py new file mode 100644 index 0000000..44ba560 --- /dev/null +++ b/ai-service/main.py @@ -0,0 +1,117 @@ +import os +import uuid +from pathlib import Path +from typing import List, Optional + +from dotenv import load_dotenv +from fastapi import FastAPI, HTTPException +from fastapi.responses import FileResponse +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel + +from models.recommendation import RecommendationEngine + +load_dotenv() + +app = FastAPI( + title="Faltric AI Service", + description="Energy source recommendation and prediction API", + version="1.0.0", +) + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_methods=["*"], + allow_headers=["*"], +) + +REPORTS_DIR = Path(os.getenv("REPORTS_DIR", "/tmp/faltric_reports")) +REPORTS_DIR.mkdir(parents=True, exist_ok=True) + +engine = RecommendationEngine() + + +class HistoricalEntry(BaseModel): + generation: float + consumption: float + date: Optional[str] = None + + +class WeatherData(BaseModel): + solar_irradiance: float = 500.0 # W/m² + wind_speed: float = 5.0 # m/s + precipitation: float = 0.0 # mm + + +class PredictRequest(BaseModel): + installation_id: str + installation_type: Optional[str] = None + historical_data: List[HistoricalEntry] = [] + weather: WeatherData = WeatherData() + + +class PredictResponse(BaseModel): + recommended_source: str + score: float + estimated_earnings: float + report_url: Optional[str] = None + + +@app.get("/health") +def health(): + return {"status": "ok"} + + +@app.post("/predict", response_model=PredictResponse) +def predict(req: PredictRequest): + try: + historical = [ + {"generation": e.generation, "consumption": e.consumption} + for e in req.historical_data + ] + weather = { + "solar_irradiance": req.weather.solar_irradiance, + "wind_speed": req.weather.wind_speed, + "precipitation": 
req.weather.precipitation, + } + + result = engine.recommend( + installation_id=req.installation_id, + installation_type=req.installation_type, + historical_data=historical, + weather=weather, + ) + + # Generate PDF report + report_id = str(uuid.uuid4()) + report_path = REPORTS_DIR / f"report_{report_id}.pdf" + engine.generate_report( + path=str(report_path), + installation_id=req.installation_id, + result=result, + weather=weather, + ) + + result["report_url"] = f"/reports/{report_id}" + return result + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@app.get("/reports/{report_id}") +def get_report(report_id: str): + # Sanitize: only allow UUID-like names + if not all(c in "0123456789abcdef-" for c in report_id.lower()): + raise HTTPException(status_code=400, detail="Invalid report ID") + path = REPORTS_DIR / f"report_{report_id}.pdf" + if not path.exists(): + raise HTTPException(status_code=404, detail="Report not found") + return FileResponse(path, media_type="application/pdf", filename=f"faltric_report_{report_id}.pdf") + + +if __name__ == "__main__": + import uvicorn + port = int(os.getenv("PORT", 8000)) + uvicorn.run("main:app", host="0.0.0.0", port=port, reload=True) diff --git a/ai-service/models/recommendation.py b/ai-service/models/recommendation.py new file mode 100644 index 0000000..66ac3ad --- /dev/null +++ b/ai-service/models/recommendation.py @@ -0,0 +1,248 @@ +""" +Recommendation engine for Faltric AI service. + +Score formula: S = Σ(W_i * V_i) / Σ(W_i) * 100 +where W_i are weather weights and V_i are normalized historical efficiency values. 
+""" + +import io +from typing import Any, Dict, List, Optional + +import numpy as np + +try: + from sklearn.linear_model import LinearRegression + SKLEARN_AVAILABLE = True +except ImportError: + SKLEARN_AVAILABLE = False + +try: + from reportlab.lib.pagesizes import letter + from reportlab.lib.styles import getSampleStyleSheet + from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle + from reportlab.lib import colors + REPORTLAB_AVAILABLE = True +except ImportError: + REPORTLAB_AVAILABLE = False + +# Weather weights for each source type +WEATHER_WEIGHTS: Dict[str, Dict[str, float]] = { + "solar": {"solar_irradiance": 0.7, "wind_speed": 0.05, "precipitation": -0.25}, + "wind": {"solar_irradiance": 0.05, "wind_speed": 0.8, "precipitation": -0.15}, + "biogas": {"solar_irradiance": 0.1, "wind_speed": 0.1, "precipitation": -0.1}, +} + +# Normalized ranges for weather features +WEATHER_RANGES = { + "solar_irradiance": (0, 1000), # W/m² + "wind_speed": (0, 20), # m/s + "precipitation": (0, 50), # mm +} + +ETH_PER_KWH = 0.0001 # Mock price: 1 kWh = 0.0001 ETH + + +def normalize(value: float, min_val: float, max_val: float) -> float: + """Normalize value to [0, 1] range.""" + if max_val == min_val: + return 0.0 + return max(0.0, min(1.0, (value - min_val) / (max_val - min_val))) + + +class RecommendationEngine: + """ + Recommends the best energy source based on weather conditions + and historical generation/consumption efficiency. 
+ """ + + def _compute_efficiency(self, historical_data: List[Dict]) -> float: + """Mean efficiency: generation / (generation + consumption).""" + if not historical_data: + return 0.5 # Default 50% + efficiencies = [] + for entry in historical_data: + g = float(entry.get("generation", 0)) + c = float(entry.get("consumption", 0)) + total = g + c + if total > 0: + efficiencies.append(g / total) + return float(np.mean(efficiencies)) if efficiencies else 0.5 + + def _regress_trend(self, historical_data: List[Dict]) -> float: + """ + Use linear regression on generation values to predict trend. + Returns slope normalized to [-1, 1]. + """ + if not SKLEARN_AVAILABLE or len(historical_data) < 3: + return 0.0 + gens = [float(e.get("generation", 0)) for e in historical_data] + X = np.arange(len(gens)).reshape(-1, 1) + y = np.array(gens) + model = LinearRegression().fit(X, y) + slope = model.coef_[0] + max_gen = max(gens) if max(gens) > 0 else 1 + return float(np.clip(slope / max_gen, -1, 1)) + + def _score_source( + self, + source: str, + weather: Dict[str, float], + efficiency: float, + trend: float, + ) -> float: + """ + Compute score S = Σ(W_i * V_i) where: + - W_i = weather weight for this source + - V_i = normalized weather feature value + Plus bonuses for efficiency and trend. 
+ """ + weights = WEATHER_WEIGHTS.get(source, WEATHER_WEIGHTS["solar"]) + score = 0.0 + total_weight = sum(abs(w) for w in weights.values()) + + for feature, weight in weights.items(): + val = weather.get(feature, 0.0) + min_v, max_v = WEATHER_RANGES[feature] + norm_val = normalize(val, min_v, max_v) + # Negative weights penalize the score + score += weight * norm_val + + # Normalize to [0, 100] + score = (score / (total_weight or 1)) * 100 + + # Efficiency bonus (up to 15 points) + score += efficiency * 15 + + # Trend bonus (up to 5 points) + score += trend * 5 + + return round(max(0.0, min(100.0, score)), 2) + + def recommend( + self, + installation_id: str, + installation_type: Optional[str], + historical_data: List[Dict], + weather: Dict[str, float], + ) -> Dict[str, Any]: + """ + Main recommendation method. Returns recommended source, score, and estimated earnings. + """ + efficiency = self._compute_efficiency(historical_data) + trend = self._regress_trend(historical_data) + + sources = ["solar", "wind", "biogas"] + scores = { + src: self._score_source(src, weather, efficiency, trend) + for src in sources + } + + # If installation type is known, boost that source + if installation_type in sources: + scores[installation_type] = min(100.0, scores[installation_type] * 1.1) + + best_source = max(scores, key=scores.__getitem__) + best_score = scores[best_source] + + # Estimated surplus kWh based on efficiency and trend + if historical_data: + avg_gen = np.mean([float(e.get("generation", 0)) for e in historical_data]) + avg_con = np.mean([float(e.get("consumption", 0)) for e in historical_data]) + surplus_kwh = max(0.0, float(avg_gen - avg_con)) + else: + surplus_kwh = 0.0 + + estimated_earnings = round(surplus_kwh * ETH_PER_KWH, 6) + + return { + "recommended_source": best_source, + "score": best_score, + "estimated_earnings": estimated_earnings, + "all_scores": scores, + "efficiency": round(efficiency * 100, 2), + } + + def generate_report( + self, + path: str, + 
installation_id: str, + result: Dict[str, Any], + weather: Dict[str, float], + ) -> None: + """Generate a PDF report for the prediction result.""" + if not REPORTLAB_AVAILABLE: + # Write a placeholder text file if reportlab not available + with open(path.replace(".pdf", ".txt"), "w") as f: + f.write(f"Report for installation {installation_id}\n{result}") + return + + doc = SimpleDocTemplate(path, pagesize=letter) + styles = getSampleStyleSheet() + story = [] + + story.append(Paragraph("⚡ Faltric Energy Recommendation Report", styles["Title"])) + story.append(Spacer(1, 12)) + story.append(Paragraph(f"Installation ID: {installation_id}", styles["Normal"])) + story.append(Spacer(1, 12)) + + story.append(Paragraph("Recommendation Summary", styles["Heading2"])) + summary_data = [ + ["Field", "Value"], + ["Recommended Source", result.get("recommended_source", "—").capitalize()], + ["Score", f"{result.get('score', 0):.2f} / 100"], + ["Estimated Earnings", f"{result.get('estimated_earnings', 0):.6f} ETH"], + ["Historical Efficiency", f"{result.get('efficiency', 0):.1f}%"], + ] + t = Table(summary_data, colWidths=[200, 300]) + t.setStyle(TableStyle([ + ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#16a34a")), + ("TEXTCOLOR", (0, 0), (-1, 0), colors.white), + ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"), + ("ROWBACKGROUNDS", (0, 1), (-1, -1), [colors.HexColor("#f0fdf4"), colors.white]), + ("GRID", (0, 0), (-1, -1), 0.5, colors.HexColor("#d1fae5")), + ("PADDING", (0, 0), (-1, -1), 8), + ])) + story.append(t) + story.append(Spacer(1, 16)) + + story.append(Paragraph("Weather Conditions", styles["Heading2"])) + weather_data = [ + ["Metric", "Value"], + ["Solar Irradiance", f"{weather.get('solar_irradiance', 0):.1f} W/m²"], + ["Wind Speed", f"{weather.get('wind_speed', 0):.1f} m/s"], + ["Precipitation", f"{weather.get('precipitation', 0):.1f} mm"], + ] + wt = Table(weather_data, colWidths=[200, 300]) + wt.setStyle(TableStyle([ + ("BACKGROUND", (0, 0), (-1, 0), 
colors.HexColor("#1e40af")), + ("TEXTCOLOR", (0, 0), (-1, 0), colors.white), + ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"), + ("ROWBACKGROUNDS", (0, 1), (-1, -1), [colors.HexColor("#eff6ff"), colors.white]), + ("GRID", (0, 0), (-1, -1), 0.5, colors.HexColor("#bfdbfe")), + ("PADDING", (0, 0), (-1, -1), 8), + ])) + story.append(wt) + story.append(Spacer(1, 16)) + + # All scores + all_scores = result.get("all_scores", {}) + if all_scores: + story.append(Paragraph("All Source Scores", styles["Heading2"])) + score_data = [["Source", "Score"]] + [ + [src.capitalize(), f"{score:.2f}"] for src, score in all_scores.items() + ] + st = Table(score_data, colWidths=[200, 300]) + st.setStyle(TableStyle([ + ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#374151")), + ("TEXTCOLOR", (0, 0), (-1, 0), colors.white), + ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"), + ("ROWBACKGROUNDS", (0, 1), (-1, -1), [colors.HexColor("#f9fafb"), colors.white]), + ("GRID", (0, 0), (-1, -1), 0.5, colors.HexColor("#e5e7eb")), + ("PADDING", (0, 0), (-1, -1), 8), + ])) + story.append(st) + + story.append(Spacer(1, 20)) + story.append(Paragraph("Generated by Faltric AI Service", styles["Normal"])) + + doc.build(story) diff --git a/ai-service/requirements.txt b/ai-service/requirements.txt new file mode 100644 index 0000000..3ab2bf1 --- /dev/null +++ b/ai-service/requirements.txt @@ -0,0 +1,8 @@ +fastapi==0.111.0 +uvicorn[standard]==0.30.1 +pydantic==2.7.1 +scikit-learn==1.5.0 +numpy==1.26.4 +httpx==0.27.0 +reportlab==4.2.0 +python-dotenv==1.0.1 diff --git a/backend/.env.example b/backend/.env.example new file mode 100644 index 0000000..761641f --- /dev/null +++ b/backend/.env.example @@ -0,0 +1,25 @@ +# Backend Environment Variables +NODE_ENV=development +PORT=4000 + +# MongoDB +MONGODB_URI=mongodb://localhost:27017/faltric + +# Redis +REDIS_URL=redis://localhost:6379 + +# JWT +JWT_SECRET=your_super_secret_jwt_key_change_in_production +JWT_EXPIRES_IN=7d + +# Blockchain 
+DEPLOYER_PRIVATE_KEY=0x0000000000000000000000000000000000000000000000000000000000000001 +SEPOLIA_RPC_URL=https://rpc.sepolia.org +ENERGY_TOKEN_ADDRESS=0x0000000000000000000000000000000000000000 +TRADE_ENGINE_ADDRESS=0x0000000000000000000000000000000000000000 + +# AI Service +AI_SERVICE_URL=http://localhost:8000 + +# Etherscan (optional, for verification) +ETHERSCAN_API_KEY= diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..975bed1 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,7 @@ +FROM node:20-alpine +WORKDIR /app +COPY package*.json ./ +RUN npm ci --omit=dev +COPY . . +EXPOSE 4000 +CMD ["node", "src/index.js"] diff --git a/backend/package.json b/backend/package.json new file mode 100644 index 0000000..a57e01e --- /dev/null +++ b/backend/package.json @@ -0,0 +1,31 @@ +{ + "name": "faltric-backend", + "version": "1.0.0", + "description": "Faltric backend API server", + "main": "src/index.js", + "scripts": { + "start": "node src/index.js", + "dev": "nodemon src/index.js", + "test": "jest --detectOpenHandles" + }, + "dependencies": { + "bcryptjs": "^2.4.3", + "cors": "^2.8.5", + "dotenv": "^16.4.5", + "ethers": "^6.11.0", + "express": "^4.19.2", + "express-rate-limit": "^7.3.1", + "express-validator": "^7.1.0", + "ioredis": "^5.3.2", + "jsonwebtoken": "^9.0.2", + "mongoose": "^8.4.0", + "morgan": "^1.10.0", + "socket.io": "^4.7.5", + "uuid": "^10.0.0" + }, + "devDependencies": { + "nodemon": "^3.1.3", + "jest": "^29.7.0", + "supertest": "^7.0.0" + } +} diff --git a/backend/src/config/db.js b/backend/src/config/db.js new file mode 100644 index 0000000..9f2bfd7 --- /dev/null +++ b/backend/src/config/db.js @@ -0,0 +1,40 @@ +const mongoose = require("mongoose"); +const Redis = require("ioredis"); + +let redisClient; + +async function connectDB() { + const uri = process.env.MONGODB_URI || "mongodb://localhost:27017/faltric"; + try { + await mongoose.connect(uri); + console.log("MongoDB connected:", uri); + } catch (err) { + 
console.error("MongoDB connection error:", err.message); + process.exit(1); + } +} + +async function connectRedis() { + const url = process.env.REDIS_URL || "redis://localhost:6379"; + redisClient = new Redis(url, { + maxRetriesPerRequest: 3, + lazyConnect: true, + }); + + redisClient.on("error", (err) => { + console.error("Redis error:", err.message); + }); + + try { + await redisClient.connect(); + console.log("Redis connected:", url); + } catch (err) { + console.warn("Redis connection failed (continuing without cache):", err.message); + } +} + +function getRedis() { + return redisClient; +} + +module.exports = { connectDB, connectRedis, getRedis }; diff --git a/backend/src/index.js b/backend/src/index.js new file mode 100644 index 0000000..b094b13 --- /dev/null +++ b/backend/src/index.js @@ -0,0 +1,130 @@ +require("dotenv").config(); +const express = require("express"); +const http = require("http"); +const cors = require("cors"); +const morgan = require("morgan"); +const { Server } = require("socket.io"); +const rateLimit = require("express-rate-limit"); + +const { connectDB, connectRedis } = require("./config/db"); +const authRoutes = require("./routes/auth"); +const installationsRoutes = require("./routes/installations"); +const meterRoutes = require("./routes/meter"); +const predictRoutes = require("./routes/predict"); +const { verifyToken } = require("./middleware/auth"); +const { startMeterPoller } = require("./services/meterPoller"); +const ChatMessage = require("./models/ChatMessage"); + +const app = express(); +const server = http.createServer(app); + +const io = new Server(server, { + cors: { + origin: process.env.FRONTEND_URL || "*", + methods: ["GET", "POST"], + }, +}); + +// Middleware +app.use(cors({ origin: process.env.FRONTEND_URL || "*" })); +app.use(express.json()); +app.use(morgan(process.env.NODE_ENV === "production" ? 
"combined" : "dev")); + +// Rate limiting +const apiLimiter = rateLimit({ + windowMs: 15 * 60 * 1000, + max: 100, + standardHeaders: true, + legacyHeaders: false, +}); +app.use("/api/", apiLimiter); + +// Routes +app.use("/api/auth", authRoutes); +app.use("/api/installations", installationsRoutes); +app.use("/api/meter", meterRoutes); +app.use("/api/predict", predictRoutes); + +// Health check +app.get("/health", (req, res) => res.json({ status: "ok", timestamp: new Date().toISOString() })); + +// 404 handler +app.use((req, res) => res.status(404).json({ error: "Not found" })); + +// Error handler +app.use((err, req, res, next) => { + console.error(err.stack); + res.status(err.status || 500).json({ error: err.message || "Internal server error" }); +}); + +// Socket.io - Real-time chat +io.use((socket, next) => { + const token = socket.handshake.auth?.token; + if (!token) return next(new Error("Authentication required")); + try { + const decoded = verifyToken(token); + socket.user = decoded; + next(); + } catch (err) { + next(new Error("Invalid token")); + } +}); + +io.on("connection", (socket) => { + console.log(`Socket connected: ${socket.id} (${socket.user?.walletAddress || socket.user?.email})`); + + socket.on("join_room", (room) => { + socket.join(room); + socket.emit("joined", { room }); + }); + + socket.on("send_message", async (data) => { + try { + const { room, message } = data; + const sender = socket.user?.walletAddress || socket.user?.email; + const chatMsg = await ChatMessage.create({ sender, message, room }); + io.to(room).emit("new_message", { + id: chatMsg._id, + sender: chatMsg.sender, + message: chatMsg.message, + timestamp: chatMsg.timestamp, + }); + } catch (err) { + socket.emit("error", { message: "Failed to save message" }); + } + }); + + socket.on("get_history", async ({ room, limit = 50 }) => { + try { + const messages = await ChatMessage.find({ room }) + .sort({ timestamp: -1 }) + .limit(limit) + .lean(); + socket.emit("message_history", 
messages.reverse()); + } catch (err) { + socket.emit("error", { message: "Failed to fetch history" }); + } + }); + + socket.on("disconnect", () => { + console.log(`Socket disconnected: ${socket.id}`); + }); +}); + +const PORT = process.env.PORT || 4000; + +async function start() { + await connectDB(); + await connectRedis(); + server.listen(PORT, () => { + console.log(`Faltric backend running on port ${PORT}`); + startMeterPoller(); + }); +} + +start().catch((err) => { + console.error("Failed to start server:", err); + process.exit(1); +}); + +module.exports = { app, server }; diff --git a/backend/src/middleware/auth.js b/backend/src/middleware/auth.js new file mode 100644 index 0000000..f2575ab --- /dev/null +++ b/backend/src/middleware/auth.js @@ -0,0 +1,35 @@ +const jwt = require("jsonwebtoken"); + +const JWT_SECRET = process.env.JWT_SECRET || "fallback_secret_do_not_use_in_production"; +const JWT_EXPIRES_IN = process.env.JWT_EXPIRES_IN || "7d"; + +function signToken(payload) { + return jwt.sign(payload, JWT_SECRET, { expiresIn: JWT_EXPIRES_IN }); +} + +function verifyToken(token) { + return jwt.verify(token, JWT_SECRET); +} + +function authenticate(req, res, next) { + const authHeader = req.headers.authorization; + if (!authHeader || !authHeader.startsWith("Bearer ")) { + return res.status(401).json({ error: "No token provided" }); + } + const token = authHeader.slice(7); + try { + req.user = verifyToken(token); + next(); + } catch (err) { + return res.status(401).json({ error: "Invalid or expired token" }); + } +} + +function requireAdmin(req, res, next) { + if (!req.user || req.user.role !== "admin") { + return res.status(403).json({ error: "Admin access required" }); + } + next(); +} + +module.exports = { signToken, verifyToken, authenticate, requireAdmin }; diff --git a/backend/src/models/ChatMessage.js b/backend/src/models/ChatMessage.js new file mode 100644 index 0000000..16cede0 --- /dev/null +++ b/backend/src/models/ChatMessage.js @@ -0,0 +1,28 @@ 
+const mongoose = require("mongoose"); + +const chatMessageSchema = new mongoose.Schema( + { + sender: { + type: String, // walletAddress or email + required: true, + trim: true, + }, + message: { + type: String, + required: true, + trim: true, + maxlength: 2000, + }, + room: { + type: String, + required: true, + default: "general", + index: true, + }, + }, + { + timestamps: { createdAt: "timestamp", updatedAt: false }, + } +); + +module.exports = mongoose.model("ChatMessage", chatMessageSchema); diff --git a/backend/src/models/Installation.js b/backend/src/models/Installation.js new file mode 100644 index 0000000..a2cd213 --- /dev/null +++ b/backend/src/models/Installation.js @@ -0,0 +1,65 @@ +const mongoose = require("mongoose"); + +const installationSchema = new mongoose.Schema( + { + name: { + type: String, + required: true, + trim: true, + }, + type: { + type: String, + enum: ["solar", "wind", "biogas"], + required: true, + }, + coordinates: { + type: { + type: String, + enum: ["Point"], + default: "Point", + }, + coordinates: { + type: [Number], // [longitude, latitude] + required: true, + }, + }, + owner: { + type: mongoose.Schema.Types.ObjectId, + ref: "User", + required: true, + }, + walletAddress: { + type: String, + required: true, + lowercase: true, + trim: true, + }, + capacityKw: { + type: Number, + required: true, + min: 0, + }, + verified: { + type: Boolean, + default: false, + }, + status: { + type: String, + enum: ["pending", "approved", "rejected"], + default: "pending", + }, + rejectionReason: { + type: String, + default: null, + }, + polygon: { + type: mongoose.Schema.Types.Mixed, // GeoJSON polygon from admin draw + default: null, + }, + }, + { timestamps: true } +); + +installationSchema.index({ coordinates: "2dsphere" }); + +module.exports = mongoose.model("Installation", installationSchema); diff --git a/backend/src/models/MeterReading.js b/backend/src/models/MeterReading.js new file mode 100644 index 0000000..1d91841 --- /dev/null +++ 
b/backend/src/models/MeterReading.js @@ -0,0 +1,43 @@ +const mongoose = require("mongoose"); + +const meterReadingSchema = new mongoose.Schema( + { + installationId: { + type: mongoose.Schema.Types.ObjectId, + ref: "Installation", + required: true, + index: true, + }, + generation: { + type: Number, // kWh generated + required: true, + min: 0, + }, + consumption: { + type: Number, // kWh consumed + required: true, + min: 0, + }, + surplus: { + type: Number, // max(0, generation - consumption) + required: true, + min: 0, + }, + tokensMinted: { + type: Number, + default: 0, + }, + txHash: { + type: String, + default: null, + }, + source: { + type: String, + enum: ["poller", "manual", "api"], + default: "poller", + }, + }, + { timestamps: true } +); + +module.exports = mongoose.model("MeterReading", meterReadingSchema); diff --git a/backend/src/models/User.js b/backend/src/models/User.js new file mode 100644 index 0000000..7ec63fd --- /dev/null +++ b/backend/src/models/User.js @@ -0,0 +1,53 @@ +const mongoose = require("mongoose"); +const bcrypt = require("bcryptjs"); + +const userSchema = new mongoose.Schema( + { + email: { + type: String, + required: true, + unique: true, + lowercase: true, + trim: true, + }, + passwordHash: { + type: String, + required: true, + }, + walletAddress: { + type: String, + unique: true, + sparse: true, + lowercase: true, + trim: true, + }, + role: { + type: String, + enum: ["user", "admin"], + default: "user", + }, + isActive: { + type: Boolean, + default: true, + }, + }, + { timestamps: true } +); + +userSchema.pre("save", async function (next) { + if (!this.isModified("passwordHash")) return next(); + this.passwordHash = await bcrypt.hash(this.passwordHash, 12); + next(); +}); + +userSchema.methods.comparePassword = function (password) { + return bcrypt.compare(password, this.passwordHash); +}; + +userSchema.methods.toSafeObject = function () { + const obj = this.toObject(); + delete obj.passwordHash; + return obj; +}; + 
const express = require("express");
const { body, validationResult } = require("express-validator");
const User = require("../models/User");
const { signToken, authenticate } = require("../middleware/auth");

const router = express.Router();

/**
 * Build the JWT claims for a user.
 *
 * Centralised so register, login and link-wallet all issue tokens with the
 * same shape. (Previously register omitted `walletAddress` — harmless for a
 * brand-new user, but clients could not rely on the claim being present.)
 */
function tokenPayload(user) {
  return {
    id: user._id,
    email: user.email,
    role: user.role,
    walletAddress: user.walletAddress,
  };
}

// POST /api/auth/register — create an account and return a signed JWT.
router.post(
  "/register",
  [
    body("email").isEmail().normalizeEmail(),
    body("password").isLength({ min: 8 }).withMessage("Password must be at least 8 characters"),
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) return res.status(400).json({ errors: errors.array() });

    try {
      const { email, password } = req.body;
      const existing = await User.findOne({ email });
      if (existing) return res.status(409).json({ error: "Email already registered" });

      // NOTE(review): the raw password is assigned to `passwordHash` — this
      // assumes the User schema hashes it in a pre-save hook; confirm.
      const user = await User.create({ email, passwordHash: password });
      const token = signToken(tokenPayload(user));

      res.status(201).json({ token, user: user.toSafeObject() });
    } catch (err) {
      console.error("Register error:", err);
      res.status(500).json({ error: "Registration failed" });
    }
  }
);

// POST /api/auth/login — verify credentials and return a signed JWT.
router.post(
  "/login",
  [body("email").isEmail().normalizeEmail(), body("password").notEmpty()],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) return res.status(400).json({ errors: errors.array() });

    try {
      const { email, password } = req.body;
      const user = await User.findOne({ email });
      // Same message for unknown email and wrong password so the endpoint
      // does not reveal which accounts exist.
      if (!user) return res.status(401).json({ error: "Invalid credentials" });

      const isValid = await user.comparePassword(password);
      if (!isValid) return res.status(401).json({ error: "Invalid credentials" });

      const token = signToken(tokenPayload(user));
      res.json({ token, user: user.toSafeObject() });
    } catch (err) {
      console.error("Login error:", err);
      res.status(500).json({ error: "Login failed" });
    }
  }
);

// POST /api/auth/link-wallet — attach an Ethereum address to the account
// and re-issue the JWT so the walletAddress claim is current.
router.post(
  "/link-wallet",
  authenticate,
  [body("walletAddress").matches(/^0x[a-fA-F0-9]{40}$/).withMessage("Invalid wallet address")],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) return res.status(400).json({ errors: errors.array() });

    try {
      const { walletAddress } = req.body;
      // Stored lower-cased so the uniqueness check below is case-insensitive.
      const normalized = walletAddress.toLowerCase();

      const existing = await User.findOne({ walletAddress: normalized });
      if (existing && existing._id.toString() !== req.user.id) {
        return res.status(409).json({ error: "Wallet already linked to another account" });
      }

      const user = await User.findByIdAndUpdate(
        req.user.id,
        { walletAddress: normalized },
        { new: true }
      );

      const token = signToken(tokenPayload(user));
      res.json({ token, user: user.toSafeObject() });
    } catch (err) {
      console.error("Link wallet error:", err);
      res.status(500).json({ error: "Failed to link wallet" });
    }
  }
);

// GET /api/auth/me — return the authenticated user's profile (hash excluded).
router.get("/me", authenticate, async (req, res) => {
  try {
    const user = await User.findById(req.user.id).select("-passwordHash");
    if (!user) return res.status(404).json({ error: "User not found" });
    res.json({ user });
  } catch (err) {
    res.status(500).json({ error: "Failed to fetch user" });
  }
});

module.exports = router;
const express = require("express");
const { body, validationResult } = require("express-validator");
const Installation = require("../models/Installation");
const { authenticate, requireAdmin } = require("../middleware/auth");

const router = express.Router();

/**
 * Shorten a wallet address for public display ("0x1234...abcd"), or return
 * "Unknown" when no wallet is linked. Used by every public endpoint so the
 * owner's identity is anonymised consistently.
 */
function maskWallet(walletAddress) {
  return walletAddress
    ? `${walletAddress.slice(0, 6)}...${walletAddress.slice(-4)}`
    : "Unknown";
}

// GET /api/installations — list installations for the public map, with
// optional `type` / `verified` filters. Owner is anonymised.
router.get("/", async (req, res) => {
  try {
    const { type, verified } = req.query;
    const filter = {};
    if (type) filter.type = type;
    if (verified !== undefined) filter.verified = verified === "true";

    const installations = await Installation.find(filter)
      .populate("owner", "email walletAddress")
      .lean();

    const sanitized = installations.map((inst) => ({
      ...inst,
      owner: maskWallet(inst.owner?.walletAddress),
    }));

    res.json({ installations: sanitized });
  } catch (err) {
    console.error("List installations error:", err);
    res.status(500).json({ error: "Failed to fetch installations" });
  }
});

// GET /api/installations/my — list the caller's own installations.
router.get("/my", authenticate, async (req, res) => {
  try {
    const installations = await Installation.find({ owner: req.user.id }).lean();
    res.json({ installations });
  } catch (err) {
    res.status(500).json({ error: "Failed to fetch installations" });
  }
});

// GET /api/installations/:id — public detail view. Owner is anonymised the
// same way as in the list endpoint (previously the full wallet address of
// the owner leaked from this route).
router.get("/:id", async (req, res) => {
  try {
    const installation = await Installation.findById(req.params.id)
      .populate("owner", "walletAddress")
      .lean();
    if (!installation) return res.status(404).json({ error: "Installation not found" });
    installation.owner = maskWallet(installation.owner?.walletAddress);
    res.json({ installation });
  } catch (err) {
    res.status(500).json({ error: "Failed to fetch installation" });
  }
});

// POST /api/installations — create an installation; requires a linked wallet
// because minted tokens are sent to the installation's wallet.
router.post(
  "/",
  authenticate,
  [
    body("name").notEmpty().trim(),
    body("type").isIn(["solar", "wind", "biogas"]),
    body("coordinates").isArray({ min: 2, max: 2 }),
    body("capacityKw").isFloat({ min: 0 }),
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) return res.status(400).json({ errors: errors.array() });

    try {
      const { name, type, coordinates, capacityKw, polygon } = req.body;
      const walletAddress = req.user.walletAddress;
      if (!walletAddress) {
        return res.status(400).json({ error: "Wallet not linked. Please link your wallet first." });
      }

      const installation = await Installation.create({
        name,
        type,
        coordinates: { type: "Point", coordinates }, // GeoJSON Point: [lng, lat]
        owner: req.user.id,
        walletAddress,
        capacityKw,
        polygon: polygon || null,
      });

      res.status(201).json({ installation });
    } catch (err) {
      console.error("Create installation error:", err);
      res.status(500).json({ error: "Failed to create installation" });
    }
  }
);

// PATCH /api/installations/:id — update own installation. Only a whitelist
// of fields may change; ownership/verification stay admin-controlled.
router.patch("/:id", authenticate, async (req, res) => {
  try {
    const installation = await Installation.findOne({ _id: req.params.id, owner: req.user.id });
    if (!installation) return res.status(404).json({ error: "Installation not found" });

    const allowedFields = ["name", "capacityKw", "polygon"];
    allowedFields.forEach((field) => {
      if (req.body[field] !== undefined) installation[field] = req.body[field];
    });

    await installation.save();
    res.json({ installation });
  } catch (err) {
    res.status(500).json({ error: "Failed to update installation" });
  }
});

// DELETE /api/installations/:id — owners delete their own; admins any.
router.delete("/:id", authenticate, async (req, res) => {
  try {
    const filter = { _id: req.params.id };
    if (req.user.role !== "admin") filter.owner = req.user.id;

    const installation = await Installation.findOneAndDelete(filter);
    if (!installation) return res.status(404).json({ error: "Installation not found" });

    res.json({ message: "Installation deleted" });
  } catch (err) {
    res.status(500).json({ error: "Failed to delete installation" });
  }
});

// PATCH /api/installations/:id/approve — admin: approve and mark verified.
router.patch("/:id/approve", authenticate, requireAdmin, async (req, res) => {
  try {
    const installation = await Installation.findByIdAndUpdate(
      req.params.id,
      { status: "approved", verified: true, rejectionReason: null },
      { new: true }
    );
    if (!installation) return res.status(404).json({ error: "Installation not found" });
    res.json({ installation });
  } catch (err) {
    res.status(500).json({ error: "Failed to approve installation" });
  }
});

// PATCH /api/installations/:id/reject — admin: reject with optional reason.
router.patch("/:id/reject", authenticate, requireAdmin, async (req, res) => {
  try {
    const { reason } = req.body;
    const installation = await Installation.findByIdAndUpdate(
      req.params.id,
      { status: "rejected", verified: false, rejectionReason: reason || "No reason provided" },
      { new: true }
    );
    if (!installation) return res.status(404).json({ error: "Installation not found" });
    res.json({ installation });
  } catch (err) {
    res.status(500).json({ error: "Failed to reject installation" });
  }
});

// GET /api/installations/admin/pending — admin: list pending submissions.
// Two path segments, so it is not shadowed by the `/:id` route above.
router.get("/admin/pending", authenticate, requireAdmin, async (req, res) => {
  try {
    const installations = await Installation.find({ status: "pending" })
      .populate("owner", "email walletAddress")
      .lean();
    res.json({ installations });
  } catch (err) {
    res.status(500).json({ error: "Failed to fetch pending installations" });
  }
});

module.exports = router;
const router = express.Router();
// Hoisted: previously `require("mongoose")` was called inline inside the
// stats aggregation on every request.
const mongoose = require("mongoose");

// GET /api/meter/readings — paginated readings, scoped to installations the
// caller owns (admins see everything).
router.get("/readings", authenticate, async (req, res) => {
  try {
    const { installationId } = req.query;
    // Defensive pagination: radix-10 parse, fall back to the old defaults
    // (limit 100, page 1), and clamp so a crafted query cannot request an
    // unbounded page or a non-positive one.
    const limit = Math.min(Math.max(parseInt(req.query.limit, 10) || 100, 1), 500);
    const page = Math.max(parseInt(req.query.page, 10) || 1, 1);

    const filter = {};
    if (installationId) {
      // Non-admins may only read their own installation's data.
      if (req.user.role !== "admin") {
        const inst = await Installation.findOne({ _id: installationId, owner: req.user.id });
        if (!inst) return res.status(403).json({ error: "Access denied" });
      }
      filter.installationId = installationId;
    } else if (req.user.role !== "admin") {
      const userInstallations = await Installation.find({ owner: req.user.id }).select("_id");
      filter.installationId = { $in: userInstallations.map((i) => i._id) };
    }

    const skip = (page - 1) * limit;
    // The page query and the total count are independent — run in parallel.
    const [readings, total] = await Promise.all([
      MeterReading.find(filter).sort({ createdAt: -1 }).skip(skip).limit(limit).lean(),
      MeterReading.countDocuments(filter),
    ]);

    res.json({ readings, total, page, limit });
  } catch (err) {
    console.error("Meter readings error:", err);
    res.status(500).json({ error: "Failed to fetch readings" });
  }
});

// POST /api/meter/sync — manually record a reading for a verified
// installation the caller owns. Surplus is clamped at zero.
router.post(
  "/sync",
  authenticate,
  [
    body("installationId").notEmpty(),
    body("generation").isFloat({ min: 0 }),
    body("consumption").isFloat({ min: 0 }),
  ],
  async (req, res) => {
    const errors = validationResult(req);
    if (!errors.isEmpty()) return res.status(400).json({ errors: errors.array() });

    try {
      const { installationId, generation, consumption } = req.body;

      const installation = await Installation.findOne({
        _id: installationId,
        owner: req.user.id,
        verified: true,
      });
      if (!installation) return res.status(404).json({ error: "Verified installation not found" });

      const surplus = Math.max(0, generation - consumption);

      const reading = await MeterReading.create({
        installationId,
        generation,
        consumption,
        surplus,
        source: "manual",
      });

      res.status(201).json({ reading });
    } catch (err) {
      console.error("Meter sync error:", err);
      res.status(500).json({ error: "Failed to sync reading" });
    }
  }
);

// GET /api/meter/stats/:installationId — lifetime aggregates for one
// installation (owner or admin only).
router.get("/stats/:installationId", authenticate, async (req, res) => {
  try {
    const { installationId } = req.params;

    if (req.user.role !== "admin") {
      const inst = await Installation.findOne({ _id: installationId, owner: req.user.id });
      if (!inst) return res.status(403).json({ error: "Access denied" });
    }

    // $match needs a real ObjectId — the path param is a string.
    const stats = await MeterReading.aggregate([
      { $match: { installationId: new mongoose.Types.ObjectId(installationId) } },
      {
        $group: {
          _id: null,
          totalGeneration: { $sum: "$generation" },
          totalConsumption: { $sum: "$consumption" },
          totalSurplus: { $sum: "$surplus" },
          totalTokensMinted: { $sum: "$tokensMinted" },
          readingCount: { $sum: 1 },
        },
      },
    ]);

    res.json({ stats: stats[0] || {} });
  } catch (err) {
    res.status(500).json({ error: "Failed to fetch stats" });
  }
});

module.exports = router;
const express = require("express");
const { authenticate } = require("../middleware/auth");
const Installation = require("../models/Installation");
const MeterReading = require("../models/MeterReading");

const router = express.Router();

const AI_SERVICE_URL = process.env.AI_SERVICE_URL || "http://localhost:8000";
// Abort the upstream AI call if it hangs so this route cannot pin a
// connection indefinitely.
const AI_TIMEOUT_MS = 10_000;

// GET /api/predict/:installationId — proxy the installation's recent
// readings plus a weather payload to the Python AI service and return its
// prediction. 404 when the caller may not see the installation.
router.get("/:installationId", authenticate, async (req, res) => {
  try {
    const { installationId } = req.params;

    // Non-admins may only query installations they own.
    const filter = { _id: installationId };
    if (req.user.role !== "admin") filter.owner = req.user.id;

    const installation = await Installation.findOne(filter).lean();
    if (!installation) return res.status(404).json({ error: "Installation not found" });

    // Last 30 readings, newest first, as model history.
    const readings = await MeterReading.find({ installationId })
      .sort({ createdAt: -1 })
      .limit(30)
      .lean();

    const historical_data = readings.map((r) => ({
      generation: r.generation,
      consumption: r.consumption,
      date: r.createdAt,
    }));

    // Static weather payload for now — TODO: fetch from a real weather API.
    const weather = {
      solar_irradiance: 500,
      wind_speed: 5.0,
      precipitation: 0.0,
    };

    const payload = {
      installation_id: installationId,
      installation_type: installation.type,
      historical_data,
      weather,
    };

    // Network failure or timeout of the AI service is an upstream problem:
    // report 502, not a generic 500 from the outer catch.
    let aiRes;
    try {
      aiRes = await fetch(`${AI_SERVICE_URL}/predict`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(payload),
        signal: AbortSignal.timeout(AI_TIMEOUT_MS),
      });
    } catch (fetchErr) {
      console.error("AI service error:", fetchErr.message);
      return res.status(502).json({ error: "AI service unavailable" });
    }

    if (!aiRes.ok) {
      const errText = await aiRes.text();
      console.error("AI service error:", errText);
      return res.status(502).json({ error: "AI service unavailable" });
    }

    const prediction = await aiRes.json();
    res.json({ prediction, installation: { id: installationId, type: installation.type } });
  } catch (err) {
    console.error("Predict error:", err);
    res.status(500).json({ error: "Failed to get prediction" });
  }
});

module.exports = router;
const { ethers } = require("ethers");
const Installation = require("../models/Installation");
const MeterReading = require("../models/MeterReading");

const POLL_INTERVAL_MS = 15 * 60 * 1000; // 15 minutes

// Minimal ABI: mint() for issuing tokens, balanceOf() for diagnostics.
const ENERGY_TOKEN_ABI = [
  "function mint(address to, uint256 amount) external",
  "function balanceOf(address account) view returns (uint256)",
];

// True while a poll cycle is in flight. Prevents overlapping cycles when a
// slow chain makes one cycle outlast the interval — overlap would reuse
// wallet nonces and double-record readings.
let polling = false;

/**
 * Mock smart-meter sample for one installation, scaled by its capacity.
 * In production this would call the real meter API. Values are random, so
 * each call differs.
 */
function getMockMeterData(installation) {
  const baseGeneration = installation.capacityKw * (0.3 + Math.random() * 0.6);
  const baseConsumption = installation.capacityKw * (0.1 + Math.random() * 0.3);
  return {
    generation: parseFloat(baseGeneration.toFixed(4)),
    consumption: parseFloat(baseConsumption.toFixed(4)),
  };
}

/**
 * One poll cycle: sample every verified installation, record a reading,
 * and — when blockchain config is present — mint ENRG for any surplus
 * (1 token = 1 kWh, 18 decimals). Never throws; errors are logged.
 */
async function pollAndMint() {
  if (polling) {
    console.warn("MeterPoller: previous cycle still running, skipping.");
    return;
  }
  polling = true;
  try {
    const installations = await Installation.find({ verified: true }).lean();
    if (!installations.length) return;

    const rpcUrl = process.env.SEPOLIA_RPC_URL || "http://127.0.0.1:8545";
    const privateKey = process.env.DEPLOYER_PRIVATE_KEY;
    const tokenAddress = process.env.ENERGY_TOKEN_ADDRESS;

    if (!privateKey || !tokenAddress || tokenAddress === "0x0000000000000000000000000000000000000000") {
      console.warn("MeterPoller: Blockchain config missing, skipping token minting.");
      // Still record readings so the dashboard keeps working without a chain.
      for (const installation of installations) {
        const { generation, consumption } = getMockMeterData(installation);
        const surplus = Math.max(0, generation - consumption);
        await MeterReading.create({
          installationId: installation._id,
          generation,
          consumption,
          surplus,
          source: "poller",
        });
      }
      return;
    }

    const provider = new ethers.JsonRpcProvider(rpcUrl);
    const wallet = new ethers.Wallet(privateKey, provider);
    const token = new ethers.Contract(tokenAddress, ENERGY_TOKEN_ABI, wallet);

    // Sequential on purpose: one signing wallet, so transactions must be
    // submitted in nonce order.
    for (const installation of installations) {
      try {
        const { generation, consumption } = getMockMeterData(installation);
        const surplus = Math.max(0, generation - consumption);

        let txHash = null;
        let tokensMinted = 0;

        if (surplus > 0) {
          // T = surplus kWh → mint T tokens (18 decimals).
          const amount = ethers.parseEther(surplus.toFixed(6));
          const tx = await token.mint(installation.walletAddress, amount);
          await tx.wait();
          txHash = tx.hash;
          tokensMinted = surplus;
          console.log(`Minted ${surplus} ENRG for ${installation.walletAddress} (tx: ${txHash})`);
        }

        await MeterReading.create({
          installationId: installation._id,
          generation,
          consumption,
          surplus,
          tokensMinted,
          txHash,
          source: "poller",
        });
      } catch (instErr) {
        // One bad installation must not abort the whole cycle.
        console.error(`MeterPoller: Error for installation ${installation._id}:`, instErr.message);
      }
    }
  } catch (err) {
    console.error("MeterPoller: Poll cycle error:", err.message);
  } finally {
    polling = false;
  }
}

/** Start the poller: one immediate cycle, then every POLL_INTERVAL_MS. */
function startMeterPoller() {
  console.log(`MeterPoller started (interval: ${POLL_INTERVAL_MS / 60000} min)`);
  pollAndMint(); // Run immediately on startup
  setInterval(pollAndMint, POLL_INTERVAL_MS);
}

module.exports = { startMeterPoller, pollAndMint };
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.24;

import "@openzeppelin/contracts/token/ERC20/IERC20.sol";
import "@openzeppelin/contracts/access/Ownable.sol";
import "@openzeppelin/contracts/utils/ReentrancyGuard.sol";

/**
 * @title TradeEngine
 * @dev Escrow contract for P2P energy token trading.
 *      Seller locks ENRG tokens, buyer locks ETH.
 *      Owner arbitrates via releaseTrade / cancelTrade; either party may
 *      also settle by agreement.
 */
contract TradeEngine is Ownable, ReentrancyGuard {
    IERC20 public immutable energyToken;

    enum TradeStatus { Open, Released, Cancelled }

    struct Trade {
        address seller;
        address buyer;
        uint256 tokenAmount;
        uint256 ethAmount;
        TradeStatus status;
    }

    uint256 public nextTradeId;
    mapping(uint256 => Trade) public trades;

    event TradeCreated(uint256 indexed tradeId, address indexed seller, address indexed buyer, uint256 tokenAmount, uint256 ethAmount);
    event TradeReleased(uint256 indexed tradeId);
    event TradeCancelled(uint256 indexed tradeId);

    constructor(address tokenAddress, address initialOwner) Ownable(initialOwner) {
        require(tokenAddress != address(0), "Invalid token address");
        energyToken = IERC20(tokenAddress);
    }

    /**
     * @dev Seller initiates trade by locking tokens. Buyer's ETH is supplied
     *      with the same call (msg.value). Requires prior ERC-20 approval.
     * @param buyer Address of the buyer
     * @param tokenAmount Amount of ENRG tokens to trade
     * @return tradeId Identifier of the newly created trade
     */
    function lockTrade(address buyer, uint256 tokenAmount) external payable nonReentrant returns (uint256 tradeId) {
        require(buyer != address(0) && buyer != msg.sender, "Invalid buyer");
        require(tokenAmount > 0, "Token amount must be > 0");
        require(msg.value > 0, "ETH amount must be > 0");

        // Transfer tokens from seller into escrow.
        bool transferred = energyToken.transferFrom(msg.sender, address(this), tokenAmount);
        require(transferred, "Token transfer failed");

        tradeId = nextTradeId++;
        trades[tradeId] = Trade({
            seller: msg.sender,
            buyer: buyer,
            tokenAmount: tokenAmount,
            ethAmount: msg.value,
            status: TradeStatus.Open
        });

        emit TradeCreated(tradeId, msg.sender, buyer, tokenAmount, msg.value);
    }

    /**
     * @dev Release trade: sends tokens to buyer, ETH to seller.
     *      Callable by owner (backend arbitrator) or by seller/buyer.
     *      Status is flipped before any external call (checks-effects-
     *      interactions), and the ERC-20 return value is checked: a token
     *      that returns false instead of reverting must not leave the trade
     *      marked Released without moving the tokens.
     */
    function releaseTrade(uint256 tradeId) external nonReentrant {
        Trade storage trade = trades[tradeId];
        require(trade.status == TradeStatus.Open, "Trade not open");
        require(
            msg.sender == owner() || msg.sender == trade.seller || msg.sender == trade.buyer,
            "Not authorized"
        );

        trade.status = TradeStatus.Released;

        require(energyToken.transfer(trade.buyer, trade.tokenAmount), "Token transfer failed");

        (bool sent, ) = trade.seller.call{value: trade.ethAmount}("");
        require(sent, "ETH transfer failed");

        emit TradeReleased(tradeId);
    }

    /**
     * @dev Cancel trade: refunds tokens to seller, ETH to buyer.
     *      Callable by owner or the original parties. Same return-value
     *      check as releaseTrade.
     */
    function cancelTrade(uint256 tradeId) external nonReentrant {
        Trade storage trade = trades[tradeId];
        require(trade.status == TradeStatus.Open, "Trade not open");
        require(
            msg.sender == owner() || msg.sender == trade.seller || msg.sender == trade.buyer,
            "Not authorized"
        );

        trade.status = TradeStatus.Cancelled;

        require(energyToken.transfer(trade.seller, trade.tokenAmount), "Token transfer failed");

        (bool sent, ) = trade.buyer.call{value: trade.ethAmount}("");
        require(sent, "ETH refund failed");

        emit TradeCancelled(tradeId);
    }

    /**
     * @dev Get trade details as a struct (mapping getter flattens it).
     */
    function getTrade(uint256 tradeId) external view returns (Trade memory) {
        return trades[tradeId];
    }
}
const { ethers } = require("hardhat");

/**
 * Deploy one contract by factory name, wait for confirmation, and return
 * its resolved address.
 */
async function deployContract(name, ...args) {
  const factory = await ethers.getContractFactory(name);
  const contract = await factory.deploy(...args);
  await contract.waitForDeployment();
  return contract.getAddress();
}

/**
 * Deploy EnergyToken then TradeEngine (which needs the token address),
 * logging a copy-pasteable env summary at the end.
 */
async function main() {
  const [deployer] = await ethers.getSigners();
  console.log("Deploying contracts with account:", deployer.address);

  const balance = await ethers.provider.getBalance(deployer.address);
  console.log("Account balance:", ethers.formatEther(balance), "ETH");

  const tokenAddress = await deployContract("EnergyToken", deployer.address);
  console.log("EnergyToken deployed to:", tokenAddress);

  const tradeEngineAddress = await deployContract("TradeEngine", tokenAddress, deployer.address);
  console.log("TradeEngine deployed to:", tradeEngineAddress);

  console.log("\nDeployment Summary:");
  console.log("===================");
  console.log(`ENERGY_TOKEN_ADDRESS=${tokenAddress}`);
  console.log(`TRADE_ENGINE_ADDRESS=${tradeEngineAddress}`);
  console.log(`DEPLOYER_ADDRESS=${deployer.address}`);
  console.log(`NETWORK=${(await ethers.provider.getNetwork()).name}`);
}

main()
  .then(() => process.exit(0))
  .catch((error) => {
    console.error(error);
    process.exit(1);
  });
const { expect } = require("chai");
const { ethers } = require("hardhat");

describe("EnergyToken", function () {
  let token;
  let admin;
  let delegate;
  let holder;

  // Fresh token per test; `admin` holds DEFAULT_ADMIN_ROLE and MINTER_ROLE
  // (both granted in the constructor).
  beforeEach(async function () {
    [admin, delegate, holder] = await ethers.getSigners();
    const factory = await ethers.getContractFactory("EnergyToken");
    token = await factory.deploy(admin.address);
    await token.waitForDeployment();
  });

  it("has correct name and symbol", async function () {
    expect(await token.name()).to.equal("FaltricEnergyToken");
    expect(await token.symbol()).to.equal("ENRG");
  });

  it("allows MINTER_ROLE to mint", async function () {
    const amount = ethers.parseEther("100");
    await token.mint(holder.address, amount);
    expect(await token.balanceOf(holder.address)).to.equal(amount);
  });

  it("prevents non-minter from minting", async function () {
    await expect(
      token.connect(holder).mint(holder.address, ethers.parseEther("100"))
    ).to.be.reverted;
  });

  it("allows granting MINTER_ROLE", async function () {
    await token.grantRole(await token.MINTER_ROLE(), delegate.address);
    await token.connect(delegate).mint(holder.address, ethers.parseEther("50"));
    expect(await token.balanceOf(holder.address)).to.equal(ethers.parseEther("50"));
  });

  it("allows burning tokens", async function () {
    await token.mint(holder.address, ethers.parseEther("100"));
    await token.connect(holder).burn(ethers.parseEther("30"));
    expect(await token.balanceOf(holder.address)).to.equal(ethers.parseEther("70"));
  });
});

describe("TradeEngine", function () {
  const tokenAmount = ethers.parseEther("100");
  const ethAmount = ethers.parseEther("0.1");
  let token;
  let engine;
  let admin;
  let seller;
  let buyer;

  beforeEach(async function () {
    [admin, seller, buyer] = await ethers.getSigners();

    const tokenFactory = await ethers.getContractFactory("EnergyToken");
    token = await tokenFactory.deploy(admin.address);
    await token.waitForDeployment();

    const engineFactory = await ethers.getContractFactory("TradeEngine");
    engine = await engineFactory.deploy(await token.getAddress(), admin.address);
    await engine.waitForDeployment();

    // Fund the seller and pre-approve the escrow for the full amount.
    await token.mint(seller.address, tokenAmount);
    await token.connect(seller).approve(await engine.getAddress(), tokenAmount);
  });

  // Seller escrows tokens, sending the buyer's ETH with the same call.
  const openTrade = () =>
    engine.connect(seller).lockTrade(buyer.address, tokenAmount, { value: ethAmount });

  it("locks a trade and emits TradeCreated", async function () {
    await expect(openTrade())
      .to.emit(engine, "TradeCreated")
      .withArgs(0, seller.address, buyer.address, tokenAmount, ethAmount);

    const trade = await engine.getTrade(0);
    expect(trade.seller).to.equal(seller.address);
    expect(trade.buyer).to.equal(buyer.address);
    expect(trade.tokenAmount).to.equal(tokenAmount);
    expect(trade.ethAmount).to.equal(ethAmount);
    expect(trade.status).to.equal(0); // Open
  });

  it("releases a trade", async function () {
    await openTrade();

    const sellerBalanceBefore = await ethers.provider.getBalance(seller.address);
    await expect(engine.connect(admin).releaseTrade(0))
      .to.emit(engine, "TradeReleased")
      .withArgs(0);

    expect(await token.balanceOf(buyer.address)).to.equal(tokenAmount);
    const sellerBalanceAfter = await ethers.provider.getBalance(seller.address);
    expect(sellerBalanceAfter).to.be.gt(sellerBalanceBefore);

    expect((await engine.getTrade(0)).status).to.equal(1); // Released
  });

  it("cancels a trade and refunds", async function () {
    await openTrade();

    await expect(engine.connect(admin).cancelTrade(0))
      .to.emit(engine, "TradeCancelled")
      .withArgs(0);

    expect(await token.balanceOf(seller.address)).to.equal(tokenAmount);
    expect((await engine.getTrade(0)).status).to.equal(2); // Cancelled
  });

  it("prevents releasing a non-open trade", async function () {
    await openTrade();
    await engine.connect(admin).releaseTrade(0);
    await expect(engine.connect(admin).releaseTrade(0)).to.be.revertedWith("Trade not open");
  });
});
${DEPLOYER_PRIVATE_KEY:-} + SEPOLIA_RPC_URL: ${SEPOLIA_RPC_URL:-} + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + depends_on: + mongo: + condition: service_healthy + redis: + condition: service_healthy + volumes: + - ./backend:/app + - /app/node_modules + + ai-service: + build: + context: ./ai-service + dockerfile: Dockerfile + container_name: faltric-ai + restart: unless-stopped + ports: + - "8000:8000" + environment: + PORT: 8000 + REPORTS_DIR: /tmp/faltric_reports + volumes: + - ai_reports:/tmp/faltric_reports + + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + args: + VITE_API_URL: ${VITE_API_URL:-http://localhost:4000/api} + VITE_SOCKET_URL: ${VITE_SOCKET_URL:-http://localhost:4000} + VITE_MAPBOX_TOKEN: ${VITE_MAPBOX_TOKEN:-} + VITE_TOKEN_ADDRESS: ${VITE_TOKEN_ADDRESS:-} + VITE_TRADE_ENGINE_ADDRESS: ${VITE_TRADE_ENGINE_ADDRESS:-} + container_name: faltric-frontend + restart: unless-stopped + ports: + - "3000:80" + depends_on: + - backend + +volumes: + mongo_data: + redis_data: + ai_reports: diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..5684283 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,22 @@ +FROM node:20-alpine AS builder +WORKDIR /app +COPY package*.json ./ +RUN npm ci +COPY . . 
+ARG VITE_API_URL +ARG VITE_SOCKET_URL +ARG VITE_MAPBOX_TOKEN +ARG VITE_TOKEN_ADDRESS +ARG VITE_TRADE_ENGINE_ADDRESS +ENV VITE_API_URL=$VITE_API_URL +ENV VITE_SOCKET_URL=$VITE_SOCKET_URL +ENV VITE_MAPBOX_TOKEN=$VITE_MAPBOX_TOKEN +ENV VITE_TOKEN_ADDRESS=$VITE_TOKEN_ADDRESS +ENV VITE_TRADE_ENGINE_ADDRESS=$VITE_TRADE_ENGINE_ADDRESS +RUN npm run build + +FROM nginx:alpine +COPY --from=builder /app/dist /usr/share/nginx/html +COPY nginx.conf /etc/nginx/conf.d/default.conf +EXPOSE 80 +CMD ["nginx", "-g", "daemon off;"] diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000..744d6f6 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,14 @@ + + + + + + + Faltric – P2P Energy Trading + + + +
+ + + diff --git a/frontend/nginx.conf b/frontend/nginx.conf new file mode 100644 index 0000000..b44dd6d --- /dev/null +++ b/frontend/nginx.conf @@ -0,0 +1,30 @@ +server { + listen 80; + server_name _; + root /usr/share/nginx/html; + index index.html; + + # SPA fallback + location / { + try_files $uri $uri/ /index.html; + } + + # Proxy API requests to backend + location /api/ { + proxy_pass http://backend:4000/api/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + } + + # Proxy socket.io + location /socket.io/ { + proxy_pass http://backend:4000/socket.io/; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + } + + gzip on; + gzip_types text/plain text/css application/json application/javascript text/xml application/xml; +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..e73b106 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,33 @@ +{ + "name": "faltric-frontend", + "version": "1.0.0", + "description": "Faltric P2P energy trading frontend", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview", + "lint": "eslint src --ext .js,.jsx" + }, + "dependencies": { + "ethers": "^6.11.0", + "mapbox-gl": "^3.4.0", + "@mapbox/mapbox-gl-draw": "^1.4.3", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-router-dom": "^6.23.1", + "socket.io-client": "^4.7.5" + }, + "devDependencies": { + "@types/react": "^18.3.3", + "@types/react-dom": "^18.3.0", + "@vitejs/plugin-react": "^4.3.1", + "autoprefixer": "^10.4.19", + "eslint": "^8.57.0", + "eslint-plugin-react": "^7.34.2", + "eslint-plugin-react-hooks": "^4.6.2", + "postcss": "^8.4.38", + "tailwindcss": "^3.4.4", + "vite": "^5.3.1" + } +} diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000..2aa7205 --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ 
+export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx new file mode 100644 index 0000000..2376340 --- /dev/null +++ b/frontend/src/App.jsx @@ -0,0 +1,97 @@ +import { BrowserRouter, Routes, Route, Navigate } from "react-router-dom"; +import { AuthProvider } from "./contexts/AuthContext.jsx"; +import { Web3Provider } from "./contexts/Web3Context.jsx"; +import Navbar from "./components/Layout/Navbar.jsx"; +import Sidebar from "./components/Layout/Sidebar.jsx"; +import Login from "./components/Auth/Login.jsx"; +import GridMap from "./components/GridMap/GridMap.jsx"; +import Exchange from "./components/Exchange/Exchange.jsx"; +import Predict from "./components/Predict/Predict.jsx"; +import Connect from "./components/Connect/Connect.jsx"; +import AdminDashboard from "./components/Admin/AdminDashboard.jsx"; +import { useAuth } from "./contexts/AuthContext.jsx"; + +function ProtectedRoute({ children, adminOnly = false }) { + const { user, loading } = useAuth(); + if (loading) return
Loading...
; + if (!user) return ; + if (adminOnly && user.role !== "admin") return ; + return children; +} + +function AppLayout({ children }) { + return ( +
+ +
+ +
{children}
+
+
+ ); +} + +export default function App() { + return ( + + + + + } /> + + + + + + } + /> + + + + + + } + /> + + + + + + } + /> + + + + + + } + /> + + + + + + } + /> + } /> + + + + + ); +} diff --git a/frontend/src/components/Admin/AdminDashboard.jsx b/frontend/src/components/Admin/AdminDashboard.jsx new file mode 100644 index 0000000..7f5cbd7 --- /dev/null +++ b/frontend/src/components/Admin/AdminDashboard.jsx @@ -0,0 +1,178 @@ +import { useState, useEffect } from "react"; +import { useAuth } from "../../contexts/AuthContext.jsx"; + +export default function AdminDashboard() { + const { authFetch } = useAuth(); + const [pending, setPending] = useState([]); + const [loading, setLoading] = useState(true); + const [actionId, setActionId] = useState(null); + const [rejectReason, setRejectReason] = useState(""); + const [showRejectModal, setShowRejectModal] = useState(null); + const [stats, setStats] = useState({ total: 0, approved: 0, rejected: 0, pending: 0 }); + + useEffect(() => { + fetchPending(); + fetchStats(); + }, []); + + async function fetchPending() { + setLoading(true); + try { + const res = await authFetch("/api/installations/admin/pending"); + const data = await res.json(); + setPending(data.installations || []); + } catch (err) { + console.error(err); + } finally { + setLoading(false); + } + } + + async function fetchStats() { + try { + const res = await authFetch("/api/installations?verified=false"); + // Simplified: just show pending count + } catch {} + } + + async function approve(id) { + setActionId(id); + try { + const res = await authFetch(`/api/installations/${id}/approve`, { method: "PATCH" }); + if (res.ok) { + setPending((prev) => prev.filter((i) => i._id !== id)); + } + } catch (err) { + console.error(err); + } finally { + setActionId(null); + } + } + + async function reject(id, reason) { + setActionId(id); + try { + const res = await authFetch(`/api/installations/${id}/reject`, { + method: "PATCH", + body: JSON.stringify({ reason }), + }); + 
if (res.ok) { + setPending((prev) => prev.filter((i) => i._id !== id)); + } + } catch (err) { + console.error(err); + } finally { + setActionId(null); + setShowRejectModal(null); + setRejectReason(""); + } + } + + return ( +
+

🛡️ Admin Dashboard

+ +
+ {[ + { label: "Pending Approvals", value: pending.length, color: "text-yellow-400" }, + { label: "Total Approved", value: "—", color: "text-green-400" }, + { label: "Total Rejected", value: "—", color: "text-red-400" }, + { label: "Active Users", value: "—", color: "text-blue-400" }, + ].map((s) => ( +
+

{s.label}

+

{s.value}

+
+ ))} +
+ +
+

Pending Installation Approvals

+ + {loading ? ( +

Loading...

+ ) : pending.length === 0 ? ( +

✅ No pending approvals

+ ) : ( +
+ {pending.map((inst) => ( +
+
+
+ {inst.name} + + {inst.type} + +
+

+ Owner: {inst.owner?.email || "—"} •{" "} + + {inst.walletAddress?.slice(0, 10)}... + +

+

Capacity: {inst.capacityKw} kW

+
+
+ + +
+
+ ))} +
+ )} +
+ + {/* Reject modal */} + {showRejectModal && ( +
+
+

Reject Installation

+