diff --git a/.windsurfrules b/.windsurfrules
index cb21241..e23bc6a 100644
--- a/.windsurfrules
+++ b/.windsurfrules
@@ -50,8 +50,8 @@ Empower health-conscious individuals to effortlessly monitor and understand thei
 - Encrypted if desired by advanced users
 
 ### 4. **Lightweight Tech Stack**
-- **Static site** built with vanilla JS (or minimal framework) + HTML/CSS
-- **Data layer**: read/write `data.json` via JavaScript filesystem APIs (or embedded file picker)
+- **Site** built with vanilla JS (or minimal framework) + HTML/CSS
+- **Data layer**: read/write `data.json`
 - **Dockerized**: simple `Dockerfile` wrapping a static file server (e.g., Caddy, NGINX)
 - **Deploy**: Coolify recipe, GitHub → Docker → Cloud
 
@@ -80,7 +80,7 @@ Empower health-conscious individuals to effortlessly monitor and understand thei
 
 ```text
 ┌─────────────────────┐      ┌───────────────┐
-│  Static Web Server  │◀────▶│   data.json   │
+│     Web Server      │◀────▶│   data.json   │
 │   (NGINX/Caddy)     │      │  (local FS)   │
 └─────────────────────┘      └───────────────┘
          ▲                            ▲
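For context (not part of the patch itself): with the filesystem-API wording dropped, the data layer presumably reads and writes `data.json` through the small HTTP API added later in this changeset, which nginx proxies at `/app-data/`. A minimal sketch of such a frontend data layer; the function names and error handling are illustrative, not code from this repository:

```js
// Hypothetical frontend data layer (sketch only, not part of this diff).
// Assumes the nginx proxy from this changeset: GET/PUT on /app-data/ reach the Node API.
async function loadData() {
  const res = await fetch('/app-data/', { method: 'GET' });
  if (!res.ok) throw new Error(`Failed to load data: ${res.status}`);
  return res.json(); // The API returns {} when the data file does not exist yet
}

async function saveData(data) {
  const res = await fetch('/app-data/', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(data),
  });
  if (!res.ok) throw new Error(`Failed to save data: ${res.status}`);
}
```

Basic Auth would be handled by the browser, since the whole site sits behind the same `nginx-auth.conf` include.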
diff --git a/Dockerfile b/Dockerfile
index 1e8c2ad..fc78db8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,20 +1,4 @@
-# Stage 1: Build Node.js application
-FROM node:18-alpine AS builder
-
-WORKDIR /app
-
-# Copy package.json and package-lock.json (if available)
-COPY package.json ./
-# COPY package-lock.json ./
-
-# Install dependencies
-RUN npm install --production
-
-# Copy the rest of the application files (API code)
-COPY data-api.js ./
-# Add any other necessary files for the API here
-
-# Stage 2: Setup Nginx and Supervisor
+# Stage 1: Setup Nginx and Supervisor
 FROM nginx:alpine
 
 # Install Supervisor, Node.js runtime, and apache2-utils (for htpasswd)
@@ -41,6 +25,14 @@ RUN htpasswd -cb /etc/nginx/.htpasswd ${AUTH_USERNAME} ${AUTH_PASSWORD} \
 # Copy Supervisor configuration
 COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
 
+# Set up Node.js application for the API
+WORKDIR /app
+COPY package.json ./
+# Copy package-lock.json for reproducible installs (remove this line if the lockfile does not exist)
+COPY package-lock.json ./
+RUN npm install --production
+COPY server-api.js ./
+
 # Copy static website content (HTML, CSS, JS for the frontend)
 COPY index.html /usr/share/nginx/html/
 COPY js/ /usr/share/nginx/html/js/
@@ -48,16 +40,12 @@ COPY css/ /usr/share/nginx/html/css/
 # Add other static asset directories if they exist (e.g., images, fonts, libs)
 # COPY images/ /usr/share/nginx/html/images/
 
-# Create app directory for Node.js API and copy from builder stage
-RUN mkdir -p /app # Explicitly create /app
-WORKDIR /app # Make sure this WORKDIR is set before copying to /app
-COPY --from=builder /app .
-
-RUN pwd # DEBUG: Print current working directory
-RUN ls -la /app # DEBUG: List contents of /app before supervisor starts
-
 # Create data directory (volume will be mounted here by docker-compose)
 # Ensure Nginx (and Node.js if it needs to write logs/pid here) has permissions
+# Ensure the /app directory is owned by the nginx user if Node.js needs to write logs there under the nginx user,
+# or run the Node.js process as a non-root user that has permissions to /app.
+# Supervisord is configured to run the Node.js API process as the 'nginx' user (see supervisord.conf).
+
 RUN mkdir -p /data && chown nginx:nginx /data # Or appropriate user for Node.js if it writes here
 VOLUME /data
 
diff --git a/nginx.conf b/nginx.conf
index d0f970d..e168e4f 100644
--- a/nginx.conf
+++ b/nginx.conf
@@ -20,48 +20,46 @@ http {
     error_log /dev/stderr warn;
 
     server {
-    listen 80;
-    server_name localhost;
-    root /usr/share/nginx/html;
-    index index.html;
-
-    # Enable gzip compression
-    gzip on;
-    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
-
-    # Main application location
-    # API proxy: All requests to /app-data/* will be forwarded to the Node.js API
-    location /app-data/ {
-        include /etc/nginx/nginx-auth.conf; # Apply Basic Auth to API
-
-        proxy_pass http://localhost:3000/; # Assuming Node API runs on port 3000
-        proxy_http_version 1.1;
-        proxy_set_header Upgrade $http_upgrade;
-        proxy_set_header Connection 'upgrade';
-        proxy_set_header Host $host;
-        proxy_cache_bypass $http_upgrade;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-    }
-
-    location / {
-        include /etc/nginx/nginx-auth.conf; # Apply Basic Auth to the whole site
-        try_files $uri $uri/ /index.html;
-    }
-
-    # Enable browser caching for static assets
-    location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
-        include /etc/nginx/nginx-auth.conf; # Apply Basic Auth to static assets too
-        expires 30d;
-        add_header Cache-Control "public, no-transform";
-    }
-
-    # Error pages
-    error_page 404 /index.html;
-    error_page 500 502 503 504 /50x.html;
-    location = /50x.html {
+        listen 80;
+        server_name localhost;
         root /usr/share/nginx/html;
-    }
-}
-}
+        index index.html;
+
+        location /app-data/ {
+            include /etc/nginx/nginx-auth.conf; # Apply Basic Auth to API
+
+            proxy_pass http://127.0.0.1:3000/; # Node API listens on port 3000 locally
+            proxy_http_version 1.1;
+            proxy_set_header Upgrade $http_upgrade;
+            proxy_set_header Connection 'upgrade';
+            proxy_set_header Host $host;
+            proxy_cache_bypass $http_upgrade;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_set_header X-Forwarded-Proto $scheme;
+        }
+
+        # Enable gzip compression
+        gzip on;
+        gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
+
+        location / {
+            include /etc/nginx/nginx-auth.conf; # Apply Basic Auth to the whole site
+            try_files $uri $uri/ /index.html;
+        }
+
+        # Enable browser caching for static assets
+        location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
+            include /etc/nginx/nginx-auth.conf; # Apply Basic Auth to static assets too
+            expires 30d;
+            add_header Cache-Control "public, no-transform";
+        }
+
+        # Error pages
+        error_page 404 /index.html; # Consider a specific 404.html if you have one
+        error_page 500 502 503 504 /50x.html;
+        location = /50x.html {
+            root /usr/share/nginx/html; # Ensure 50x.html is in the root
+        }
+    } # End of server block
+} # End of http block
diff --git a/package.json b/package.json
index 93dd519..ad1f213 100644
--- a/package.json
+++ b/package.json
@@ -2,21 +2,14 @@
   "name": "weight-tracker",
   "version": "1.0.0",
   "description": "Minimalist weight and meal tracking application",
-  "main": "data-api.js",
+  "main": "server-api.js",
   "private": true,
   "scripts": {
-    "start": "node data-api.js",
-    "backup": "node backup-s3.js"
+    "start": "node server-api.js"
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "^3.425.0",
-    "body-parser": "^1.20.2",
-    "connect-redis": "^7.1.0",
-    "cookie-parser": "^1.4.6",
-    "cors": "^2.8.5",
-    "express": "^4.18.2",
-    "express-session": "^1.17.3",
-    "node-cron": "^3.0.2"
+    "express": "^4.19.2",
+    "body-parser": "^1.20.2"
   },
   "engines": {
     "node": ">=14.0.0"
diff --git a/server-api.js b/server-api.js
new file mode 100644
index 0000000..7f39cb0
--- /dev/null
+++ b/server-api.js
@@ -0,0 +1,67 @@
+const express = require('express');
+const bodyParser = require('body-parser');
+const fs = require('fs');
+const path = require('path');
+
+const app = express();
+const PORT = process.env.PORT || 3000;
+const DATA_FILE_PATH = process.env.DATA_FILE_PATH || path.join(__dirname, 'data', 'weight-tracker-data.json'); // Default path for local dev, override in Docker
+
+// Ensure data directory exists (especially for local dev, Docker volume handles it in container)
+const dataDir = path.dirname(DATA_FILE_PATH);
+if (!fs.existsSync(dataDir)) {
+  fs.mkdirSync(dataDir, { recursive: true });
+}
+
+app.use(bodyParser.json({ limit: '10mb' })); // Generous limit for the data file
+
+// Middleware to log requests
+app.use((req, res, next) => {
+  console.log(`${new Date().toISOString()} - ${req.method} ${req.originalUrl}`);
+  next();
+});
+
+// GET endpoint to retrieve data
+app.get('/', (req, res) => {
+  fs.readFile(DATA_FILE_PATH, 'utf8', (err, data) => {
+    if (err) {
+      if (err.code === 'ENOENT') {
+        console.log(`Data file not found at ${DATA_FILE_PATH}, returning empty object.`);
+        return res.json({}); // If file doesn't exist, return empty JSON object
+      }
+      console.error('Error reading data file:', err);
+      return res.status(500).send('Error reading data file');
+    }
+    try {
+      res.json(JSON.parse(data));
+    } catch (parseErr) {
+      console.error('Error parsing data file content:', parseErr);
+      // If file content is not valid JSON, perhaps return empty or handle error
+      res.status(500).send('Error parsing data file content');
+    }
+  });
+});
+
+// PUT endpoint to save data
+app.put('/', (req, res) => {
+  const dataToSave = JSON.stringify(req.body, null, 2); // Pretty print JSON
+  fs.writeFile(DATA_FILE_PATH, dataToSave, 'utf8', (err) => {
+    if (err) {
+      console.error('Error writing data file:', err);
+      return res.status(500).send('Error writing data file');
+    }
+    console.log(`Data saved successfully to ${DATA_FILE_PATH}`);
+    res.status(200).send('Data saved successfully');
+  });
+});
+
+app.listen(PORT, '0.0.0.0', () => {
+  console.log(`Server API listening on port ${PORT}`);
+  console.log(`Data file path: ${DATA_FILE_PATH}`);
+});
+
+// Basic error handler
+app.use((err, req, res, next) => {
+  console.error(err.stack);
+  res.status(500).send('Something broke!');
+});
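One thing the new PUT handler does not guard against is a half-written or interleaved write if the process dies mid-write or two saves overlap. Not part of this changeset, but a common hardening is to write to a temporary file and rename it into place. A sketch under the same DATA_FILE_PATH assumption; the helper name is hypothetical:

```js
// Sketch only (not in this changeset): atomic save via temp file + rename.
const fs = require('fs');
const path = require('path');

function saveAtomically(filePath, jsonString, callback) {
  const tmpPath = path.join(path.dirname(filePath), `.${path.basename(filePath)}.tmp`);
  fs.writeFile(tmpPath, jsonString, 'utf8', (writeErr) => {
    if (writeErr) return callback(writeErr);
    // rename() on the same filesystem replaces the target in one step,
    // so readers never observe a partially written data file.
    fs.rename(tmpPath, filePath, callback);
  });
}
```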
diff --git a/supervisord.conf b/supervisord.conf
index 436ea45..15bcc62 100644
--- a/supervisord.conf
+++ b/supervisord.conf
@@ -14,16 +14,18 @@ stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
 
-[program:data-api]
-command=node data-api.js
+[program:server-api]
+command=node server-api.js
 directory=/app
-environment=DATA_DIR="/data",PORT="3000"
 autostart=true
 autorestart=true
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+user=nginx ; Run as nginx user, ensure this user has write access to /data if API needs it, or adjust DATA_FILE_PATH ownership.
+; For DATA_FILE_PATH, it's better to set it via an environment variable in supervisord or Dockerfile for flexibility.
+environment=PORT="3000",DATA_FILE_PATH="/data/weight-tracker-data.json"
 
 ;[program:s3-backup]
 ;command=node backup-s3.js
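Since the API now runs as the nginx user (per supervisord.conf) and writes into the /data volume chowned in the Dockerfile, a permissions mismatch would only surface on the first PUT. Again a sketch rather than part of this diff: a small startup check could make that failure visible in the container logs right away.

```js
// Sketch only: fail fast at startup if the data directory is not writable
// by the user the API runs as (nginx, per supervisord.conf).
const fs = require('fs');
const path = require('path');

const DATA_FILE_PATH = process.env.DATA_FILE_PATH || path.join(__dirname, 'data', 'weight-tracker-data.json');

fs.access(path.dirname(DATA_FILE_PATH), fs.constants.W_OK, (err) => {
  if (err) {
    console.error(`Data directory ${path.dirname(DATA_FILE_PATH)} is not writable:`, err.message);
    process.exit(1); // Supervisor's autorestart will retry; the log points at the permission problem
  }
});
```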