Isaac Johnson 2025-06-15 09:47:15 -05:00
commit 529a5466f1
12 changed files with 610 additions and 0 deletions

3
.bolt/config.json Normal file

@@ -0,0 +1,3 @@
{
  "template": "node"
}

10
.env.example Normal file

@@ -0,0 +1,10 @@
# Supabase Configuration
VITE_SUPABASE_URL=your_supabase_url_here
VITE_SUPABASE_ANON_KEY=your_supabase_anon_key_here

# PostgreSQL Connection (read by server.js; values shown are its fallback defaults)
DB_HOST=localhost
DB_PORT=5432
DB_NAME=postgres
DB_USER=postgres
DB_PASSWORD=password
DB_SSL=false

# Authentication
AUTH_USERNAME=admin
AUTH_PASSWORD=password123

# Server Configuration
PORT=3000

5
.gitignore vendored Normal file

@@ -0,0 +1,5 @@
node_modules
.env
.DS_Store
*Zone.Identifier
**/*Zone.Identifier

104
README.md Normal file

@@ -0,0 +1,104 @@
# Log Microservice
A Node.js microservice that receives log data via authenticated POST requests and stores it in a PostgreSQL database hosted on Supabase.
## Features
- **HTTP API**: RESTful endpoints for receiving log data
- **Basic Authentication**: Secure endpoints with username/password authentication
- **PostgreSQL Storage**: Stores logs in Supabase PostgreSQL database
- **Input Validation**: Validates incoming data and provides meaningful error messages
- **Default Values**: Automatically applies default values for optional fields
- **Health Check**: Built-in health check endpoint
## API Endpoints
### POST /logs
Receives log data and stores it in the database.
**Authentication**: Basic Auth required
**Request Body**:
```json
{
"message": "This is a log message (required)",
"project": "My Project (optional, defaults to 'Project 1')",
"type": "Error (optional, defaults to 'Info')",
"owner": "john.doe (optional, defaults to 'N/A')",
"avatar_src": "/custom-avatar.png (optional, defaults to '/rectangle-15.png')",
"status": "Active (optional, defaults to 'Pending')"
}
```
**Response** (`201 Created`):
```json
{
"success": true,
"message": "Log entry created successfully",
"id": 1703123456
}
```
### GET /health
Health check endpoint to verify service status.
**Response** (`200 OK`):
```json
{
"status": "healthy",
"timestamp": "2024-01-01T12:00:00.000Z"
}
```
## Setup
1. **Set up Supabase**: Click the "Connect to Supabase" button to configure your database
2. **Configure Environment**: Copy `.env.example` to `.env` and update the values
3. **Run the Service**: Use `npm start` to start the microservice (the exact commands are sketched below)
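From a fresh checkout, that amounts to (assuming a standard Node/npm setup):
```bash
cp .env.example .env    # then edit the values
npm install             # installs the pg dependency
npm start
```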
## Database Schema
The service automatically creates a `logs` table with the following structure:
- `id` (bigint) - Epoch time identifier
- `body` (text) - First 200 characters of the message
- `project` (text) - Project name
- `type` (text) - Log type (Info, Error, Warning, etc.)
- `date` (date) - Date of the log entry
- `avatar_src` (text) - Avatar image source
- `owner` (text) - Owner of the log entry
- `description` (text) - Full log message
- `created_at` (timestamptz) - Creation timestamp
- `status` (text) - Status of the log entry
## Authentication
The service uses HTTP Basic Authentication. Default credentials:
- Username: `admin`
- Password: `password123`
Update these in your `.env` file for production use.
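The header value is just the base64 encoding of `username:password`; the `YWRtaW46cGFzc3dvcmQxMjM=` used in the curl example below encodes the defaults:
```js
// Node one-liner for the Authorization header value
Buffer.from('admin:password123').toString('base64'); // 'YWRtaW46cGFzc3dvcmQxMjM='
```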
## Usage Example
```bash
# Using curl to send a log entry
curl -X POST http://localhost:3000/logs \
-H "Content-Type: application/json" \
-H "Authorization: Basic YWRtaW46cGFzc3dvcmQxMjM=" \
-d '{
"message": "User login successful",
"project": "Authentication Service",
"type": "Info",
"owner": "auth-service"
}'
```
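The same request from Node 18+ using the built-in `fetch`; a minimal sketch, not part of the service itself:
```js
const auth = Buffer.from('admin:password123').toString('base64');

const res = await fetch('http://localhost:3000/logs', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Authorization': `Basic ${auth}`
  },
  body: JSON.stringify({ message: 'User login successful', type: 'Info' })
});

console.log(res.status, await res.json()); // 201 { success: true, ... }
```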
## Error Handling
The service provides detailed error responses:
- `400 Bad Request`: Invalid input data
- `401 Unauthorized`: Missing or invalid authentication
- `404 Not Found`: Unknown endpoint
- `500 Internal Server Error`: Database or server errors
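For example, a request missing the required `message` field gets a `400` with the validation errors echoed back (matching the shape `server.js` sends):
```json
{
  "error": "Invalid input",
  "errors": ["message field is required and must be a string"]
}
```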

3
index.js Normal file

@@ -0,0 +1,3 @@
// Leftover Bolt template entry point - run `node index.js` in the terminal to sanity-check Node; the actual service starts from server.js (`npm start`)
console.log(`Hello Node.js v${process.versions.node}!`);

38
middleware/auth.js Normal file

@@ -0,0 +1,38 @@
export function authenticateBasic(req) {
  const authHeader = req.headers.authorization;

  if (!authHeader || !authHeader.startsWith('Basic ')) {
    return {
      success: false,
      message: 'Missing or invalid Authorization header'
    };
  }

  try {
    const base64Credentials = authHeader.split(' ')[1];
    const credentials = Buffer.from(base64Credentials, 'base64').toString('utf-8');

    // Split on the first ':' only, so passwords containing ':' are not truncated
    const separatorIndex = credentials.indexOf(':');
    if (separatorIndex === -1) {
      return {
        success: false,
        message: 'Invalid Authorization header format'
      };
    }
    const username = credentials.slice(0, separatorIndex);
    const password = credentials.slice(separatorIndex + 1);

    // Simple hardcoded credentials for demo.
    // In production, these should be stored securely and hashed.
    const validUsername = process.env.AUTH_USERNAME || 'admin';
    const validPassword = process.env.AUTH_PASSWORD || 'password123';

    if (username === validUsername && password === validPassword) {
      return {
        success: true,
        username
      };
    }

    return {
      success: false,
      message: 'Invalid credentials'
    };
  } catch (error) {
    return {
      success: false,
      message: 'Invalid Authorization header format'
    };
  }
}
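
// Example (assumed usage; credentials mirror the README's curl example):
//   const header = 'Basic ' + Buffer.from('admin:password123').toString('base64');
//   authenticateBasic({ headers: { authorization: header } });
//   // -> { success: true, username: 'admin' }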

147
package-lock.json generated Normal file

@@ -0,0 +1,147 @@
{
  "name": "log-microservice",
  "version": "1.0.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "log-microservice",
      "version": "1.0.0",
      "dependencies": {
        "pg": "^8.11.3"
      }
    },
    "node_modules/pg": {
      "version": "8.16.0",
      "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.0.tgz",
      "integrity": "sha512-7SKfdvP8CTNXjMUzfcVTaI+TDzBEeaUnVwiVGZQD1Hh33Kpev7liQba9uLd4CfN8r9mCVsD0JIpq03+Unpz+kg==",
      "dependencies": {
        "pg-connection-string": "^2.9.0",
        "pg-pool": "^3.10.0",
        "pg-protocol": "^1.10.0",
        "pg-types": "2.2.0",
        "pgpass": "1.0.5"
      },
      "engines": {
        "node": ">= 8.0.0"
      },
      "optionalDependencies": {
        "pg-cloudflare": "^1.2.5"
      },
      "peerDependencies": {
        "pg-native": ">=3.0.1"
      },
      "peerDependenciesMeta": {
        "pg-native": {
          "optional": true
        }
      }
    },
    "node_modules/pg-cloudflare": {
      "version": "1.2.5",
      "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.5.tgz",
      "integrity": "sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg==",
      "optional": true
    },
    "node_modules/pg-connection-string": {
      "version": "2.9.0",
      "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.0.tgz",
      "integrity": "sha512-P2DEBKuvh5RClafLngkAuGe9OUlFV7ebu8w1kmaaOgPcpJd1RIFh7otETfI6hAR8YupOLFTY7nuvvIn7PLciUQ=="
    },
    "node_modules/pg-int8": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
      "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
      "engines": {
        "node": ">=4.0.0"
      }
    },
    "node_modules/pg-pool": {
      "version": "3.10.0",
      "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.0.tgz",
      "integrity": "sha512-DzZ26On4sQ0KmqnO34muPcmKbhrjmyiO4lCCR0VwEd7MjmiKf5NTg/6+apUEu0NF7ESa37CGzFxH513CoUmWnA==",
      "peerDependencies": {
        "pg": ">=8.0"
      }
    },
    "node_modules/pg-protocol": {
      "version": "1.10.0",
      "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.0.tgz",
      "integrity": "sha512-IpdytjudNuLv8nhlHs/UrVBhU0e78J0oIS/0AVdTbWxSOkFUVdsHC/NrorO6nXsQNDTT1kzDSOMJubBQviX18Q=="
    },
    "node_modules/pg-types": {
      "version": "2.2.0",
      "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
      "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
      "dependencies": {
        "pg-int8": "1.0.1",
        "postgres-array": "~2.0.0",
        "postgres-bytea": "~1.0.0",
        "postgres-date": "~1.0.4",
        "postgres-interval": "^1.1.0"
      },
      "engines": {
        "node": ">=4"
      }
    },
    "node_modules/pgpass": {
      "version": "1.0.5",
      "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
      "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
      "dependencies": {
        "split2": "^4.1.0"
      }
    },
    "node_modules/postgres-array": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
      "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
      "engines": {
        "node": ">=4"
      }
    },
    "node_modules/postgres-bytea": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
      "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==",
      "engines": {
        "node": ">=0.10.0"
      }
    },
    "node_modules/postgres-date": {
      "version": "1.0.7",
      "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
      "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
      "engines": {
        "node": ">=0.10.0"
      }
    },
    "node_modules/postgres-interval": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
      "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
      "dependencies": {
        "xtend": "^4.0.0"
      },
      "engines": {
        "node": ">=0.10.0"
      }
    },
    "node_modules/split2": {
      "version": "4.2.0",
      "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
      "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
      "engines": {
        "node": ">= 10.x"
      }
    },
    "node_modules/xtend": {
      "version": "4.0.2",
      "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
      "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
      "engines": {
        "node": ">=0.4"
      }
    }
  }
}

15
package.json Normal file

@@ -0,0 +1,15 @@
{
  "name": "log-microservice",
  "version": "1.0.0",
  "description": "Microservice for receiving and storing log data",
  "main": "server.js",
  "type": "module",
  "scripts": {
    "start": "node server.js",
    "dev": "node server.js",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "dependencies": {
    "pg": "^8.11.3"
  }
}

136
server.js Normal file

@@ -0,0 +1,136 @@
import { createServer } from 'http';
import { URL } from 'url';
import pkg from 'pg';
import { authenticateBasic } from './middleware/auth.js';
import { validateLogData } from './utils/validation.js';
import { createLogEntry } from './services/logService.js';

const { Client } = pkg;
const PORT = process.env.PORT || 3000;

// PostgreSQL connection configuration from environment variables
const dbConfig = {
  host: process.env.DB_HOST || 'localhost',
  port: process.env.DB_PORT || 5432,
  database: process.env.DB_NAME || 'postgres',
  user: process.env.DB_USER || 'postgres',
  password: process.env.DB_PASSWORD || 'password',
  ssl: process.env.DB_SSL === 'true' ? { rejectUnauthorized: false } : false
};

// Initialize PostgreSQL client
const client = new Client(dbConfig);

// Connect to PostgreSQL (top-level await works because package.json sets "type": "module")
try {
  await client.connect();
  console.log('Connected to PostgreSQL database');

  // Create logs table if it doesn't exist
  await client.query(`
    CREATE TABLE IF NOT EXISTS logs (
      id bigint PRIMARY KEY,
      body text NOT NULL,
      project text NOT NULL DEFAULT 'Project 1',
      type text NOT NULL DEFAULT 'Info',
      date date NOT NULL,
      avatar_src text NOT NULL DEFAULT '/rectangle-15.png',
      owner text NOT NULL DEFAULT 'N/A',
      description text NOT NULL,
      created_at timestamptz NOT NULL DEFAULT now(),
      status text NOT NULL DEFAULT 'Pending'
    )
  `);
  console.log('Logs table ready');
} catch (error) {
  console.error('Failed to connect to PostgreSQL:', error);
  process.exit(1);
}

const server = createServer(async (req, res) => {
  // Set CORS headers
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
  res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization');

  // Handle preflight requests
  if (req.method === 'OPTIONS') {
    res.writeHead(200);
    res.end();
    return;
  }

  const url = new URL(req.url, `http://localhost:${PORT}`);

  try {
    // Health check endpoint
    if (req.method === 'GET' && url.pathname === '/health') {
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ status: 'healthy', timestamp: new Date().toISOString() }));
      return;
    }

    // Log entry endpoint
    if (req.method === 'POST' && url.pathname === '/logs') {
      // Authenticate request
      const authResult = authenticateBasic(req);
      if (!authResult.success) {
        res.writeHead(401, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'Unauthorized', message: authResult.message }));
        return;
      }

      // Parse request body
      let body = '';
      req.on('data', chunk => {
        body += chunk.toString();
      });
      req.on('end', async () => {
        try {
          const data = JSON.parse(body);

          // Validate input data
          const validation = validateLogData(data);
          if (!validation.isValid) {
            res.writeHead(400, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ error: 'Invalid input', errors: validation.errors }));
            return;
          }

          // Create log entry
          const logEntry = await createLogEntry(client, data);

          res.writeHead(201, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({
            success: true,
            message: 'Log entry created successfully',
            id: logEntry.id
          }));
        } catch (error) {
          // Malformed JSON is a client error, not a server error
          if (error instanceof SyntaxError) {
            res.writeHead(400, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ error: 'Invalid input', errors: ['Request body must be valid JSON'] }));
            return;
          }
          console.error('Error processing request:', error);
          res.writeHead(500, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: 'Internal server error' }));
        }
      });
      return;
    }

    // 404 for unknown routes
    res.writeHead(404, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: 'Not found' }));
  } catch (error) {
    console.error('Server error:', error);
    res.writeHead(500, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: 'Internal server error' }));
  }
});

server.listen(PORT, () => {
  console.log(`Log microservice running on port ${PORT}`);
  console.log(`Health check available at: http://localhost:${PORT}/health`);
  console.log(`Log endpoint available at: http://localhost:${PORT}/logs`);
});
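
// Hedged sketch (an assumption, not in the original service): close resources
// on shutdown so the pg connection is released cleanly.
//
//   process.on('SIGINT', async () => {
//     server.close();
//     await client.end();
//     process.exit(0);
//   });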

55
services/logService.js Normal file

@@ -0,0 +1,55 @@
export async function createLogEntry(client, data) {
  const now = new Date();
  // Epoch seconds as the primary key. Note: two inserts in the same second
  // collide on the PRIMARY KEY (see the sketch at the end of this file).
  const epochTime = Math.floor(now.getTime() / 1000);

  // Create log entry with defaults
  const logEntry = {
    id: epochTime,
    body: data.message.substring(0, 200), // First 200 characters
    project: data.project || 'Project 1',
    type: data.type || 'Info',
    date: now.toISOString().split('T')[0], // YYYY-MM-DD format
    avatar_src: data.avatar_src || '/rectangle-15.png',
    owner: data.owner || 'N/A',
    description: data.message, // Full message
    created_at: now.toISOString(),
    status: data.status || 'Pending'
  };

  // Insert into database using a parameterized query
  const query = `
    INSERT INTO logs (id, body, project, type, date, avatar_src, owner, description, created_at, status)
    VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
    RETURNING *
  `;

  const values = [
    logEntry.id,
    logEntry.body,
    logEntry.project,
    logEntry.type,
    logEntry.date,
    logEntry.avatar_src,
    logEntry.owner,
    logEntry.description,
    logEntry.created_at,
    logEntry.status
  ];

  try {
    const result = await client.query(query, values);
    const insertedData = result.rows[0];

    console.log('Log entry created:', {
      id: insertedData.id,
      project: insertedData.project,
      type: insertedData.type,
      timestamp: insertedData.created_at
    });

    return insertedData;
  } catch (error) {
    console.error('Database error:', error);
    throw new Error(`Failed to insert log entry: ${error.message}`);
  }
}
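
// Hedged sketch (an assumption, not part of the original service): Postgres
// reports a duplicate primary key as error code '23505', so a same-second
// collision could be retried with a fresh id instead of surfacing as a 500:
//
//   try {
//     return (await client.query(query, values)).rows[0];
//   } catch (error) {
//     if (error.code === '23505') {
//       // e.g. retry once with Date.now() (millisecond resolution) as the id
//     }
//     throw error;
//   }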


@@ -0,0 +1,56 @@
/*
  # Create logs table for microservice

  1. New Tables
    - `logs`
      - `id` (bigint, primary key) - Epoch time identifier
      - `body` (text) - First 200 characters of log message
      - `project` (text) - Project name, defaults to "Project 1"
      - `type` (text) - Log type, defaults to "Info"
      - `date` (date) - Date of the log entry
      - `avatar_src` (text) - Avatar source URL, defaults to "/rectangle-15.png"
      - `owner` (text) - Owner of the log entry, defaults to "N/A"
      - `description` (text) - Full log message
      - `created_at` (timestamptz) - Timestamp when record was created
      - `status` (text) - Status of the log entry, defaults to "Pending"

  2. Security
    - Enable RLS on `logs` table
    - Add policy for authenticated users to insert and read logs
*/

CREATE TABLE IF NOT EXISTS logs (
  id bigint PRIMARY KEY,
  body text NOT NULL,
  project text NOT NULL DEFAULT 'Project 1',
  type text NOT NULL DEFAULT 'Info',
  date date NOT NULL,
  avatar_src text NOT NULL DEFAULT '/rectangle-15.png',
  owner text NOT NULL DEFAULT 'N/A',
  description text NOT NULL,
  created_at timestamptz NOT NULL DEFAULT now(),
  status text NOT NULL DEFAULT 'Pending'
);

ALTER TABLE logs ENABLE ROW LEVEL SECURITY;

-- Policy to allow authenticated users to insert logs
CREATE POLICY "Allow authenticated users to insert logs"
  ON logs
  FOR INSERT
  TO authenticated
  WITH CHECK (true);

-- Policy to allow authenticated users to read logs
CREATE POLICY "Allow authenticated users to read logs"
  ON logs
  FOR SELECT
  TO authenticated
  USING (true);

-- Policy to allow service role to perform all operations
CREATE POLICY "Allow service role full access"
  ON logs
  FOR ALL
  TO service_role
  USING (true);

38
utils/validation.js Normal file

@@ -0,0 +1,38 @@
export function validateLogData(data) {
  const errors = [];

  // Check if data exists
  if (!data || typeof data !== 'object') {
    errors.push('Request body must be a valid JSON object');
    return { isValid: false, errors };
  }

  // Validate required message field
  if (!data.message || typeof data.message !== 'string') {
    errors.push('message field is required and must be a string');
  } else if (data.message.trim().length === 0) {
    errors.push('message field cannot be empty');
  }

  // Validate optional fields if provided
  if (data.project !== undefined && typeof data.project !== 'string') {
    errors.push('project field must be a string if provided');
  }
  if (data.type !== undefined && typeof data.type !== 'string') {
    errors.push('type field must be a string if provided');
  }
  if (data.owner !== undefined && typeof data.owner !== 'string') {
    errors.push('owner field must be a string if provided');
  }
  if (data.avatar_src !== undefined && typeof data.avatar_src !== 'string') {
    errors.push('avatar_src field must be a string if provided');
  }
  // status is documented as an optional field in the README, so check it too
  if (data.status !== undefined && typeof data.status !== 'string') {
    errors.push('status field must be a string if provided');
  }

  return {
    isValid: errors.length === 0,
    errors
  };
}
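
// Example (assumed usage):
//   validateLogData({ message: 'User login successful' })
//   // -> { isValid: true, errors: [] }
//   validateLogData({ message: '   ', type: 42 })
//   // -> { isValid: false, errors: ['message field cannot be empty',
//   //      'type field must be a string if provided'] }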