chore(skills): bootstrap repo-local skills under .agents/skills
This commit is contained in:
343
.agents/skills/api-authentication/SKILL.md
Normal file
343
.agents/skills/api-authentication/SKILL.md
Normal file
@@ -0,0 +1,343 @@
|
|||||||
|
---
|
||||||
|
name: api-authentication
|
||||||
|
description: Implement secure API authentication with JWT, OAuth 2.0, API keys, and session management. Use when securing APIs, managing tokens, or implementing user authentication flows.
|
||||||
|
---
|
||||||
|
|
||||||
|
# API Authentication
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Implement comprehensive authentication strategies for APIs including JWT tokens, OAuth 2.0, API keys, and session management with proper security practices.
|
||||||
|
|
||||||
|
## When to Use
|
||||||
|
|
||||||
|
- Securing API endpoints
|
||||||
|
- Implementing user login/logout flows
|
||||||
|
- Managing access tokens and refresh tokens
|
||||||
|
- Integrating OAuth 2.0 providers
|
||||||
|
- Protecting sensitive data
|
||||||
|
- Implementing API key authentication
|
||||||
|
|
||||||
|
## Instructions
|
||||||
|
|
||||||
|
### 1. **JWT Authentication**
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Node.js JWT Implementation
|
||||||
|
const express = require('express');
|
||||||
|
const jwt = require('jsonwebtoken');
|
||||||
|
const bcrypt = require('bcrypt');
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
const SECRET_KEY = process.env.JWT_SECRET; // no hard-coded fallback — fail fast if unset
|
||||||
|
const REFRESH_SECRET = process.env.REFRESH_SECRET; // no hard-coded fallback — fail fast if unset
|
||||||
|
|
||||||
|
// User login endpoint
|
||||||
|
app.post('/api/auth/login', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { email, password } = req.body;
|
||||||
|
|
||||||
|
// Find user in database
|
||||||
|
const user = await User.findOne({ email });
|
||||||
|
if (!user) {
|
||||||
|
return res.status(401).json({ error: 'Invalid credentials' });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify password
|
||||||
|
const isValid = await bcrypt.compare(password, user.password);
|
||||||
|
if (!isValid) {
|
||||||
|
return res.status(401).json({ error: 'Invalid credentials' });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate tokens
|
||||||
|
const accessToken = jwt.sign(
|
||||||
|
{ userId: user.id, email: user.email, role: user.role },
|
||||||
|
SECRET_KEY,
|
||||||
|
{ expiresIn: '15m' }
|
||||||
|
);
|
||||||
|
|
||||||
|
const refreshToken = jwt.sign(
|
||||||
|
{ userId: user.id },
|
||||||
|
REFRESH_SECRET,
|
||||||
|
{ expiresIn: '7d' }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Store refresh token in database
|
||||||
|
await RefreshToken.create({ token: refreshToken, userId: user.id });
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
accessToken,
|
||||||
|
refreshToken,
|
||||||
|
expiresIn: 900,
|
||||||
|
user: { id: user.id, email: user.email, role: user.role }
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
res.status(500).json({ error: 'Authentication failed' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Refresh token endpoint
|
||||||
|
app.post('/api/auth/refresh', async (req, res) => {
|
||||||
|
const { refreshToken } = req.body;
|
||||||
|
|
||||||
|
if (!refreshToken) {
|
||||||
|
return res.status(401).json({ error: 'Refresh token required' });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const decoded = jwt.verify(refreshToken, REFRESH_SECRET);
|
||||||
|
|
||||||
|
// Verify token exists in database
|
||||||
|
const storedToken = await RefreshToken.findOne({
|
||||||
|
token: refreshToken,
|
||||||
|
userId: decoded.userId
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!storedToken) {
|
||||||
|
return res.status(401).json({ error: 'Invalid refresh token' });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate new access token
|
||||||
|
const newAccessToken = jwt.sign(
|
||||||
|
{ userId: decoded.userId },
|
||||||
|
SECRET_KEY,
|
||||||
|
{ expiresIn: '15m' }
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json({ accessToken: newAccessToken, expiresIn: 900 });
|
||||||
|
} catch (error) {
|
||||||
|
res.status(401).json({ error: 'Invalid refresh token' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Middleware to verify JWT
|
||||||
|
const verifyToken = (req, res, next) => {
|
||||||
|
const authHeader = req.headers['authorization'];
|
||||||
|
const token = authHeader && authHeader.split(' ')[1]; // Bearer token
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
return res.status(401).json({ error: 'Access token required' });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const decoded = jwt.verify(token, SECRET_KEY);
|
||||||
|
req.user = decoded;
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
if (error.name === 'TokenExpiredError') {
|
||||||
|
return res.status(401).json({ error: 'Token expired', code: 'TOKEN_EXPIRED' });
|
||||||
|
}
|
||||||
|
res.status(403).json({ error: 'Invalid token' });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Protected endpoint
|
||||||
|
app.get('/api/profile', verifyToken, (req, res) => {
|
||||||
|
res.json({ user: req.user });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Logout endpoint
|
||||||
|
app.post('/api/auth/logout', verifyToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
await RefreshToken.deleteOne({ userId: req.user.userId });
|
||||||
|
res.json({ message: 'Logged out successfully' });
|
||||||
|
} catch (error) {
|
||||||
|
res.status(500).json({ error: 'Logout failed' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. **OAuth 2.0 Implementation**
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const passport = require('passport');
|
||||||
|
const GoogleStrategy = require('passport-google-oauth20').Strategy;
|
||||||
|
|
||||||
|
passport.use(new GoogleStrategy(
|
||||||
|
{
|
||||||
|
clientID: process.env.GOOGLE_CLIENT_ID,
|
||||||
|
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
|
||||||
|
callbackURL: '/api/auth/google/callback'
|
||||||
|
},
|
||||||
|
async (accessToken, refreshToken, profile, done) => {
|
||||||
|
try {
|
||||||
|
let user = await User.findOne({ googleId: profile.id });
|
||||||
|
|
||||||
|
if (!user) {
|
||||||
|
user = await User.create({
|
||||||
|
googleId: profile.id,
|
||||||
|
email: profile.emails[0].value,
|
||||||
|
firstName: profile.name.givenName,
|
||||||
|
lastName: profile.name.familyName
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return done(null, user);
|
||||||
|
} catch (error) {
|
||||||
|
return done(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
));
|
||||||
|
|
||||||
|
// OAuth routes
|
||||||
|
app.get('/api/auth/google',
|
||||||
|
passport.authenticate('google', { scope: ['profile', 'email'] })
|
||||||
|
);
|
||||||
|
|
||||||
|
app.get('/api/auth/google/callback',
|
||||||
|
passport.authenticate('google', { failureRedirect: '/login' }),
|
||||||
|
(req, res) => {
|
||||||
|
const token = jwt.sign(
|
||||||
|
{ userId: req.user.id, email: req.user.email },
|
||||||
|
SECRET_KEY,
|
||||||
|
{ expiresIn: '7d' }
|
||||||
|
);
|
||||||
|
res.cookie('token', token, { httpOnly: true, secure: true, sameSite: 'lax' }); // never pass tokens in URL parameters
    res.redirect('/dashboard');
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. **API Key Authentication**
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const crypto = require('crypto');

// API Key middleware
|
||||||
|
const verifyApiKey = async (req, res, next) => {
|
||||||
|
const apiKey = req.headers['x-api-key'];
|
||||||
|
|
||||||
|
if (!apiKey) {
|
||||||
|
return res.status(401).json({ error: 'API key required' });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Verify API key format and existence
|
||||||
|
const keyHash = crypto.createHash('sha256').update(apiKey).digest('hex');
|
||||||
|
const apiKeyRecord = await ApiKey.findOne({ key_hash: keyHash, active: true });
|
||||||
|
|
||||||
|
if (!apiKeyRecord) {
|
||||||
|
return res.status(401).json({ error: 'Invalid API key' });
|
||||||
|
}
|
||||||
|
|
||||||
|
req.apiKey = apiKeyRecord;
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
res.status(500).json({ error: 'Authentication failed' });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Generate API key endpoint
|
||||||
|
app.post('/api/apikeys/generate', verifyToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const apiKey = crypto.randomBytes(32).toString('hex');
|
||||||
|
const keyHash = crypto.createHash('sha256').update(apiKey).digest('hex');
|
||||||
|
|
||||||
|
const record = await ApiKey.create({
|
||||||
|
userId: req.user.userId,
|
||||||
|
key_hash: keyHash,
|
||||||
|
name: req.body.name,
|
||||||
|
active: true
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({ apiKey, message: 'Save this key securely' });
|
||||||
|
} catch (error) {
|
||||||
|
res.status(500).json({ error: 'Failed to generate API key' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Protected endpoint with API key
|
||||||
|
app.get('/api/data', verifyApiKey, (req, res) => {
|
||||||
|
res.json({ data: 'sensitive data for API key holder' });
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. **Python Authentication Implementation**
|
||||||
|
|
||||||
|
```python
|
||||||
|
from flask import Flask, request, jsonify
|
||||||
|
from flask_jwt_extended import JWTManager, create_access_token, jwt_required
|
||||||
|
from werkzeug.security import generate_password_hash, check_password_hash
|
||||||
|
from functools import wraps
|
||||||
|
|
||||||
|
app = Flask(__name__)
|
||||||
|
app.config['JWT_SECRET_KEY'] = os.environ['JWT_SECRET_KEY']  # requires 'import os'; never hard-code secrets
|
||||||
|
jwt = JWTManager(app)
|
||||||
|
|
||||||
|
@app.route('/api/auth/login', methods=['POST'])
|
||||||
|
def login():
|
||||||
|
data = request.get_json()
|
||||||
|
user = User.query.filter_by(email=data['email']).first()
|
||||||
|
|
||||||
|
if not user or not check_password_hash(user.password, data['password']):
|
||||||
|
return jsonify({'error': 'Invalid credentials'}), 401
|
||||||
|
|
||||||
|
access_token = create_access_token(
|
||||||
|
identity=user.id,
|
||||||
|
additional_claims={'email': user.email, 'role': user.role}
|
||||||
|
)
|
||||||
|
|
||||||
|
return jsonify({
|
||||||
|
'accessToken': access_token,
|
||||||
|
'user': {'id': user.id, 'email': user.email}
|
||||||
|
}), 200
|
||||||
|
|
||||||
|
@app.route('/api/protected', methods=['GET'])
|
||||||
|
@jwt_required()
|
||||||
|
def protected():
|
||||||
|
from flask_jwt_extended import get_jwt_identity
|
||||||
|
user_id = get_jwt_identity()
|
||||||
|
return jsonify({'userId': user_id}), 200
|
||||||
|
|
||||||
|
def require_role(role):
|
||||||
|
def decorator(fn):
|
||||||
|
@wraps(fn)
|
||||||
|
@jwt_required()
|
||||||
|
def wrapper(*args, **kwargs):
|
||||||
|
from flask_jwt_extended import get_jwt
|
||||||
|
claims = get_jwt()
|
||||||
|
if claims.get('role') != role:
|
||||||
|
return jsonify({'error': 'Forbidden'}), 403
|
||||||
|
return fn(*args, **kwargs)
|
||||||
|
return wrapper
|
||||||
|
return decorator
|
||||||
|
|
||||||
|
@app.route('/api/admin', methods=['GET'])
|
||||||
|
@require_role('admin')
|
||||||
|
def admin_endpoint():
|
||||||
|
return jsonify({'message': 'Admin data'}), 200
|
||||||
|
```
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### ✅ DO
|
||||||
|
- Use HTTPS for all authentication
|
||||||
|
- Store tokens securely (HttpOnly cookies)
|
||||||
|
- Implement token refresh mechanism
|
||||||
|
- Set appropriate token expiration times
|
||||||
|
- Hash and salt passwords
|
||||||
|
- Use strong secret keys
|
||||||
|
- Validate tokens on every request
|
||||||
|
- Implement rate limiting on auth endpoints
|
||||||
|
- Log authentication attempts
|
||||||
|
- Rotate secrets regularly
|
||||||
|
|
||||||
|
### ❌ DON'T
|
||||||
|
- Store passwords in plain text
|
||||||
|
- Send tokens in URL parameters
|
||||||
|
- Use weak secret keys
|
||||||
|
- Store sensitive data in JWT payload
|
||||||
|
- Ignore token expiration
|
||||||
|
- Disable HTTPS in production
|
||||||
|
- Log sensitive tokens
|
||||||
|
- Reuse API keys across services
|
||||||
|
- Store credentials in code
|
||||||
|
|
||||||
|
## Security Headers
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
app.use((req, res, next) => {
|
||||||
|
res.setHeader('X-Content-Type-Options', 'nosniff');
|
||||||
|
res.setHeader('X-Frame-Options', 'DENY');
|
||||||
|
res.setHeader('X-XSS-Protection', '1; mode=block');
|
||||||
|
res.setHeader('Strict-Transport-Security', 'max-age=31536000; includeSubDomains');
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
```
|
||||||
624
.agents/skills/api-contract-testing/SKILL.md
Normal file
624
.agents/skills/api-contract-testing/SKILL.md
Normal file
@@ -0,0 +1,624 @@
|
|||||||
|
---
|
||||||
|
name: api-contract-testing
|
||||||
|
description: Verify API contracts between services to ensure compatibility and prevent breaking changes. Use for contract testing, Pact, API contract validation, schema validation, and consumer-driven contracts.
|
||||||
|
---
|
||||||
|
|
||||||
|
# API Contract Testing
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Contract testing verifies that APIs honor their contracts between consumers and providers. It ensures that service changes don't break dependent consumers without requiring full integration tests. Contract tests validate request/response formats, data types, and API behavior independently.
|
||||||
|
|
||||||
|
## When to Use
|
||||||
|
|
||||||
|
- Testing microservices communication
|
||||||
|
- Preventing breaking API changes
|
||||||
|
- Validating API versioning
|
||||||
|
- Testing consumer-provider contracts
|
||||||
|
- Ensuring backward compatibility
|
||||||
|
- Validating OpenAPI/Swagger specifications
|
||||||
|
- Testing third-party API integrations
|
||||||
|
- Catching contract violations in CI
|
||||||
|
|
||||||
|
## Key Concepts
|
||||||
|
|
||||||
|
- **Consumer**: Service that calls an API
|
||||||
|
- **Provider**: Service that exposes the API
|
||||||
|
- **Contract**: Agreement on API request/response format
|
||||||
|
- **Pact**: Consumer-defined expectations
|
||||||
|
- **Schema**: Structure definition (OpenAPI, JSON Schema)
|
||||||
|
- **Stub**: Generated mock from contract
|
||||||
|
- **Broker**: Central repository for contracts
|
||||||
|
|
||||||
|
## Instructions
|
||||||
|
|
||||||
|
### 1. **Pact for Consumer-Driven Contracts**
|
||||||
|
|
||||||
|
#### Consumer Test (Jest/Pact)
|
||||||
|
```typescript
|
||||||
|
// tests/pact/user-service.pact.test.ts
|
||||||
|
import { PactV3, MatchersV3 } from '@pact-foundation/pact';
|
||||||
|
import { UserService } from '../../src/services/UserService';
|
||||||
|
|
||||||
|
const { like, eachLike, iso8601DateTimeWithMillis } = MatchersV3;
|
||||||
|
|
||||||
|
const provider = new PactV3({
|
||||||
|
consumer: 'OrderService',
|
||||||
|
provider: 'UserService',
|
||||||
|
port: 1234,
|
||||||
|
dir: './pacts',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('User Service Contract', () => {
|
||||||
|
const userService = new UserService('http://localhost:1234');
|
||||||
|
|
||||||
|
describe('GET /users/:id', () => {
|
||||||
|
test('returns user when found', async () => {
|
||||||
|
await provider
|
||||||
|
.given('user with ID 123 exists')
|
||||||
|
.uponReceiving('a request for user 123')
|
||||||
|
.withRequest({
|
||||||
|
method: 'GET',
|
||||||
|
path: '/users/123',
|
||||||
|
headers: {
|
||||||
|
Authorization: like('Bearer token'),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.willRespondWith({
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: {
|
||||||
|
id: like('123'),
|
||||||
|
email: like('user@example.com'),
|
||||||
|
name: like('John Doe'),
|
||||||
|
age: like(30),
|
||||||
|
createdAt: iso8601DateTimeWithMillis('2024-01-01T00:00:00.000Z'),
|
||||||
|
role: like('user'),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.executeTest(async (mockServer) => {
|
||||||
|
const user = await userService.getUser('123');
|
||||||
|
|
||||||
|
expect(user.id).toBe('123');
|
||||||
|
expect(user.email).toBeDefined();
|
||||||
|
expect(user.name).toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('returns 404 when user not found', async () => {
|
||||||
|
await provider
|
||||||
|
.given('user with ID 999 does not exist')
|
||||||
|
.uponReceiving('a request for non-existent user')
|
||||||
|
.withRequest({
|
||||||
|
method: 'GET',
|
||||||
|
path: '/users/999',
|
||||||
|
})
|
||||||
|
.willRespondWith({
|
||||||
|
status: 404,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: {
|
||||||
|
error: like('User not found'),
|
||||||
|
code: like('USER_NOT_FOUND'),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.executeTest(async (mockServer) => {
|
||||||
|
await expect(userService.getUser('999')).rejects.toThrow(
|
||||||
|
'User not found'
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('POST /users', () => {
|
||||||
|
test('creates new user', async () => {
|
||||||
|
await provider
|
||||||
|
.given('user does not exist')
|
||||||
|
.uponReceiving('a request to create user')
|
||||||
|
.withRequest({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/users',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: {
|
||||||
|
email: like('newuser@example.com'),
|
||||||
|
name: like('New User'),
|
||||||
|
age: like(25),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.willRespondWith({
|
||||||
|
status: 201,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: {
|
||||||
|
id: like('new-123'),
|
||||||
|
email: like('newuser@example.com'),
|
||||||
|
name: like('New User'),
|
||||||
|
age: like(25),
|
||||||
|
createdAt: iso8601DateTimeWithMillis(),
|
||||||
|
role: like('user'),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.executeTest(async (mockServer) => {
|
||||||
|
const user = await userService.createUser({
|
||||||
|
email: 'newuser@example.com',
|
||||||
|
name: 'New User',
|
||||||
|
age: 25,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(user.id).toBeDefined();
|
||||||
|
expect(user.email).toBe('newuser@example.com');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('GET /users/:id/orders', () => {
|
||||||
|
test('returns user orders', async () => {
|
||||||
|
await provider
|
||||||
|
.given('user 123 has orders')
|
||||||
|
.uponReceiving('a request for user orders')
|
||||||
|
.withRequest({
|
||||||
|
method: 'GET',
|
||||||
|
path: '/users/123/orders',
|
||||||
|
query: {
|
||||||
|
limit: '10',
|
||||||
|
offset: '0',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.willRespondWith({
|
||||||
|
status: 200,
|
||||||
|
body: {
|
||||||
|
orders: eachLike({
|
||||||
|
id: like('order-1'),
|
||||||
|
total: like(99.99),
|
||||||
|
status: like('completed'),
|
||||||
|
createdAt: iso8601DateTimeWithMillis(),
|
||||||
|
}),
|
||||||
|
total: like(5),
|
||||||
|
hasMore: like(false),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.executeTest(async (mockServer) => {
|
||||||
|
const response = await userService.getUserOrders('123', {
|
||||||
|
limit: 10,
|
||||||
|
offset: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.orders).toBeDefined();
|
||||||
|
expect(Array.isArray(response.orders)).toBe(true);
|
||||||
|
expect(response.total).toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Provider Test (Verify Contract)
|
||||||
|
```typescript
|
||||||
|
// tests/pact/user-service.provider.test.ts
|
||||||
|
import { Verifier } from '@pact-foundation/pact';
|
||||||
|
import path from 'path';
|
||||||
|
import { app } from '../../src/app';
|
||||||
|
import { setupTestDB, teardownTestDB } from '../helpers/db';
|
||||||
|
|
||||||
|
describe('Pact Provider Verification', () => {
|
||||||
|
let server;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
await setupTestDB();
|
||||||
|
server = app.listen(3001);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
await teardownTestDB();
|
||||||
|
server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('validates the expectations of OrderService', () => {
|
||||||
|
return new Verifier({
|
||||||
|
provider: 'UserService',
|
||||||
|
providerBaseUrl: 'http://localhost:3001',
|
||||||
|
pactUrls: [
|
||||||
|
path.resolve(__dirname, '../../pacts/orderservice-userservice.json'),
|
||||||
|
],
|
||||||
|
// Provider state setup
|
||||||
|
stateHandlers: {
|
||||||
|
'user with ID 123 exists': async () => {
|
||||||
|
await createTestUser({ id: '123', name: 'John Doe' });
|
||||||
|
},
|
||||||
|
'user with ID 999 does not exist': async () => {
|
||||||
|
await deleteUser('999');
|
||||||
|
},
|
||||||
|
'user 123 has orders': async () => {
|
||||||
|
await createTestUser({ id: '123' });
|
||||||
|
await createTestOrder({ userId: '123' });
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.verifyProvider()
|
||||||
|
.then((output) => {
|
||||||
|
console.log('Pact Verification Complete!');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. **OpenAPI Schema Validation**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// tests/contract/openapi.test.ts
|
||||||
|
import request from 'supertest';
|
||||||
|
import { app } from '../../src/app';
|
||||||
|
import OpenAPIValidator from 'express-openapi-validator';
|
||||||
|
import fs from 'fs';
|
||||||
|
import yaml from 'js-yaml';
|
||||||
|
|
||||||
|
describe('OpenAPI Contract Validation', () => {
|
||||||
|
let validator;
|
||||||
|
|
||||||
|
beforeAll(() => {
|
||||||
|
const spec = yaml.load(
|
||||||
|
fs.readFileSync('./openapi.yaml', 'utf8')
|
||||||
|
);
|
||||||
|
|
||||||
|
validator = OpenAPIValidator.middleware({
|
||||||
|
apiSpec: spec,
|
||||||
|
validateRequests: true,
|
||||||
|
validateResponses: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('GET /users/:id matches schema', async () => {
|
||||||
|
const response = await request(app)
|
||||||
|
.get('/users/123')
|
||||||
|
.expect(200);
|
||||||
|
|
||||||
|
// Validate against OpenAPI schema
|
||||||
|
expect(response.body).toMatchObject({
|
||||||
|
id: expect.any(String),
|
||||||
|
email: expect.stringMatching(/^[\w-\.]+@([\w-]+\.)+[\w-]{2,4}$/),
|
||||||
|
name: expect.any(String),
|
||||||
|
age: expect.any(Number),
|
||||||
|
createdAt: expect.stringMatching(/^\d{4}-\d{2}-\d{2}T/),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('POST /users validates request body', async () => {
|
||||||
|
const invalidUser = {
|
||||||
|
email: 'invalid-email', // Should fail validation
|
||||||
|
name: 'Test',
|
||||||
|
};
|
||||||
|
|
||||||
|
await request(app)
|
||||||
|
.post('/users')
|
||||||
|
.send(invalidUser)
|
||||||
|
.expect(400);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. **JSON Schema Validation**
|
||||||
|
|
||||||
|
```python
|
||||||
|
# tests/contract/test_schema_validation.py
|
||||||
|
import pytest
|
||||||
|
import jsonschema
|
||||||
|
from jsonschema import validate
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Define schemas
|
||||||
|
USER_SCHEMA = {
|
||||||
|
"type": "object",
|
||||||
|
"required": ["id", "email", "name"],
|
||||||
|
"properties": {
|
||||||
|
"id": {"type": "string"},
|
||||||
|
"email": {"type": "string", "format": "email"},
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer", "minimum": 0, "maximum": 150},
|
||||||
|
"role": {"type": "string", "enum": ["user", "admin"]},
|
||||||
|
"createdAt": {"type": "string", "format": "date-time"},
|
||||||
|
},
|
||||||
|
"additionalProperties": False
|
||||||
|
}
|
||||||
|
|
||||||
|
ORDER_SCHEMA = {
|
||||||
|
"type": "object",
|
||||||
|
"required": ["id", "userId", "total", "status"],
|
||||||
|
"properties": {
|
||||||
|
"id": {"type": "string"},
|
||||||
|
"userId": {"type": "string"},
|
||||||
|
"total": {"type": "number", "minimum": 0},
|
||||||
|
"status": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["pending", "paid", "shipped", "delivered", "cancelled"]
|
||||||
|
},
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"required": ["productId", "quantity", "price"],
|
||||||
|
"properties": {
|
||||||
|
"productId": {"type": "string"},
|
||||||
|
"quantity": {"type": "integer", "minimum": 1},
|
||||||
|
"price": {"type": "number", "minimum": 0},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class TestAPIContracts:
|
||||||
|
def test_get_user_response_schema(self, api_client):
|
||||||
|
"""Validate user endpoint response against schema."""
|
||||||
|
response = api_client.get('/api/users/123')
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
# Validate against schema
|
||||||
|
validate(instance=data, schema=USER_SCHEMA)
|
||||||
|
|
||||||
|
def test_create_user_request_schema(self, api_client):
|
||||||
|
"""Validate create user request body."""
|
||||||
|
valid_user = {
|
||||||
|
"email": "test@example.com",
|
||||||
|
"name": "Test User",
|
||||||
|
"age": 30,
|
||||||
|
}
|
||||||
|
|
||||||
|
response = api_client.post('/api/users', json=valid_user)
|
||||||
|
assert response.status_code == 201
|
||||||
|
|
||||||
|
# Response should also match schema
|
||||||
|
validate(instance=response.json(), schema=USER_SCHEMA)
|
||||||
|
|
||||||
|
def test_invalid_request_rejected(self, api_client):
|
||||||
|
"""Invalid requests should be rejected."""
|
||||||
|
invalid_user = {
|
||||||
|
"email": "not-an-email",
|
||||||
|
"age": -5, # Invalid age
|
||||||
|
}
|
||||||
|
|
||||||
|
response = api_client.post('/api/users', json=invalid_user)
|
||||||
|
assert response.status_code == 400
|
||||||
|
|
||||||
|
def test_order_response_schema(self, api_client):
|
||||||
|
"""Validate order endpoint response."""
|
||||||
|
response = api_client.get('/api/orders/order-123')
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
validate(instance=response.json(), schema=ORDER_SCHEMA)
|
||||||
|
|
||||||
|
def test_order_items_array_validation(self, api_client):
|
||||||
|
"""Validate nested array schema."""
|
||||||
|
order_data = {
|
||||||
|
"userId": "user-123",
|
||||||
|
"items": [
|
||||||
|
{"productId": "prod-1", "quantity": 2, "price": 29.99},
|
||||||
|
{"productId": "prod-2", "quantity": 1, "price": 49.99},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
response = api_client.post('/api/orders', json=order_data)
|
||||||
|
assert response.status_code == 201
|
||||||
|
|
||||||
|
result = response.json()
|
||||||
|
validate(instance=result, schema=ORDER_SCHEMA)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. **REST Assured for Java**
|
||||||
|
|
||||||
|
```java
|
||||||
|
// ContractTest.java
|
||||||
|
import io.restassured.RestAssured;
|
||||||
|
import io.restassured.module.jsv.JsonSchemaValidator;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import static io.restassured.RestAssured.*;
|
||||||
|
import static org.hamcrest.Matchers.*;
|
||||||
|
|
||||||
|
public class UserAPIContractTest {
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void getUserShouldMatchSchema() {
|
||||||
|
given()
|
||||||
|
.pathParam("id", "123")
|
||||||
|
.when()
|
||||||
|
.get("/api/users/{id}")
|
||||||
|
.then()
|
||||||
|
.statusCode(200)
|
||||||
|
.body(JsonSchemaValidator.matchesJsonSchemaInClasspath("schemas/user-schema.json"))
|
||||||
|
.body("id", notNullValue())
|
||||||
|
.body("email", matchesPattern("^[\\w-\\.]+@([\\w-]+\\.)+[\\w-]{2,4}$"))
|
||||||
|
.body("age", greaterThanOrEqualTo(0));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void createUserShouldValidateRequest() {
|
||||||
|
String userJson = """
|
||||||
|
{
|
||||||
|
"email": "test@example.com",
|
||||||
|
"name": "Test User",
|
||||||
|
"age": 30
|
||||||
|
}
|
||||||
|
""";
|
||||||
|
|
||||||
|
given()
|
||||||
|
.contentType("application/json")
|
||||||
|
.body(userJson)
|
||||||
|
.when()
|
||||||
|
.post("/api/users")
|
||||||
|
.then()
|
||||||
|
.statusCode(201)
|
||||||
|
.body("id", notNullValue())
|
||||||
|
.body("email", equalTo("test@example.com"))
|
||||||
|
.body("createdAt", matchesPattern("\\d{4}-\\d{2}-\\d{2}T.*"));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void getUserOrdersShouldReturnArray() {
|
||||||
|
given()
|
||||||
|
.pathParam("id", "123")
|
||||||
|
.queryParam("limit", 10)
|
||||||
|
.when()
|
||||||
|
.get("/api/users/{id}/orders")
|
||||||
|
.then()
|
||||||
|
.statusCode(200)
|
||||||
|
.body("orders", isA(java.util.List.class))
|
||||||
|
.body("orders[0].id", notNullValue())
|
||||||
|
.body("orders[0].status", isIn(Arrays.asList(
|
||||||
|
"pending", "paid", "shipped", "delivered", "cancelled"
|
||||||
|
)))
|
||||||
|
.body("total", greaterThanOrEqualTo(0));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void invalidRequestShouldReturn400() {
|
||||||
|
String invalidUser = """
|
||||||
|
{
|
||||||
|
"email": "not-an-email",
|
||||||
|
"age": -5
|
||||||
|
}
|
||||||
|
""";
|
||||||
|
|
||||||
|
given()
|
||||||
|
.contentType("application/json")
|
||||||
|
.body(invalidUser)
|
||||||
|
.when()
|
||||||
|
.post("/api/users")
|
||||||
|
.then()
|
||||||
|
.statusCode(400)
|
||||||
|
.body("error", notNullValue());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. **Contract Testing with Postman**
|
||||||
|
|
||||||
|
```json
|
||||||
|
// postman-collection.json
|
||||||
|
{
|
||||||
|
"info": {
|
||||||
|
"name": "User API Contract Tests"
|
||||||
|
},
|
||||||
|
"item": [
|
||||||
|
{
|
||||||
|
"name": "Get User",
|
||||||
|
"request": {
|
||||||
|
"method": "GET",
|
||||||
|
"url": "{{baseUrl}}/users/{{userId}}"
|
||||||
|
},
|
||||||
|
"test": "
|
||||||
|
pm.test('Response status is 200', () => {
|
||||||
|
pm.response.to.have.status(200);
|
||||||
|
});
|
||||||
|
|
||||||
|
pm.test('Response matches schema', () => {
|
||||||
|
const schema = {
|
||||||
|
type: 'object',
|
||||||
|
required: ['id', 'email', 'name'],
|
||||||
|
properties: {
|
||||||
|
id: { type: 'string' },
|
||||||
|
email: { type: 'string', format: 'email' },
|
||||||
|
name: { type: 'string' },
|
||||||
|
age: { type: 'integer' }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
pm.response.to.have.jsonSchema(schema);
|
||||||
|
});
|
||||||
|
|
||||||
|
pm.test('Email format is valid', () => {
|
||||||
|
const data = pm.response.json();
|
||||||
|
pm.expect(data.email).to.match(/^[\\w-\\.]+@([\\w-]+\\.)+[\\w-]{2,4}$/);
|
||||||
|
});
|
||||||
|
"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6. **Pact Broker Integration**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# .github/workflows/contract-tests.yml
|
||||||
|
name: Contract Tests
|
||||||
|
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
consumer-tests:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
|
||||||
|
- run: npm ci
|
||||||
|
- run: npm run test:pact
|
||||||
|
|
||||||
|
- name: Publish Pacts
|
||||||
|
run: |
|
||||||
|
npx pact-broker publish ./pacts \
|
||||||
|
--consumer-app-version=${{ github.sha }} \
|
||||||
|
--broker-base-url=${{ secrets.PACT_BROKER_URL }} \
|
||||||
|
--broker-token=${{ secrets.PACT_BROKER_TOKEN }}
|
||||||
|
|
||||||
|
provider-tests:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: consumer-tests
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
|
||||||
|
- run: npm ci
|
||||||
|
- run: npm run test:pact:provider
|
||||||
|
|
||||||
|
- name: Can I Deploy?
|
||||||
|
run: |
|
||||||
|
npx pact-broker can-i-deploy \
|
||||||
|
--pacticipant=UserService \
|
||||||
|
--version=${{ github.sha }} \
|
||||||
|
--to-environment=production \
|
||||||
|
--broker-base-url=${{ secrets.PACT_BROKER_URL }} \
|
||||||
|
--broker-token=${{ secrets.PACT_BROKER_TOKEN }}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### ✅ DO
|
||||||
|
- Test contracts from consumer perspective
|
||||||
|
- Use matchers for flexible matching
|
||||||
|
- Validate schema structure, not specific values
|
||||||
|
- Version your contracts
|
||||||
|
- Test error responses
|
||||||
|
- Use Pact broker for contract sharing
|
||||||
|
- Run contract tests in CI
|
||||||
|
- Test backward compatibility
|
||||||
|
|
||||||
|
### ❌ DON'T
|
||||||
|
- Test business logic in contract tests
|
||||||
|
- Hard-code specific values in contracts
|
||||||
|
- Skip error scenarios
|
||||||
|
- Test UI in contract tests
|
||||||
|
- Ignore contract versioning
|
||||||
|
- Deploy without contract verification
|
||||||
|
- Test implementation details
|
||||||
|
- Mock contract tests
|
||||||
|
|
||||||
|
## Tools
|
||||||
|
|
||||||
|
- **Pact**: Consumer-driven contracts (multiple languages)
|
||||||
|
- **Spring Cloud Contract**: JVM contract testing
|
||||||
|
- **OpenAPI/Swagger**: API specification and validation
|
||||||
|
- **Postman**: API contract testing
|
||||||
|
- **REST Assured**: Java API testing
|
||||||
|
- **Dredd**: OpenAPI/API Blueprint testing
|
||||||
|
- **Spectral**: OpenAPI linting
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
See also: integration-testing, api-versioning-strategy, continuous-testing for comprehensive API testing strategies.
|
||||||
659
.agents/skills/api-security-hardening/SKILL.md
Normal file
659
.agents/skills/api-security-hardening/SKILL.md
Normal file
@@ -0,0 +1,659 @@
|
|||||||
|
---
|
||||||
|
name: api-security-hardening
|
||||||
|
description: Secure REST APIs with authentication, rate limiting, CORS, input validation, and security middleware. Use when building or hardening API endpoints against common attacks.
|
||||||
|
---
|
||||||
|
|
||||||
|
# API Security Hardening
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Implement comprehensive API security measures including authentication, authorization, rate limiting, input validation, and attack prevention to protect against common vulnerabilities.
|
||||||
|
|
||||||
|
## When to Use
|
||||||
|
|
||||||
|
- New API development
|
||||||
|
- Security audit remediation
|
||||||
|
- Production API hardening
|
||||||
|
- Compliance requirements
|
||||||
|
- High-traffic API protection
|
||||||
|
- Public API exposure
|
||||||
|
|
||||||
|
## Implementation Examples
|
||||||
|
|
||||||
|
### 1. **Node.js/Express API Security**
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// secure-api.js - Comprehensive API security
const express = require('express');
const helmet = require('helmet');
const rateLimit = require('express-rate-limit');
const mongoSanitize = require('express-mongo-sanitize');
const xss = require('xss-clean');
const hpp = require('hpp');
const cors = require('cors');
const jwt = require('jsonwebtoken');
const validator = require('validator');
const { randomUUID } = require('crypto');

/**
 * Express server hardened with layered security middleware:
 * security headers, CORS whitelisting, rate limiting, body-size
 * limits, NoSQL/XSS/HPP sanitization, request IDs, and security
 * logging, plus JWT authentication and declarative input validation.
 */
class SecureAPIServer {
  constructor() {
    this.app = express();
    this.setupSecurityMiddleware();
    this.setupRoutes();
  }

  /**
   * Registers security middleware. Order matters: headers and CORS
   * first, then rate limits, then body parsing and sanitizers.
   */
  setupSecurityMiddleware() {
    // 1. Helmet - set security headers (CSP, HSTS, etc.)
    this.app.use(helmet({
      contentSecurityPolicy: {
        directives: {
          defaultSrc: ["'self'"],
          styleSrc: ["'self'", "'unsafe-inline'"],
          scriptSrc: ["'self'"],
          imgSrc: ["'self'", "data:", "https:"]
        }
      },
      hsts: {
        maxAge: 31536000, // one year, in seconds
        includeSubDomains: true,
        preload: true
      }
    }));

    // 2. CORS - whitelist origins; requests without an Origin header
    // (curl, server-to-server) are allowed through.
    const corsOptions = {
      origin: (origin, callback) => {
        const whitelist = [
          'https://example.com',
          'https://app.example.com'
        ];

        if (!origin || whitelist.includes(origin)) {
          callback(null, true);
        } else {
          callback(new Error('Not allowed by CORS'));
        }
      },
      credentials: true,
      optionsSuccessStatus: 200,
      methods: ['GET', 'POST', 'PUT', 'DELETE'],
      allowedHeaders: ['Content-Type', 'Authorization']
    };

    this.app.use(cors(corsOptions));

    // 3. Rate limiting - a general limit for all /api/ routes plus a
    // stricter one for authentication endpoints.
    const generalLimiter = rateLimit({
      windowMs: 15 * 60 * 1000, // 15 minutes
      max: 100, // limit each IP to 100 requests per windowMs
      message: 'Too many requests from this IP',
      standardHeaders: true,
      legacyHeaders: false,
      handler: (req, res) => {
        res.status(429).json({
          error: 'rate_limit_exceeded',
          message: 'Too many requests, please try again later',
          retryAfter: req.rateLimit.resetTime
        });
      }
    });

    const authLimiter = rateLimit({
      windowMs: 15 * 60 * 1000,
      max: 5, // stricter limit for auth endpoints
      skipSuccessfulRequests: true
    });

    this.app.use('/api/', generalLimiter);
    this.app.use('/api/auth/', authLimiter);

    // 4. Body parsing with size limits to blunt payload flooding
    this.app.use(express.json({ limit: '10kb' }));
    this.app.use(express.urlencoded({ extended: true, limit: '10kb' }));

    // 5. NoSQL injection prevention (strips operator-like keys)
    this.app.use(mongoSanitize());

    // 6. XSS protection
    // NOTE(review): xss-clean is unmaintained; consider output encoding
    // or a maintained sanitizer instead — confirm before production use.
    this.app.use(xss());

    // 7. HTTP Parameter Pollution prevention
    this.app.use(hpp());

    // 8. Request ID for tracking / cross-log correlation
    this.app.use((req, res, next) => {
      req.id = randomUUID();
      res.setHeader('X-Request-ID', req.id);
      next();
    });

    // 9. Security logging
    this.app.use(this.securityLogger());
  }

  /**
   * Structured request logger that flags auth failures (401/403) and
   * server errors (5xx). Logs on response 'finish' so the final
   * status code and total duration are known.
   */
  securityLogger() {
    return (req, res, next) => {
      const startTime = Date.now();

      res.on('finish', () => {
        const duration = Date.now() - startTime;

        const logEntry = {
          timestamp: new Date().toISOString(),
          requestId: req.id,
          method: req.method,
          path: req.path,
          statusCode: res.statusCode,
          duration,
          ip: req.ip,
          userAgent: req.get('user-agent')
        };

        // Log suspicious activity
        if (res.statusCode === 401 || res.statusCode === 403) {
          console.warn('Security event:', logEntry);
        }

        if (res.statusCode >= 500) {
          console.error('Server error:', logEntry);
        }
      });

      next();
    };
  }

  /**
   * JWT bearer-token authentication middleware. Pins the algorithm to
   * HS256 and checks issuer/audience; attaches the decoded payload to
   * req.user on success. Responds 401 on any failure.
   */
  authenticateJWT() {
    return (req, res, next) => {
      const authHeader = req.headers.authorization;

      if (!authHeader || !authHeader.startsWith('Bearer ')) {
        return res.status(401).json({
          error: 'unauthorized',
          message: 'Missing or invalid authorization header'
        });
      }

      const token = authHeader.substring(7); // strip "Bearer "

      try {
        const decoded = jwt.verify(token, process.env.JWT_SECRET, {
          algorithms: ['HS256'], // pin algorithm; blocks alg-confusion attacks
          issuer: 'api.example.com',
          audience: 'api.example.com'
        });

        req.user = decoded;
        next();
      } catch (error) {
        if (error.name === 'TokenExpiredError') {
          return res.status(401).json({
            error: 'token_expired',
            message: 'Token has expired'
          });
        }

        return res.status(401).json({
          error: 'invalid_token',
          message: 'Invalid token'
        });
      }
    };
  }

  /**
   * Declarative input-validation middleware.
   * @param {Object} schema - shape: { body: { field: { required, type,
   *   minLength, maxLength, pattern } } }; type is 'email'|'uuid'|'url'.
   * @returns {Function} middleware that responds 400 with all
   *   accumulated errors, or calls next() when the body is valid.
   */
  validateInput(schema) {
    return (req, res, next) => {
      const errors = [];

      if (schema.body) {
        for (const [field, rules] of Object.entries(schema.body)) {
          const value = req.body[field];

          // Treat null/undefined/'' as missing, but allow 0 and false —
          // a bare truthiness check would reject legitimate falsy values
          // and also skip their validation entirely.
          const missing = value == null || value === '';

          if (rules.required && missing) {
            errors.push(`${field} is required`);
            continue;
          }

          if (!missing) {
            const str = String(value);

            // Type validation
            if (rules.type === 'email' && !validator.isEmail(str)) {
              errors.push(`${field} must be a valid email`);
            }

            if (rules.type === 'uuid' && !validator.isUUID(str)) {
              errors.push(`${field} must be a valid UUID`);
            }

            if (rules.type === 'url' && !validator.isURL(str)) {
              errors.push(`${field} must be a valid URL`);
            }

            // Length validation
            if (rules.minLength && str.length < rules.minLength) {
              errors.push(`${field} must be at least ${rules.minLength} characters`);
            }

            if (rules.maxLength && str.length > rules.maxLength) {
              errors.push(`${field} must be at most ${rules.maxLength} characters`);
            }

            // Pattern validation
            if (rules.pattern && !rules.pattern.test(str)) {
              errors.push(`${field} format is invalid`);
            }
          }
        }
      }

      if (errors.length > 0) {
        return res.status(400).json({
          error: 'validation_error',
          message: 'Input validation failed',
          details: errors
        });
      }

      next();
    };
  }

  /**
   * Role-based authorization middleware. Must run after
   * authenticateJWT() since it relies on req.user. With no roles
   * given, authentication alone is sufficient.
   */
  authorize(...roles) {
    return (req, res, next) => {
      if (!req.user) {
        return res.status(401).json({
          error: 'unauthorized',
          message: 'Authentication required'
        });
      }

      if (roles.length > 0 && !roles.includes(req.user.role)) {
        return res.status(403).json({
          error: 'forbidden',
          message: 'Insufficient permissions'
        });
      }

      next();
    };
  }

  /** Registers routes and the final error-handling middleware. */
  setupRoutes() {
    // Public endpoint
    this.app.get('/api/health', (req, res) => {
      res.json({ status: 'healthy' });
    });

    // Protected endpoint with validation
    this.app.post('/api/users',
      this.authenticateJWT(),
      this.authorize('admin'),
      this.validateInput({
        body: {
          email: { required: true, type: 'email' },
          name: { required: true, minLength: 2, maxLength: 100 },
          password: { required: true, minLength: 8 }
        }
      }),
      async (req, res) => {
        try {
          // Sanitized and validated input
          const { email, name, password } = req.body;

          // Process request
          res.status(201).json({
            message: 'User created successfully',
            userId: '123'
          });
        } catch (error) {
          // Generic message only — never leak stack traces to clients
          res.status(500).json({
            error: 'internal_error',
            message: 'An error occurred'
          });
        }
      }
    );

    // Error handling middleware (must be registered last)
    this.app.use((err, req, res, next) => {
      console.error('Unhandled error:', err);

      res.status(500).json({
        error: 'internal_error',
        message: 'An unexpected error occurred',
        requestId: req.id
      });
    });
  }

  /**
   * Starts the HTTP listener.
   * @param {number} [port=3000] - TCP port to bind.
   */
  start(port = 3000) {
    this.app.listen(port, () => {
      console.log(`Secure API server running on port ${port}`);
    });
  }
}

// Usage
const server = new SecureAPIServer();
server.start(3000);
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. **Python FastAPI Security**
|
||||||
|
|
||||||
|
```python
|
||||||
|
# secure_api.py
from fastapi import FastAPI, HTTPException, Depends, Request, Security, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.trustedhost import TrustedHostMiddleware
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.util import get_remote_address
from slowapi.errors import RateLimitExceeded
from pydantic import BaseModel, EmailStr, validator, Field
import jwt
import os
from datetime import datetime, timedelta, timezone
import re
from typing import Optional, List
import secrets

app = FastAPI()
security = HTTPBearer()
limiter = Limiter(key_func=get_remote_address)

# Secrets come from the environment — never hard-code them in source.
# Failing fast at startup beats silently running with a known key.
JWT_SECRET = os.environ["JWT_SECRET"]
EXPECTED_API_KEY = os.environ["API_KEY"]

# Rate limiting
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)

# CORS configuration - explicit origin whitelist (no wildcard with credentials)
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "https://example.com",
        "https://app.example.com"
    ],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE"],
    allow_headers=["Content-Type", "Authorization"],
    max_age=3600
)

# Trusted hosts - reject requests with unexpected Host headers
app.add_middleware(
    TrustedHostMiddleware,
    allowed_hosts=["example.com", "*.example.com"]
)

# Security headers middleware
@app.middleware("http")
async def add_security_headers(request: Request, call_next):
    """Attach standard security headers to every response."""
    response = await call_next(request)

    response.headers["X-Content-Type-Options"] = "nosniff"
    response.headers["X-Frame-Options"] = "DENY"
    response.headers["X-XSS-Protection"] = "1; mode=block"
    response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"
    response.headers["Content-Security-Policy"] = "default-src 'self'"
    response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin"
    response.headers["Permissions-Policy"] = "geolocation=(), microphone=(), camera=()"

    return response

# Input validation models
class CreateUserRequest(BaseModel):
    # Validated request body for user creation.
    email: EmailStr
    name: str = Field(..., min_length=2, max_length=100)
    password: str = Field(..., min_length=8)

    @validator('password')
    def validate_password(cls, v):
        """Enforce upper/lower/digit/special-character complexity."""
        if not re.search(r'[A-Z]', v):
            raise ValueError('Password must contain uppercase letter')
        if not re.search(r'[a-z]', v):
            raise ValueError('Password must contain lowercase letter')
        if not re.search(r'\d', v):
            raise ValueError('Password must contain digit')
        if not re.search(r'[!@#$%^&*]', v):
            raise ValueError('Password must contain special character')
        return v

    @validator('name')
    def validate_name(cls, v):
        # Prevent XSS in name field
        if re.search(r'[<>]', v):
            raise ValueError('Name contains invalid characters')
        return v

class APIKeyRequest(BaseModel):
    # Validated request body for API-key generation.
    name: str = Field(..., max_length=100)
    expires_in_days: int = Field(30, ge=1, le=365)

# JWT token verification
def verify_token(credentials: HTTPAuthorizationCredentials = Security(security)):
    """Decode and validate a bearer JWT; return the token payload.

    Pins the algorithm to HS256 and checks audience/issuer claims.
    Raises HTTP 401 on expiry or any other validation failure.
    """
    try:
        token = credentials.credentials

        payload = jwt.decode(
            token,
            JWT_SECRET,
            algorithms=["HS256"],
            audience="api.example.com",
            issuer="api.example.com"
        )

        return payload

    except jwt.ExpiredSignatureError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token has expired"
        )
    except jwt.InvalidTokenError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token"
        )

# Role-based authorization
def require_role(required_roles: List[str]):
    """Dependency factory: require the JWT 'role' claim to be one of
    ``required_roles``. Raises HTTP 403 otherwise."""
    def role_checker(token_payload: dict = Depends(verify_token)):
        user_role = token_payload.get('role')

        if user_role not in required_roles:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Insufficient permissions"
            )

        return token_payload

    return role_checker

# API key authentication
def verify_api_key(api_key: str):
    """Validate an API key; raises HTTP 401 if it does not match."""
    # Constant-time comparison to prevent timing attacks
    if not secrets.compare_digest(api_key, EXPECTED_API_KEY):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid API key"
        )
    return True

# Endpoints
# NOTE: slowapi requires rate-limited endpoints to accept the Request
# object — without a `request: Request` parameter @limiter.limit fails
# at runtime.
@app.get("/api/health")
@limiter.limit("100/minute")
async def health_check(request: Request):
    return {"status": "healthy"}

@app.post("/api/users")
@limiter.limit("10/minute")
async def create_user(
    request: Request,
    user: CreateUserRequest,
    token_payload: dict = Depends(require_role(["admin"]))
):
    """Create new user (admin only)"""

    # Hash password before storing
    # hashed_password = bcrypt.hashpw(user.password.encode(), bcrypt.gensalt())

    return {
        "message": "User created successfully",
        "user_id": "123"
    }

@app.post("/api/keys")
@limiter.limit("5/hour")
async def create_api_key(
    request: Request,
    key_request: APIKeyRequest,
    token_payload: dict = Depends(verify_token)
):
    """Generate API key"""

    # Generate secure random API key
    api_key = secrets.token_urlsafe(32)

    # Timezone-aware UTC so expiry does not drift with server locale
    expires_at = datetime.now(timezone.utc) + timedelta(days=key_request.expires_in_days)

    return {
        "api_key": api_key,
        "expires_at": expires_at.isoformat(),
        "name": key_request.name
    }

@app.get("/api/protected")
async def protected_endpoint(token_payload: dict = Depends(verify_token)):
    return {
        "message": "Access granted",
        "user_id": token_payload.get("sub")
    }

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000, ssl_certfile="cert.pem", ssl_keyfile="key.pem")
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. **API Gateway Security Configuration**
|
||||||
|
|
||||||
|
```nginx
|
||||||
|
# nginx-api-gateway.conf
|
||||||
|
# Nginx API Gateway with security hardening
|
||||||
|
|
||||||
|
http {
|
||||||
|
# Security headers
|
||||||
|
add_header X-Frame-Options "DENY" always;
|
||||||
|
add_header X-Content-Type-Options "nosniff" always;
|
||||||
|
add_header X-XSS-Protection "1; mode=block" always;
|
||||||
|
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
|
||||||
|
add_header Content-Security-Policy "default-src 'self'" always;
|
||||||
|
|
||||||
|
# Rate limiting zones
|
||||||
|
limit_req_zone $binary_remote_addr zone=api_limit:10m rate=10r/s;
|
||||||
|
limit_req_zone $binary_remote_addr zone=auth_limit:10m rate=1r/s;
|
||||||
|
limit_conn_zone $binary_remote_addr zone=conn_limit:10m;
|
||||||
|
|
||||||
|
# Request body size limit
|
||||||
|
client_max_body_size 10M;
|
||||||
|
client_body_buffer_size 128k;
|
||||||
|
|
||||||
|
# Timeout settings
|
||||||
|
client_body_timeout 12;
|
||||||
|
client_header_timeout 12;
|
||||||
|
send_timeout 10;
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 443 ssl http2;
|
||||||
|
server_name api.example.com;
|
||||||
|
|
||||||
|
# SSL configuration
|
||||||
|
ssl_certificate /etc/ssl/certs/api.example.com.crt;
|
||||||
|
ssl_certificate_key /etc/ssl/private/api.example.com.key;
|
||||||
|
ssl_protocols TLSv1.2 TLSv1.3;
|
||||||
|
ssl_ciphers HIGH:!aNULL:!MD5;
|
||||||
|
ssl_prefer_server_ciphers on;
|
||||||
|
ssl_session_cache shared:SSL:10m;
|
||||||
|
ssl_session_timeout 10m;
|
||||||
|
|
||||||
|
# API endpoints
|
||||||
|
location /api/ {
|
||||||
|
# Rate limiting
|
||||||
|
limit_req zone=api_limit burst=20 nodelay;
|
||||||
|
limit_conn conn_limit 10;
|
||||||
|
|
||||||
|
# CORS headers
|
||||||
|
add_header Access-Control-Allow-Origin "https://app.example.com" always;
|
||||||
|
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE" always;
|
||||||
|
add_header Access-Control-Allow-Headers "Authorization, Content-Type" always;
|
||||||
|
|
||||||
|
# Block common exploits
|
||||||
|
if ($request_method !~ ^(GET|POST|PUT|DELETE|HEAD)$ ) {
|
||||||
|
return 444;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Proxy to backend
|
||||||
|
proxy_pass http://backend:3000;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
|
||||||
|
# Timeouts
|
||||||
|
proxy_connect_timeout 60s;
|
||||||
|
proxy_send_timeout 60s;
|
||||||
|
proxy_read_timeout 60s;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Auth endpoints with stricter limits
|
||||||
|
location /api/auth/ {
|
||||||
|
limit_req zone=auth_limit burst=5 nodelay;
|
||||||
|
|
||||||
|
proxy_pass http://backend:3000;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Block access to sensitive files
|
||||||
|
location ~ /\. {
|
||||||
|
deny all;
|
||||||
|
return 404;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### ✅ DO
|
||||||
|
- Use HTTPS everywhere
|
||||||
|
- Implement rate limiting
|
||||||
|
- Validate all inputs
|
||||||
|
- Use security headers
|
||||||
|
- Log security events
|
||||||
|
- Implement CORS properly
|
||||||
|
- Use strong authentication
|
||||||
|
- Version your APIs
|
||||||
|
|
||||||
|
### ❌ DON'T
|
||||||
|
- Expose stack traces
|
||||||
|
- Return detailed errors
|
||||||
|
- Trust user input
|
||||||
|
- Use HTTP for APIs
|
||||||
|
- Skip input validation
|
||||||
|
- Ignore rate limiting
|
||||||
|
|
||||||
|
## Security Checklist
|
||||||
|
|
||||||
|
- [ ] HTTPS enforced
|
||||||
|
- [ ] Authentication required
|
||||||
|
- [ ] Authorization implemented
|
||||||
|
- [ ] Rate limiting active
|
||||||
|
- [ ] Input validation
|
||||||
|
- [ ] CORS configured
|
||||||
|
- [ ] Security headers set
|
||||||
|
- [ ] Error handling secure
|
||||||
|
- [ ] Logging enabled
|
||||||
|
- [ ] API versioning
|
||||||
|
|
||||||
|
## Resources
|
||||||
|
|
||||||
|
- [OWASP API Security Top 10](https://owasp.org/www-project-api-security/)
|
||||||
|
- [API Security Best Practices](https://github.com/shieldfy/API-Security-Checklist)
|
||||||
|
- [JWT Best Practices](https://tools.ietf.org/html/rfc8725)
|
||||||
384
.agents/skills/database-migration-management/SKILL.md
Normal file
384
.agents/skills/database-migration-management/SKILL.md
Normal file
@@ -0,0 +1,384 @@
|
|||||||
|
---
|
||||||
|
name: database-migration-management
|
||||||
|
description: Manage database migrations and schema versioning. Use when planning migrations, version control, rollback strategies, or data transformations in PostgreSQL and MySQL.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Database Migration Management
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Implement robust database migration systems with version control, rollback capabilities, and data transformation strategies. Includes migration frameworks and production deployment patterns.
|
||||||
|
|
||||||
|
## When to Use
|
||||||
|
|
||||||
|
- Schema versioning and evolution
|
||||||
|
- Data transformations and cleanup
|
||||||
|
- Adding/removing tables and columns
|
||||||
|
- Index creation and optimization
|
||||||
|
- Migration testing and validation
|
||||||
|
- Rollback planning and execution
|
||||||
|
- Multi-environment deployments
|
||||||
|
|
||||||
|
## Migration Framework Setup
|
||||||
|
|
||||||
|
### PostgreSQL - Schema Versioning
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Create migrations tracking table
|
||||||
|
CREATE TABLE schema_migrations (
|
||||||
|
version BIGINT PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
duration_ms INTEGER,
|
||||||
|
checksum VARCHAR(64)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Create migration log table
|
||||||
|
CREATE TABLE migration_logs (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
version BIGINT NOT NULL,
|
||||||
|
status VARCHAR(20) NOT NULL,
|
||||||
|
error_message TEXT,
|
||||||
|
rolled_back_at TIMESTAMP,
|
||||||
|
executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Function to record migration
|
||||||
|
CREATE OR REPLACE FUNCTION record_migration(
|
||||||
|
p_version BIGINT,
|
||||||
|
p_name VARCHAR,
|
||||||
|
p_duration_ms INTEGER
|
||||||
|
) RETURNS void AS $$
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO schema_migrations (version, name, duration_ms)
|
||||||
|
VALUES (p_version, p_name, p_duration_ms)
|
||||||
|
ON CONFLICT (version) DO UPDATE
|
||||||
|
SET executed_at = CURRENT_TIMESTAMP;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
```
|
||||||
|
|
||||||
|
### MySQL - Migration Tracking
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Create migrations table for MySQL
|
||||||
|
CREATE TABLE schema_migrations (
|
||||||
|
version BIGINT PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
duration_ms INT,
|
||||||
|
checksum VARCHAR(64)
|
||||||
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||||
|
|
||||||
|
-- Migration status table
|
||||||
|
CREATE TABLE migration_status (
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
version BIGINT NOT NULL,
|
||||||
|
status ENUM('pending', 'completed', 'failed', 'rolled_back'),
|
||||||
|
error_message TEXT,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Migration Patterns
|
||||||
|
|
||||||
|
### Adding Columns
|
||||||
|
|
||||||
|
**PostgreSQL - Safe Column Addition:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_001_add_phone_to_users.sql
|
||||||
|
|
||||||
|
-- Add column with default (non-blocking)
|
||||||
|
ALTER TABLE users
|
||||||
|
ADD COLUMN phone VARCHAR(20) DEFAULT '';
|
||||||
|
|
||||||
|
-- Add constraint after population
|
||||||
|
ALTER TABLE users
|
||||||
|
ADD CONSTRAINT phone_format
|
||||||
|
CHECK (phone = '' OR phone ~ '^\+?[0-9\-\(\)]{10,}$');
|
||||||
|
|
||||||
|
-- Create index
|
||||||
|
CREATE INDEX CONCURRENTLY idx_users_phone ON users(phone);
|
||||||
|
|
||||||
|
-- Rollback:
|
||||||
|
-- DROP INDEX CONCURRENTLY idx_users_phone;
|
||||||
|
-- ALTER TABLE users DROP COLUMN phone;
|
||||||
|
```
|
||||||
|
|
||||||
|
**MySQL - Column Addition:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_001_add_phone_to_users.sql
|
||||||
|
|
||||||
|
-- Add column with ALTER
|
||||||
|
ALTER TABLE users
|
||||||
|
ADD COLUMN phone VARCHAR(20) DEFAULT '',
|
||||||
|
ADD INDEX idx_phone (phone);
|
||||||
|
|
||||||
|
-- Rollback:
|
||||||
|
-- ALTER TABLE users DROP COLUMN phone;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Renaming Columns
|
||||||
|
|
||||||
|
**PostgreSQL - Column Rename:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_002_rename_user_name_columns.sql
|
||||||
|
|
||||||
|
-- Rename columns
|
||||||
|
ALTER TABLE users RENAME COLUMN user_name TO full_name;
|
||||||
|
ALTER TABLE users RENAME COLUMN user_email TO email_address;
|
||||||
|
|
||||||
|
-- Update indexes
|
||||||
|
REINDEX TABLE users;
|
||||||
|
|
||||||
|
-- Rollback:
|
||||||
|
-- ALTER TABLE users RENAME COLUMN email_address TO user_email;
|
||||||
|
-- ALTER TABLE users RENAME COLUMN full_name TO user_name;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Creating Indexes Non-blocking
|
||||||
|
|
||||||
|
**PostgreSQL - Concurrent Index Creation:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_003_add_performance_indexes.sql
|
||||||
|
|
||||||
|
-- Create indexes without blocking writes
|
||||||
|
CREATE INDEX CONCURRENTLY idx_orders_user_created
|
||||||
|
ON orders(user_id, created_at DESC);
|
||||||
|
|
||||||
|
CREATE INDEX CONCURRENTLY idx_products_category_active
|
||||||
|
ON products(category_id)
|
||||||
|
WHERE active = true;
|
||||||
|
|
||||||
|
-- Verify index creation
|
||||||
|
SELECT schemaname, tablename, indexname, idx_scan
|
||||||
|
FROM pg_stat_user_indexes
|
||||||
|
WHERE indexname LIKE 'idx_%';
|
||||||
|
|
||||||
|
-- Rollback:
|
||||||
|
-- DROP INDEX CONCURRENTLY idx_orders_user_created;
|
||||||
|
-- DROP INDEX CONCURRENTLY idx_products_category_active;
|
||||||
|
```
|
||||||
|
|
||||||
|
**MySQL - Online Index Creation:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_003_add_performance_indexes.sql
|
||||||
|
|
||||||
|
-- Create indexes with ALGORITHM=INPLACE and LOCK=NONE
|
||||||
|
ALTER TABLE orders
|
||||||
|
ADD INDEX idx_user_created (user_id, created_at),
|
||||||
|
ALGORITHM=INPLACE, LOCK=NONE;
|
||||||
|
|
||||||
|
-- Monitor progress
|
||||||
|
SELECT * FROM INFORMATION_SCHEMA.PROCESSLIST
|
||||||
|
WHERE INFO LIKE 'ALTER TABLE%';
|
||||||
|
```
|
||||||
|
|
||||||
|
### Data Transformations
|
||||||
|
|
||||||
|
**PostgreSQL - Data Cleanup Migration:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_004_normalize_email_addresses.sql
|
||||||
|
|
||||||
|
-- Normalize existing email addresses
|
||||||
|
UPDATE users
|
||||||
|
SET email = LOWER(TRIM(email))
|
||||||
|
WHERE email != LOWER(TRIM(email));
|
||||||
|
|
||||||
|
-- Remove duplicates by keeping latest
|
||||||
|
DELETE FROM users
|
||||||
|
WHERE id NOT IN (
|
||||||
|
SELECT DISTINCT ON (LOWER(email)) id
|
||||||
|
FROM users
|
||||||
|
ORDER BY LOWER(email), created_at DESC
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Rollback: Restore from backup (no safe rollback for data changes)
|
||||||
|
```
|
||||||
|
|
||||||
|
**MySQL - Bulk Data Update:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_004_update_product_categories.sql
|
||||||
|
|
||||||
|
-- Update multiple rows with JOIN
|
||||||
|
UPDATE products p
|
||||||
|
JOIN category_mapping cm ON p.old_category = cm.old_name
|
||||||
|
SET p.category_id = cm.new_category_id
|
||||||
|
WHERE p.old_category IS NOT NULL;
|
||||||
|
|
||||||
|
-- Verify update
|
||||||
|
SELECT COUNT(*) as updated_count
|
||||||
|
FROM products
|
||||||
|
WHERE category_id IS NOT NULL;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Table Structure Changes
|
||||||
|
|
||||||
|
**PostgreSQL - Alter Table Migration:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_005_modify_order_columns.sql
|
||||||
|
|
||||||
|
-- Add new column
|
||||||
|
ALTER TABLE orders
|
||||||
|
ADD COLUMN status_updated_at TIMESTAMP;
|
||||||
|
|
||||||
|
-- Add constraint
|
||||||
|
ALTER TABLE orders
|
||||||
|
ADD CONSTRAINT valid_status
|
||||||
|
CHECK (status IN ('pending', 'processing', 'completed', 'cancelled'));
|
||||||
|
|
||||||
|
-- Set default for existing records
|
||||||
|
UPDATE orders
|
||||||
|
SET status_updated_at = updated_at
|
||||||
|
WHERE status_updated_at IS NULL;
|
||||||
|
|
||||||
|
-- Make column NOT NULL
|
||||||
|
ALTER TABLE orders
|
||||||
|
ALTER COLUMN status_updated_at SET NOT NULL;
|
||||||
|
|
||||||
|
-- Rollback:
|
||||||
|
-- ALTER TABLE orders DROP COLUMN status_updated_at;
|
||||||
|
-- ALTER TABLE orders DROP CONSTRAINT valid_status;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Testing Migrations
|
||||||
|
|
||||||
|
**PostgreSQL - Test in Transaction:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Test migration in transaction (will be rolled back)
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
-- Run migration statements
|
||||||
|
ALTER TABLE users ADD COLUMN test_column VARCHAR(255);
|
||||||
|
|
||||||
|
-- Validate data
|
||||||
|
SELECT COUNT(*) FROM users;
|
||||||
|
SELECT COUNT(DISTINCT email) FROM users;
|
||||||
|
|
||||||
|
-- Rollback if issues found
|
||||||
|
ROLLBACK;
|
||||||
|
|
||||||
|
-- Or commit if all good
|
||||||
|
COMMIT;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Validate Migration:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Check migration was applied
|
||||||
|
SELECT version, name, executed_at FROM schema_migrations
|
||||||
|
WHERE version = 20240115005;
|
||||||
|
|
||||||
|
-- Verify table structure
|
||||||
|
SELECT column_name, data_type, is_nullable
|
||||||
|
FROM information_schema.columns
|
||||||
|
WHERE table_name = 'users'
|
||||||
|
ORDER BY ordinal_position;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Rollback Strategies
|
||||||
|
|
||||||
|
**PostgreSQL - Bidirectional Migrations:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration file: 20240115_006_add_user_status.sql
|
||||||
|
|
||||||
|
-- ===== UP =====
|
||||||
|
CREATE TYPE user_status AS ENUM ('active', 'suspended', 'deleted');
|
||||||
|
ALTER TABLE users ADD COLUMN status user_status DEFAULT 'active';
|
||||||
|
|
||||||
|
-- ===== DOWN =====
|
||||||
|
-- ALTER TABLE users DROP COLUMN status;
|
||||||
|
-- DROP TYPE user_status;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rollback Execution:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Function to rollback to specific version
|
||||||
|
CREATE OR REPLACE FUNCTION rollback_to_version(p_target_version BIGINT)
|
||||||
|
RETURNS TABLE (version BIGINT, name VARCHAR, status VARCHAR) AS $$
|
||||||
|
BEGIN
|
||||||
|
    -- List migrations newer than the target version, newest first.
    -- NOTE: this only reports them; the corresponding DOWN scripts must be executed separately.
|
||||||
|
RETURN QUERY
|
||||||
|
SELECT m.version, m.name, 'rolled_back'::VARCHAR
|
||||||
|
FROM schema_migrations m
|
||||||
|
WHERE m.version > p_target_version
|
||||||
|
ORDER BY m.version DESC;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Production Deployment
|
||||||
|
|
||||||
|
**Safe Migration Checklist:**
|
||||||
|
|
||||||
|
- Test migration on production-like database
|
||||||
|
- Verify backup exists before migration
|
||||||
|
- Schedule during low-traffic window
|
||||||
|
- Monitor table locks and long-running queries
|
||||||
|
- Have rollback plan ready
|
||||||
|
- Test rollback procedure
|
||||||
|
- Document all changes
|
||||||
|
- Run in transaction when possible
|
||||||
|
- Verify data integrity after migration
|
||||||
|
- Update application code coordinated with migration
|
||||||
|
|
||||||
|
**PostgreSQL - Long Transaction Safety:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Use statement timeout to prevent hanging migrations
|
||||||
|
SET statement_timeout = '30min';
|
||||||
|
|
||||||
|
-- Use lock timeout to prevent deadlocks
|
||||||
|
SET lock_timeout = '5min';
|
||||||
|
|
||||||
|
-- Run migration with timeouts
|
||||||
|
ALTER TABLE large_table
ADD COLUMN new_column VARCHAR(255);

-- NOTE: ALGORITHM='INPLACE' is MySQL online-DDL syntax and is not valid in
-- PostgreSQL. PostgreSQL adds a nullable column without rewriting the table.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Migration Examples
|
||||||
|
|
||||||
|
**Combined Migration - Multiple Changes:**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Migration: 20240115_007_refactor_user_tables.sql
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
-- 1. Create new column with data from old column
|
||||||
|
ALTER TABLE users ADD COLUMN full_name VARCHAR(255);
|
||||||
|
UPDATE users SET full_name = CONCAT_WS(' ', first_name, last_name); -- CONCAT_WS skips NULL parts; plain || would yield NULL
|
||||||
|
|
||||||
|
-- 2. Add indexes
|
||||||
|
CREATE INDEX idx_users_full_name ON users(full_name);
|
||||||
|
|
||||||
|
-- 3. Add new constraint
|
||||||
|
ALTER TABLE users
|
||||||
|
ADD CONSTRAINT email_unique UNIQUE(email);
|
||||||
|
|
||||||
|
-- 4. Drop old columns (after verification)
|
||||||
|
-- ALTER TABLE users DROP COLUMN first_name;
|
||||||
|
-- ALTER TABLE users DROP COLUMN last_name;
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Resources
|
||||||
|
|
||||||
|
- [Flyway - Java Migration Tool](https://flywaydb.org/)
|
||||||
|
- [Liquibase - Database Changelog](https://www.liquibase.org/)
|
||||||
|
- [Alembic - Python Migration](https://alembic.sqlalchemy.org/)
|
||||||
|
- [PostgreSQL ALTER TABLE](https://www.postgresql.org/docs/current/sql-altertable.html)
|
||||||
|
- [MySQL ALTER TABLE](https://dev.mysql.com/doc/refman/8.0/en/alter-table.html)
|
||||||
133
.agents/skills/find-skills/SKILL.md
Normal file
133
.agents/skills/find-skills/SKILL.md
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
---
|
||||||
|
name: find-skills
|
||||||
|
description: Helps users discover and install agent skills when they ask questions like "how do I do X", "find a skill for X", "is there a skill that can...", or express interest in extending capabilities. This skill should be used when the user is looking for functionality that might exist as an installable skill.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Find Skills
|
||||||
|
|
||||||
|
This skill helps you discover and install skills from the open agent skills ecosystem.
|
||||||
|
|
||||||
|
## When to Use This Skill
|
||||||
|
|
||||||
|
Use this skill when the user:
|
||||||
|
|
||||||
|
- Asks "how do I do X" where X might be a common task with an existing skill
|
||||||
|
- Says "find a skill for X" or "is there a skill for X"
|
||||||
|
- Asks "can you do X" where X is a specialized capability
|
||||||
|
- Expresses interest in extending agent capabilities
|
||||||
|
- Wants to search for tools, templates, or workflows
|
||||||
|
- Mentions they wish they had help with a specific domain (design, testing, deployment, etc.)
|
||||||
|
|
||||||
|
## What is the Skills CLI?
|
||||||
|
|
||||||
|
The Skills CLI (`npx skills`) is the package manager for the open agent skills ecosystem. Skills are modular packages that extend agent capabilities with specialized knowledge, workflows, and tools.
|
||||||
|
|
||||||
|
**Key commands:**
|
||||||
|
|
||||||
|
- `npx skills find [query]` - Search for skills interactively or by keyword
|
||||||
|
- `npx skills add <package>` - Install a skill from GitHub or other sources
|
||||||
|
- `npx skills check` - Check for skill updates
|
||||||
|
- `npx skills update` - Update all installed skills
|
||||||
|
|
||||||
|
**Browse skills at:** https://skills.sh/
|
||||||
|
|
||||||
|
## How to Help Users Find Skills
|
||||||
|
|
||||||
|
### Step 1: Understand What They Need
|
||||||
|
|
||||||
|
When a user asks for help with something, identify:
|
||||||
|
|
||||||
|
1. The domain (e.g., React, testing, design, deployment)
|
||||||
|
2. The specific task (e.g., writing tests, creating animations, reviewing PRs)
|
||||||
|
3. Whether this is a common enough task that a skill likely exists
|
||||||
|
|
||||||
|
### Step 2: Search for Skills
|
||||||
|
|
||||||
|
Run the find command with a relevant query:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npx skills find [query]
|
||||||
|
```
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
- User asks "how do I make my React app faster?" → `npx skills find react performance`
|
||||||
|
- User asks "can you help me with PR reviews?" → `npx skills find pr review`
|
||||||
|
- User asks "I need to create a changelog" → `npx skills find changelog`
|
||||||
|
|
||||||
|
The command will return results like:
|
||||||
|
|
||||||
|
```
|
||||||
|
Install with npx skills add <owner/repo@skill>
|
||||||
|
|
||||||
|
vercel-labs/agent-skills@vercel-react-best-practices
|
||||||
|
└ https://skills.sh/vercel-labs/agent-skills/vercel-react-best-practices
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 3: Present Options to the User
|
||||||
|
|
||||||
|
When you find relevant skills, present them to the user with:
|
||||||
|
|
||||||
|
1. The skill name and what it does
|
||||||
|
2. The install command they can run
|
||||||
|
3. A link to learn more at skills.sh
|
||||||
|
|
||||||
|
Example response:
|
||||||
|
|
||||||
|
```
|
||||||
|
I found a skill that might help! The "vercel-react-best-practices" skill provides
|
||||||
|
React and Next.js performance optimization guidelines from Vercel Engineering.
|
||||||
|
|
||||||
|
To install it:
|
||||||
|
npx skills add vercel-labs/agent-skills@vercel-react-best-practices
|
||||||
|
|
||||||
|
Learn more: https://skills.sh/vercel-labs/agent-skills/vercel-react-best-practices
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 4: Offer to Install
|
||||||
|
|
||||||
|
If the user wants to proceed, you can install the skill for them:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npx skills add <owner/repo@skill> -g -y
|
||||||
|
```
|
||||||
|
|
||||||
|
The `-g` flag installs globally (user-level) and `-y` skips confirmation prompts.
|
||||||
|
|
||||||
|
## Common Skill Categories
|
||||||
|
|
||||||
|
When searching, consider these common categories:
|
||||||
|
|
||||||
|
| Category | Example Queries |
|
||||||
|
| --------------- | ---------------------------------------- |
|
||||||
|
| Web Development | react, nextjs, typescript, css, tailwind |
|
||||||
|
| Testing | testing, jest, playwright, e2e |
|
||||||
|
| DevOps | deploy, docker, kubernetes, ci-cd |
|
||||||
|
| Documentation | docs, readme, changelog, api-docs |
|
||||||
|
| Code Quality | review, lint, refactor, best-practices |
|
||||||
|
| Design | ui, ux, design-system, accessibility |
|
||||||
|
| Productivity | workflow, automation, git |
|
||||||
|
|
||||||
|
## Tips for Effective Searches
|
||||||
|
|
||||||
|
1. **Use specific keywords**: "react testing" is better than just "testing"
|
||||||
|
2. **Try alternative terms**: If "deploy" doesn't work, try "deployment" or "ci-cd"
|
||||||
|
3. **Check popular sources**: Many skills come from `vercel-labs/agent-skills` or `ComposioHQ/awesome-claude-skills`
|
||||||
|
|
||||||
|
## When No Skills Are Found
|
||||||
|
|
||||||
|
If no relevant skills exist:
|
||||||
|
|
||||||
|
1. Acknowledge that no existing skill was found
|
||||||
|
2. Offer to help with the task directly using your general capabilities
|
||||||
|
3. Suggest the user could create their own skill with `npx skills init`
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```
|
||||||
|
I searched for skills related to "xyz" but didn't find any matches.
|
||||||
|
I can still help you with this task directly! Would you like me to proceed?
|
||||||
|
|
||||||
|
If this is something you do often, you could create your own skill:
|
||||||
|
npx skills init my-xyz-skill
|
||||||
|
```
|
||||||
292
.agents/skills/gcp-cloud-run/SKILL.md
Normal file
292
.agents/skills/gcp-cloud-run/SKILL.md
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
---
|
||||||
|
name: gcp-cloud-run
|
||||||
|
description: "Specialized skill for building production-ready serverless applications on GCP. Covers Cloud Run services (containerized), Cloud Run Functions (event-driven), cold start optimization, and event-driven architecture patterns."
|
||||||
|
source: vibeship-spawner-skills (Apache 2.0)
|
||||||
|
risk: unknown
|
||||||
|
---
|
||||||
|
|
||||||
|
# GCP Cloud Run
|
||||||
|
|
||||||
|
## Patterns
|
||||||
|
|
||||||
|
### Cloud Run Service Pattern
|
||||||
|
|
||||||
|
Containerized web service on Cloud Run
|
||||||
|
|
||||||
|
**When to use**: Web applications and APIs; need any runtime or library; complex services with multiple endpoints; stateless containerized workloads.
|
||||||
|
|
||||||
|
```dockerfile
|
||||||
|
# Dockerfile - Multi-stage build for smaller image
|
||||||
|
FROM node:20-slim AS builder
|
||||||
|
WORKDIR /app
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci --only=production
|
||||||
|
|
||||||
|
FROM node:20-slim
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy only production dependencies
|
||||||
|
COPY --from=builder /app/node_modules ./node_modules
|
||||||
|
COPY src ./src
|
||||||
|
COPY package.json ./
|
||||||
|
|
||||||
|
# Cloud Run uses PORT env variable
|
||||||
|
ENV PORT=8080
|
||||||
|
EXPOSE 8080
|
||||||
|
|
||||||
|
# Run as non-root user
|
||||||
|
USER node
|
||||||
|
|
||||||
|
CMD ["node", "src/index.js"]
|
||||||
|
```
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// src/index.js
|
||||||
|
const express = require('express');
|
||||||
|
const app = express();
|
||||||
|
|
||||||
|
app.use(express.json());
|
||||||
|
|
||||||
|
// Health check endpoint
|
||||||
|
app.get('/health', (req, res) => {
|
||||||
|
res.status(200).send('OK');
|
||||||
|
});
|
||||||
|
|
||||||
|
// API routes
|
||||||
|
app.get('/api/items/:id', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const item = await getItem(req.params.id);
|
||||||
|
res.json(item);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error:', error);
|
||||||
|
res.status(500).json({ error: 'Internal server error' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Graceful shutdown
|
||||||
|
process.on('SIGTERM', () => {
|
||||||
|
console.log('SIGTERM received, shutting down gracefully');
|
||||||
|
server.close(() => {
|
||||||
|
console.log('Server closed');
|
||||||
|
process.exit(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
const PORT = process.env.PORT || 8080;
|
||||||
|
const server = app.listen(PORT, () => {
|
||||||
|
console.log(`Server listening on port ${PORT}`);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# cloudbuild.yaml
|
||||||
|
steps:
|
||||||
|
# Build the container image
|
||||||
|
- name: 'gcr.io/cloud-builders/docker'
|
||||||
|
args: ['build', '-t', 'gcr.io/$PROJECT_ID/my-service:$COMMIT_SHA', '.']
|
||||||
|
|
||||||
|
# Push the container image
|
||||||
|
- name: 'gcr.io/cloud-builders/docker'
|
||||||
|
args: ['push', 'gcr.io/$PROJECT_ID/my-service:$COMMIT_SHA']
|
||||||
|
|
||||||
|
# Deploy to Cloud Run
|
||||||
|
- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
|
||||||
|
entrypoint: gcloud
|
||||||
|
args:
|
||||||
|
- 'run'
|
||||||
|
- 'deploy'
|
||||||
|
- 'my-service'
|
||||||
|
- '--image=gcr.io/$PROJECT_ID/my-service:$COMMIT_SHA'
|
||||||
|
- '--region=us-central1'
|
||||||
|
- '--platform=managed'
|
||||||
|
- '--allow-unauthenticated'
|
||||||
|
- '--memory=512Mi'
|
||||||
|
- '--cpu=1'
|
||||||
|
- '--min-instances=1'
|
||||||
|
- '--max-instances=100'
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
### Cloud Run Functions Pattern
|
||||||
|
|
||||||
|
Event-driven functions (formerly Cloud Functions)
|
||||||
|
|
||||||
|
**When to use**: Simple event handlers; Pub/Sub message processing; Cloud Storage triggers; HTTP webhooks.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// HTTP Function
|
||||||
|
// index.js
|
||||||
|
const functions = require('@google-cloud/functions-framework');
|
||||||
|
|
||||||
|
functions.http('helloHttp', (req, res) => {
|
||||||
|
const name = req.query.name || req.body.name || 'World';
|
||||||
|
res.send(`Hello, ${name}!`);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Pub/Sub Function
|
||||||
|
const functions = require('@google-cloud/functions-framework');
|
||||||
|
|
||||||
|
functions.cloudEvent('processPubSub', (cloudEvent) => {
|
||||||
|
// Decode Pub/Sub message
|
||||||
|
const message = cloudEvent.data.message;
|
||||||
|
const data = message.data
|
||||||
|
? JSON.parse(Buffer.from(message.data, 'base64').toString())
|
||||||
|
: {};
|
||||||
|
|
||||||
|
console.log('Received message:', data);
|
||||||
|
|
||||||
|
// Process message
|
||||||
|
processMessage(data);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Cloud Storage Function
|
||||||
|
const functions = require('@google-cloud/functions-framework');
|
||||||
|
|
||||||
|
functions.cloudEvent('processStorageEvent', async (cloudEvent) => {
|
||||||
|
const file = cloudEvent.data;
|
||||||
|
|
||||||
|
console.log(`Event: ${cloudEvent.type}`);
|
||||||
|
console.log(`Bucket: ${file.bucket}`);
|
||||||
|
console.log(`File: ${file.name}`);
|
||||||
|
|
||||||
|
if (cloudEvent.type === 'google.cloud.storage.object.v1.finalized') {
|
||||||
|
await processUploadedFile(file.bucket, file.name);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Deploy HTTP function
|
||||||
|
gcloud functions deploy hello-http \
|
||||||
|
--gen2 \
|
||||||
|
--runtime nodejs20 \
|
||||||
|
--trigger-http \
|
||||||
|
--allow-unauthenticated \
|
||||||
|
--region us-central1
|
||||||
|
|
||||||
|
# Deploy Pub/Sub function
|
||||||
|
gcloud functions deploy process-messages \
|
||||||
|
--gen2 \
|
||||||
|
--runtime nodejs20 \
|
||||||
|
--trigger-topic my-topic \
|
||||||
|
--region us-central1
|
||||||
|
|
||||||
|
# Deploy Cloud Storage function
|
||||||
|
gcloud functions deploy process-uploads \
|
||||||
|
--gen2 \
|
||||||
|
--runtime nodejs20 \
|
||||||
|
--trigger-event-filters="type=google.cloud.storage.object.v1.finalized" \
|
||||||
|
--trigger-event-filters="bucket=my-bucket" \
|
||||||
|
--region us-central1
|
||||||
|
```
|
||||||
|
|
||||||
|
### Cold Start Optimization Pattern
|
||||||
|
|
||||||
|
Minimize cold start latency for Cloud Run
|
||||||
|
|
||||||
|
**When to use**: Latency-sensitive applications; user-facing APIs; high-traffic services.
|
||||||
|
|
||||||
|
|
||||||
|
## 1. Enable Startup CPU Boost
|
||||||
|
|
||||||
|
```bash
|
||||||
|
gcloud run deploy my-service \
|
||||||
|
--cpu-boost \
|
||||||
|
--region us-central1
|
||||||
|
```
|
||||||
|
|
||||||
|
## 2. Set Minimum Instances
|
||||||
|
|
||||||
|
```bash
|
||||||
|
gcloud run deploy my-service \
|
||||||
|
--min-instances 1 \
|
||||||
|
--region us-central1
|
||||||
|
```
|
||||||
|
|
||||||
|
## 3. Optimize Container Image
|
||||||
|
|
||||||
|
```dockerfile
|
||||||
|
# Use distroless for minimal image
|
||||||
|
FROM node:20-slim AS builder
|
||||||
|
WORKDIR /app
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci --only=production
|
||||||
|
|
||||||
|
FROM gcr.io/distroless/nodejs20-debian12
|
||||||
|
WORKDIR /app
|
||||||
|
COPY --from=builder /app/node_modules ./node_modules
|
||||||
|
COPY src ./src
|
||||||
|
CMD ["src/index.js"]
|
||||||
|
```
|
||||||
|
|
||||||
|
## 4. Lazy Initialize Heavy Dependencies
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Lazy load heavy libraries
|
||||||
|
let bigQueryClient = null;
|
||||||
|
|
||||||
|
function getBigQueryClient() {
|
||||||
|
if (!bigQueryClient) {
|
||||||
|
const { BigQuery } = require('@google-cloud/bigquery');
|
||||||
|
bigQueryClient = new BigQuery();
|
||||||
|
}
|
||||||
|
return bigQueryClient;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only initialize when needed
|
||||||
|
app.get('/api/analytics', async (req, res) => {
|
||||||
|
const client = getBigQueryClient();
|
||||||
|
const results = await client.query({...});
|
||||||
|
res.json(results);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## 5. Increase Memory (More CPU)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Higher memory = more CPU during startup
|
||||||
|
gcloud run deploy my-service \
|
||||||
|
--memory 1Gi \
|
||||||
|
--cpu 2 \
|
||||||
|
--region us-central1
|
||||||
|
```
|
||||||
|
|
||||||
|
## Anti-Patterns
|
||||||
|
|
||||||
|
### ❌ CPU-Intensive Work Without Concurrency=1
|
||||||
|
|
||||||
|
**Why bad**: CPU is shared across concurrent requests. CPU-bound work
|
||||||
|
will starve other requests, causing timeouts.
|
||||||
|
|
||||||
|
### ❌ Writing Large Files to /tmp
|
||||||
|
|
||||||
|
**Why bad**: /tmp is an in-memory filesystem. Large files consume
|
||||||
|
your memory allocation and can cause OOM errors.
|
||||||
|
|
||||||
|
### ❌ Long-Running Background Tasks
|
||||||
|
|
||||||
|
**Why bad**: Cloud Run throttles CPU to near-zero when not handling
|
||||||
|
requests. Background tasks will be extremely slow or stall.
|
||||||
|
|
||||||
|
## ⚠️ Sharp Edges
|
||||||
|
|
||||||
|
| Issue | Severity | Solution |
|
||||||
|
|-------|----------|----------|
|
||||||
|
| Files in /tmp consume the instance memory allocation | high | Calculate memory including /tmp usage |
| CPU is shared across concurrent requests | high | Set appropriate concurrency |
| Work after the response is CPU-throttled | high | Enable CPU always allocated |
| New instances open fresh connections on scale-out | medium | Configure connection pool with keep-alive |
| Cold start latency on first request | high | Enable startup CPU boost |
| gen1 vs gen2 execution environments behave differently | medium | Explicitly set execution environment |
| Mismatched client/service timeouts drop requests | medium | Set consistent timeouts |
|
||||||
|
|
||||||
|
## When to Use
|
||||||
|
Use this skill when building, deploying, or optimizing serverless workloads on Cloud Run using the patterns described above.
|
||||||
Reference in New Issue
Block a user