Use Case

Code Maintenance / Performance Optimization

Model

Claude 3.7 Sonnet

Optimize a Node.js Express API to handle 1,000 RPS: suggest improvements to database indexing, caching, and asynchronous processing, with code examples.

Claude 3.7 Sonnet / Coder

Optimizing a Node.js Express API for 1,000 RPS

To handle 1,000 requests per second (RPS) with a Node.js Express API, you need to focus on several key areas of optimization. Here is a comprehensive approach:

1. Database Indexing

Proper database indexing is essential for a high-performance API.

MongoDB example:

// Create indexes for frequently queried fields
const mongoose = require('mongoose');
const userSchema = new mongoose.Schema({
  email: String,
  username: String,
  createdAt: Date
});

// Create single-field indexes
userSchema.index({ email: 1 });
userSchema.index({ username: 1 });

// Create compound index for queries that filter on multiple fields
userSchema.index({ createdAt: -1, username: 1 });

// Create text index for search functionality
userSchema.index({ username: 'text', email: 'text' });

const User = mongoose.model('User', userSchema);
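
After adding indexes, it is worth confirming that your hot queries actually use them. Here is a minimal sketch using Mongoose's explain(); the query shape is just an illustration:

// Inspect the query plan for a representative query (illustrative query shape)
(async () => {
  const plan = await User.find({ email: 'user@example.com' }).explain('executionStats');
  // An IXSCAN stage and a low totalDocsExamined count indicate the index is being used
  console.log(plan.executionStats.totalDocsExamined);
})();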

SQL example (using Sequelize):

const User = sequelize.define('User', {
  email: DataTypes.STRING,
  username: DataTypes.STRING,
  createdAt: DataTypes.DATE
});

// After defining models, create indexes
(async () => {
  await sequelize.sync();
  // Create indexes
  await sequelize.query('CREATE INDEX idx_users_email ON Users(email)');
  await sequelize.query('CREATE INDEX idx_users_username ON Users(username)');
  await sequelize.query('CREATE INDEX idx_users_created_at ON Users(createdAt)');
})();
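
As with MongoDB, check the SQL query plan to confirm the new indexes are being picked up. A minimal sketch, assuming a dialect that supports EXPLAIN ANALYZE (e.g. PostgreSQL or MySQL 8+) and the sequelize instance above:

// Inspect the plan for a representative query (illustrative query)
(async () => {
  const [plan] = await sequelize.query(
    "EXPLAIN ANALYZE SELECT * FROM Users WHERE email = 'user@example.com'"
  );
  console.log(plan); // Expect an index scan on idx_users_email rather than a full table scan
})();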

2. Caching Implementation

Redis caching:

const express = require('express');
const redis = require('redis');
const { promisify } = require('util');
const app = express();

// Create Redis client (node-redis v3-style API; v4+ clients are promise-based and need connect())
const redisClient = redis.createClient({
  host: process.env.REDIS_HOST || 'localhost',
  port: process.env.REDIS_PORT || 6379
});

// Promisify Redis get and set methods
const getAsync = promisify(redisClient.get).bind(redisClient);
const setAsync = promisify(redisClient.set).bind(redisClient);

// Middleware for caching
const cacheMiddleware = (duration) => async (req, res, next) => {
  const key = `api:${req.originalUrl}`;

  try {
    const cachedData = await getAsync(key);

    if (cachedData) {
      return res.json(JSON.parse(cachedData));
    }

    // Store original send function
    const originalSend = res.send;

    // Override send to cache the response body before sending it
    res.send = function(body) {
      setAsync(key, body, 'EX', duration);
      return originalSend.call(this, body);
    };

    next();
  } catch (err) {
    console.error('Cache error:', err);
    next();
  }
};

// Apply cache middleware to routes
app.get('/api/users', cacheMiddleware(60), async (req, res) => {
  // Database query here
  const users = await User.find();
  res.json(users);
});
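
Caching reads is only half of the story; when the underlying data changes, the stale entry should be removed. A minimal invalidation sketch (illustrative), reusing the `api:${req.originalUrl}` key scheme and promisified client from above:

// Invalidate the cached list whenever a write changes it
const delAsync = promisify(redisClient.del).bind(redisClient);

app.post('/api/users', async (req, res) => {
  const user = await User.create(req.body);
  await delAsync('api:/api/users'); // drop the stale cached response for the list endpoint
  res.status(201).json(user);
});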

In-memory caching with node-cache:

const NodeCache = require('node-cache');
const cache = new NodeCache({ stdTTL: 60 }); // Default TTL: 60 seconds

// Cache middleware
function cacheMiddleware(key, ttl) {
  return (req, res, next) => {
    const cacheKey = key || req.originalUrl;
    const cachedResponse = cache.get(cacheKey);

    if (cachedResponse) {
      return res.json(cachedResponse);
    }

    const originalJson = res.json;
    res.json = function(data) {
      cache.set(cacheKey, data, ttl || 60);
      return originalJson.call(this, data);
    };

    next();
  };
}

// Usage
app.get('/api/products', cacheMiddleware('products', 300), async (req, res) => {
  const products = await Product.find();
  res.json(products);
});

3. Asynchronous Processing

Background jobs with Bull queues:

const Queue = require('bull');
const express = require('express');
const app = express();

// Create processing queue
const emailQueue = new Queue('email-sending', {
  redis: {
    host: process.env.REDIS_HOST || 'localhost',
    port: process.env.REDIS_PORT || 6379
  }
});

// API endpoint that offloads work to the queue
app.post('/api/users', async (req, res) => {
  try {
    // Save user to database
    const user = await User.create(req.body);

    // Add welcome email job to queue instead of sending it synchronously
    await emailQueue.add({
      to: user.email,
      subject: 'Welcome!',
      template: 'welcome',
      userId: user.id
    }, {
      attempts: 3,
      backoff: {
        type: 'exponential',
        delay: 1000
      }
    });

    // Respond immediately without waiting for email
    res.status(201).json(user);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});

// Process queue jobs (ideally in a separate worker process)
emailQueue.process(async (job) => {
  const { to, subject, template, userId } = job.data;
  // Send email logic here
  await sendEmail(to, subject, template, { userId });
  return { sent: true };
});

// Handle queue events
emailQueue.on('completed', job => {
  console.log(`Job ${job.id} completed`);
});

emailQueue.on('failed', (job, err) => {
  console.error(`Job ${job.id} failed with error: ${err.message}`);
});
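
To keep the API servers free of job-processing work, the queue consumer can live in its own process. A minimal sketch of a hypothetical worker.js, started alongside the API (e.g. node worker.js); sendEmail is the same placeholder helper used above:

// worker.js - dedicated consumer for the email-sending queue
const Queue = require('bull');

const emailQueue = new Queue('email-sending', {
  redis: {
    host: process.env.REDIS_HOST || 'localhost',
    port: process.env.REDIS_PORT || 6379
  }
});

emailQueue.process(async (job) => {
  const { to, subject, template, userId } = job.data;
  await sendEmail(to, subject, template, { userId }); // your email-sending helper
  return { sent: true };
});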

4. Server Optimization

Cluster mode:

const cluster = require('cluster');
const os = require('os');
const express = require('express');

if (cluster.isMaster) {
  // Get the number of available CPU cores
  const numCPUs = os.cpus().length;

  console.log(`Master process running. Forking ${numCPUs} workers`);

  // Fork workers for each available CPU
  for (let i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  // Handle worker crashes
  cluster.on('exit', (worker, code, signal) => {
    console.log(`Worker ${worker.process.pid} died. Restarting...`);
    cluster.fork();
  });
} else {
  // Worker processes share the same port
  const app = express();

  // Your Express routes and middleware here
  app.get('/', (req, res) => {
    res.send('Hello from worker ' + process.pid);
  });

  const PORT = process.env.PORT || 3000;
  app.listen(PORT, () => {
    console.log(`Worker ${process.pid} started and listening on port ${PORT}`);
  });
}

Using PM2:

// app.js - Your Express application
const express = require('express');
const app = express();

// Routes and middleware here

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
  console.log(`Server running on port ${PORT}`);
});

// ecosystem.config.js - PM2 configuration
module.exports = {
  apps: [{
    name: "api-server",
    script: "./app.js",
    instances: "max", // Use all available CPUs
    exec_mode: "cluster",
    watch: false,
    max_memory_restart: "1G",
    env: {
      NODE_ENV: "production",
      PORT: 3000
    }
  }]
};

// Run with: pm2 start ecosystem.config.js

5. Response Compression

const express = require('express');
const compression = require('compression');
const app = express();

// Add compression middleware
app.use(compression({
  level: 6, // Default compression level
  threshold: 1024, // Only compress responses larger than 1KB
  filter: (req, res) => {
    // Don't compress responses with this header
    if (req.headers['x-no-compression']) {
      return false;
    }
    // Use compression filter function from the module
    return compression.filter(req, res);
  }
}));

// Your routes here

6. Connection Pooling

// For MongoDB with Mongoose
mongoose.connect('mongodb://localhost/myapp', {
  poolSize: 10, // Maintain up to 10 socket connections (use maxPoolSize in Mongoose 6+)
  serverSelectionTimeoutMS: 5000,
  socketTimeoutMS: 45000
});

// For PostgreSQL with pg
const { Pool } = require('pg');
const pool = new Pool({
  host: 'localhost',
  database: 'myapp',
  user: 'postgres',
  password: 'password',
  max: 20, // Maximum number of clients in the pool
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 2000
});

// Use the pool in your routes
app.get('/api/data', async (req, res) => {
  const client = await pool.connect();
  try {
    const result = await client.query('SELECT * FROM items');
    res.json(result.rows);
  } finally {
    client.release(); // Return client to pool
  }
});

7. Rate Limiting

const rateLimit = require('express-rate-limit');

// Create rate limiter
const apiLimiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100, // Limit each IP to 100 requests per windowMs
  standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
  legacyHeaders: false, // Disable the `X-RateLimit-*` headers
  message: 'Too many requests from this IP, please try again after 15 minutes'
});

// Apply rate limiting to all API routes
app.use('/api/', apiLimiter);

// Different limits for specific endpoints
const createAccountLimiter = rateLimit({
  windowMs: 60 * 60 * 1000, // 1 hour
  max: 5, // 5 accounts per hour per IP
  message: 'Too many accounts created from this IP, please try again after an hour'
});

app.post('/api/create-account', createAccountLimiter, (req, res) => {
  // Create account logic
});
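
Note that express-rate-limit's default store keeps counters in each process's memory, so with the cluster setup from section 4 every worker enforces its own limit. One way to keep counts consistent is a shared Redis store; a sketch assuming the rate-limit-redis package (v2-style API) and the Redis client created earlier:

const RedisStore = require('rate-limit-redis');

const sharedLimiter = rateLimit({
  store: new RedisStore({
    client: redisClient, // counters live in Redis, shared by all workers
    expiry: 15 * 60      // seconds, matching windowMs below
  }),
  windowMs: 15 * 60 * 1000,
  max: 100
});

app.use('/api/', sharedLimiter);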

8. Monitoring and Performance Tuning

const express = require('express');
const responseTime = require('response-time');
const app = express();

// Add response time header
app.use(responseTime());

// Basic request logging
app.use((req, res, next) => {
  const start = Date.now();

  res.on('finish', () => {
    const duration = Date.now() - start;
    console.log(`${req.method} ${req.originalUrl} ${res.statusCode} ${duration}ms`);
  });

  next();
});

// For more advanced monitoring, consider using:
// - Prometheus + Grafana
// - New Relic
// - Datadog
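
If you go the Prometheus + Grafana route, the API needs to expose a metrics endpoint. A minimal sketch, assuming the prom-client package (metric names below are illustrative):

const promClient = require('prom-client');
promClient.collectDefaultMetrics(); // event-loop lag, memory, GC, etc.

// Histogram of request durations (in seconds), labelled by method/route/status
const httpDuration = new promClient.Histogram({
  name: 'http_request_duration_seconds',
  help: 'Duration of HTTP requests in seconds',
  labelNames: ['method', 'route', 'status'],
  buckets: [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1]
});

app.use((req, res, next) => {
  const end = httpDuration.startTimer();
  res.on('finish', () => {
    end({
      method: req.method,
      route: req.route ? req.route.path : req.originalUrl,
      status: res.statusCode
    });
  });
  next();
});

// Endpoint scraped by Prometheus
app.get('/metrics', async (req, res) => {
  res.set('Content-Type', promClient.register.contentType);
  res.end(await promClient.register.metrics());
});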

By implementing these optimizations, your Node.js Express API should be able to handle 1,000 RPS. Remember to:

  1. Profile your application to identify bottlenecks (a quick load-testing sketch follows below)
  2. Implement caching strategically for frequently accessed data
  3. Use database indexes that match your common query patterns
  4. Offload CPU-intensive tasks to background workers
  5. Scale horizontally with clustering or container orchestration
  6. Monitor production performance and keep optimizing
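
A quick way to check whether the stack above actually sustains around 1,000 RPS is to load-test a representative endpoint. A minimal sketch, assuming the autocannon package and the server running locally on port 3000:

const autocannon = require('autocannon');

autocannon({
  url: 'http://localhost:3000/api/users', // representative endpoint
  connections: 100, // concurrent connections
  duration: 30      // seconds
}, (err, result) => {
  if (err) throw err;
  console.log(`Average requests/sec: ${result.requests.average}`);
  console.log(`p99 latency (ms): ${result.latency.p99}`);
});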
