Rate Limiter Middleware

The Rate Limiter middleware limits API access rates to prevent abuse and protect server resources. It is implemented using the token bucket algorithm.

Usage

Basic Usage

package main

import (
    "go-slim.dev/slim"
    "go-slim.dev/slim/middleware"
    "golang.org/x/time/rate"
)

func main() {
    s := slim.New()

    // Create a rate limiter store (20 requests/second)
    limiterStore := middleware.NewRateLimiterMemoryStore(rate.Limit(20))

    s.GET("/api/limited", func(c slim.Context) error {
        return c.String(200, "success")
    }, middleware.RateLimiter(limiterStore))

    s.Start(":8080")
}

Global Rate Limiting

s := slim.New()

// Apply rate limiting to all routes
limiterStore := middleware.NewRateLimiterMemoryStore(20)
s.Use(middleware.RateLimiter(limiterStore))

s.GET("/api/users", handler)
s.POST("/api/users", handler)

Custom Configuration

s := slim.New()

limiterStore := middleware.NewRateLimiterMemoryStoreWithConfig(
    middleware.RateLimiterMemoryStoreConfig{
        Rate:      10,              // 10 requests/second
        Burst:     30,              // Burst capacity
        ExpiresIn: 3 * time.Minute, // Limiter expiration time
    },
)

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: limiterStore,
    IdentifierExtractor: func(c slim.Context) (string, error) {
        // Rate limit by IP address
        return c.RealIP(), nil
    },
    ErrorHandler: func(c slim.Context, err error) error {
        return c.JSON(403, map[string]string{
            "error": "Unable to identify client",
        })
    },
    DenyHandler: func(c slim.Context, identifier string, err error) error {
        return c.JSON(429, map[string]string{
            "error": "Too many requests, please try again later",
        })
    },
}))

Configuration Options

RateLimiterConfig

type RateLimiterConfig struct {
    // BeforeFunc executes before the rate limit check.
    // Optional.
    BeforeFunc BeforeFunc

    // IdentifierExtractor extracts the visitor identifier from the request context.
    // Optional. Defaults to the client IP address.
    IdentifierExtractor Extractor

    // Store defines the rate limiter storage implementation.
    // Required.
    Store RateLimiterStore

    // ErrorHandler is called when IdentifierExtractor returns an error.
    // Optional. Defaults to a 403 Forbidden response.
    ErrorHandler func(context slim.Context, err error) error

    // DenyHandler is called when the rate limiter denies access.
    // Optional. Defaults to a 429 Too Many Requests response.
    DenyHandler func(context slim.Context, identifier string, err error) error
}

RateLimiterMemoryStoreConfig

type RateLimiterMemoryStoreConfig struct {
    // Rate is the number of requests allowed to pass per second.
    // Required.
    Rate rate.Limit

    // Burst is the maximum number of requests allowed in a burst.
    // Optional. Defaults to the floored value of Rate.
    Burst int

    // ExpiresIn is the duration after which an idle limiter is cleaned up.
    // Optional. Defaults to 3 minutes.
    ExpiresIn time.Duration
}
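
Note that because Burst defaults to the floored value of Rate, a fractional Rate with no explicit Burst floors to zero, which would deny every request. A minimal sketch (values are illustrative) that sets Burst explicitly in that case:

limiterStore := middleware.NewRateLimiterMemoryStoreWithConfig(
    middleware.RateLimiterMemoryStoreConfig{
        Rate:  0.5, // one request every two seconds
        Burst: 1,   // must be at least 1; floor(0.5) = 0 would block all requests
    },
)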

Examples

1. Rate Limit by IP Address

// 20 requests per second per IP
limiterStore := middleware.NewRateLimiterMemoryStore(20)

s.Use(middleware.RateLimiter(limiterStore))

2. Rate Limit by User ID

limiterStore := middleware.NewRateLimiterMemoryStore(10)

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: limiterStore,
    IdentifierExtractor: func(c slim.Context) (string, error) {
        // Get the user ID from the JWT or session
        userID := c.Get("user_id")
        if userID == nil {
            return "", errors.New("not logged in")
        }
        return fmt.Sprintf("user:%v", userID), nil
    },
}))

3. Rate Limit by API Key

limiterStore := middleware.NewRateLimiterMemoryStore(100)

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: limiterStore,
    IdentifierExtractor: func(c slim.Context) (string, error) {
        apiKey := c.Request().Header.Get("X-API-Key")
        if apiKey == "" {
            return "", errors.New("missing API Key")
        }
        return fmt.Sprintf("apikey:%s", apiKey), nil
    },
    ErrorHandler: func(c slim.Context, err error) error {
        return c.JSON(401, map[string]string{
            "error": "Invalid API Key",
        })
    },
}))

4. Different Strategies for Different Routes

// Public API: 10 requests/second
publicLimiter := middleware.NewRateLimiterMemoryStore(10)

// Authenticated API: 50 requests/second
authLimiter := middleware.NewRateLimiterMemoryStore(50)

// Public route group
public := s.Group("/api/public", middleware.RateLimiter(publicLimiter))
public.GET("/posts", listPosts)

// Authenticated route group
auth := s.Group("/api/auth", middleware.RateLimiter(authLimiter))
auth.GET("/users", listUsers)
auth.POST("/users", createUser)

5. Configure Burst Capacity

// Allow 10 requests/second, with bursts of up to 30 requests
limiterStore := middleware.NewRateLimiterMemoryStoreWithConfig(
    middleware.RateLimiterMemoryStoreConfig{
        Rate:  10, // 10 requests/second
        Burst: 30, // Burst capacity of 30
    },
)

s.Use(middleware.RateLimiter(limiterStore))

Explanation: when requests arrive in a burst, the burst capacity allows up to 30 of them through immediately; once the bucket is drained, further requests are admitted at the steady rate of 10 per second.

6. Custom Error Response

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: middleware.NewRateLimiterMemoryStore(20),
    DenyHandler: func(c slim.Context, identifier string, err error) error {
        // Return a custom response with rate limit headers
        c.Response().Header().Set("X-RateLimit-Limit", "20")
        c.Response().Header().Set("X-RateLimit-Remaining", "0")
        c.Response().Header().Set("Retry-After", "60")

        return c.JSON(429, map[string]any{
            "error":       "Rate Limit",
            "message":     "Too many requests, please try again later",
            "retry_after": 60,
        })
    },
}))

7. Skip Specific IPs

whitelistedIPs := map[string]bool{
    "127.0.0.1": true,
    "10.0.0.1":  true,
}

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: middleware.NewRateLimiterMemoryStore(20),
    BeforeFunc: func(c slim.Context) {
        ip := c.RealIP()
        if whitelistedIPs[ip] {
            // Mark the request so the extractor can bypass the rate limit check
            c.Set("skip_rate_limit", true)
        }
    },
    IdentifierExtractor: func(c slim.Context) (string, error) {
        if skip := c.Get("skip_rate_limit"); skip != nil {
            return "", errors.New("whitelisted")
        }
        return c.RealIP(), nil
    },
    ErrorHandler: func(c slim.Context, err error) error {
        if err.Error() == "whitelisted" {
            return nil // Allow
        }
        return middleware.ErrExtractorError
    },
}))

Token Bucket Algorithm

The Rate Limiter middleware uses the token bucket algorithm:

  1. Bucket Capacity (Burst): the maximum number of tokens the bucket can hold
  2. Fill Rate (Rate): the number of tokens added to the bucket per second
  3. Request Processing:
    • Each incoming request attempts to take one token from the bucket
    • If a token is available, the request passes and the token is consumed
    • If the bucket is empty, the request is denied

Example:

// Rate: 10, Burst: 30
// - The bucket starts with 30 tokens
// - Up to 30 requests can be processed immediately
// - After that, requests are processed at 10 per second
limiterStore := middleware.NewRateLimiterMemoryStoreWithConfig(
    middleware.RateLimiterMemoryStoreConfig{
        Rate:  10,
        Burst: 30,
    },
)

Memory Store Characteristics

Features

  • Suitable for moderate load scenarios
  • Simple to use, no external dependencies
  • Automatically cleans up expired limiters

Limitations

  • Concurrency Performance: 100+ concurrent requests may cause lock contention
  • Memory Usage: 16000+ different IP addresses may impact performance
  • Single Machine: Does not support distributed rate limiting

When to Use Custom Storage

For high load scenarios, consider implementing custom storage (e.g., Redis):

type RedisRateLimiterStore struct {
    client *redis.Client
    rate   rate.Limit
    burst  int
}

func (r *RedisRateLimiterStore) Allow(identifier string) (bool, error) {
    // Implement distributed rate limiting using Redis,
    // e.g. with INCR + EXPIRE or a Lua script
    // ...
}

// Use the custom storage
s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: &RedisRateLimiterStore{
        client: redisClient,
        rate:   10,
        burst:  30,
    },
}))
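
For reference, here is a minimal sketch of what Allow could look like using a fixed one-second window with INCR + EXPIRE. It assumes the github.com/redis/go-redis/v9 client and an illustrative key scheme; a true token bucket would use a Lua script so the check and update happen atomically:

func (r *RedisRateLimiterStore) Allow(identifier string) (bool, error) {
    ctx := context.Background()

    // One counter key per identifier per one-second window (illustrative key scheme)
    key := fmt.Sprintf("ratelimit:%s:%d", identifier, time.Now().Unix())

    // Count this request in the current window
    count, err := r.client.Incr(ctx, key).Result()
    if err != nil {
        return false, err
    }

    // Expire the counter shortly after the window closes
    if count == 1 {
        if err := r.client.Expire(ctx, key, 2*time.Second).Err(); err != nil {
            return false, err
        }
    }

    // Allow up to burst requests within a single window
    return count <= int64(r.burst), nil
}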

Default Configuration

DefaultRateLimiterConfig = RateLimiterConfig{
    IdentifierExtractor: func(ctx slim.Context) (string, error) {
        return ctx.RealIP(), nil
    },
    ErrorHandler: func(c slim.Context, err error) error {
        return &slim.HTTPError{
            Code:     403,
            Message:  "error while extracting identifier",
            Internal: err,
        }
    },
    DenyHandler: func(c slim.Context, identifier string, err error) error {
        return &slim.HTTPError{
            Code:     429,
            Message:  "rate limit exceeded",
            Internal: err,
        }
    },
}

DefaultRateLimiterMemoryStoreConfig = RateLimiterMemoryStoreConfig{
    ExpiresIn: 3 * time.Minute,
}

Best Practices

1. Set Different Limits by Resource Type

// Read operations: 100 requests/second
readLimiter := middleware.NewRateLimiterMemoryStore(100)

// Write operations: 10 requests/second
writeLimiter := middleware.NewRateLimiterMemoryStore(10)

s.GET("/api/posts", handler, middleware.RateLimiter(readLimiter))
s.POST("/api/posts", handler, middleware.RateLimiter(writeLimiter))

2. Return Rate Limit Information

s.Use(func(c slim.Context, next slim.HandlerFunc) error {
    // Add rate limit info to the response headers before the handler writes the body;
    // headers set after the response has been written have no effect
    c.Response().Header().Set("X-RateLimit-Limit", "100")
    c.Response().Header().Set("X-RateLimit-Remaining", "95")
    c.Response().Header().Set("X-RateLimit-Reset", "1699430400")

    return next(c)
})

s.Use(middleware.RateLimiter(limiterStore))

3. Monitor Rate Limiting

var rateLimitCounter int64

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: middleware.NewRateLimiterMemoryStore(20),
    DenyHandler: func(c slim.Context, identifier string, err error) error {
        // Count rate-limited requests
        atomic.AddInt64(&rateLimitCounter, 1)

        // Log the offending identifier
        log.Printf("Rate limit exceeded for %s", identifier)

        return c.JSON(429, map[string]string{
            "error": "rate limit exceeded",
        })
    },
}))

// Periodically export metrics
go func() {
    ticker := time.NewTicker(1 * time.Minute)
    for range ticker.C {
        count := atomic.SwapInt64(&rateLimitCounter, 0)
        log.Printf("Rate limited requests in last minute: %d", count)
    }
}()

4. Progressive Rate Limiting

// Dynamically adjust the rate limit based on load.
// getSystemLoad is application-specific (e.g. CPU usage or queue depth).
func getDynamicRateLimit() rate.Limit {
    load := getSystemLoad()
    if load > 0.8 {
        return 10 // Lower limit under high load
    } else if load > 0.5 {
        return 50
    }
    return 100 // Higher limit under low load
}
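
One way to apply the dynamic limit is to pre-build one limiter per tier and dispatch per request, as in the sketch below. It assumes middleware.RateLimiter returns a function with the same signature as the inline middleware shown elsewhere on this page:

// Pre-built limiters for each load tier (rates mirror getDynamicRateLimit above)
var (
    lowLoadLimit  = middleware.RateLimiter(middleware.NewRateLimiterMemoryStore(100))
    midLoadLimit  = middleware.RateLimiter(middleware.NewRateLimiterMemoryStore(50))
    highLoadLimit = middleware.RateLimiter(middleware.NewRateLimiterMemoryStore(10))
)

s.Use(func(c slim.Context, next slim.HandlerFunc) error {
    // Pick the limiter matching the current load tier
    switch getDynamicRateLimit() {
    case 10:
        return highLoadLimit(c, next)
    case 50:
        return midLoadLimit(c, next)
    default:
        return lowLoadLimit(c, next)
    }
})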

5. Higher Limits for Authenticated Users

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: middleware.NewRateLimiterMemoryStore(20), // Anonymous users: 20/sec
    IdentifierExtractor: func(c slim.Context) (string, error) {
        // Authenticated users get their own bucket keyed by user ID
        if userID := c.Get("user_id"); userID != nil {
            return fmt.Sprintf("user:%v", userID), nil
        }
        // Anonymous users are keyed by IP
        return fmt.Sprintf("ip:%s", c.RealIP()), nil
    },
}))

// Apply a higher limit to authenticated users with a second, dedicated limiter
authenticatedLimiter := middleware.NewRateLimiterMemoryStore(100) // 100/sec
authenticatedRateLimit := middleware.RateLimiter(authenticatedLimiter)

s.Use(func(c slim.Context, next slim.HandlerFunc) error {
    if c.Get("user_id") != nil {
        return authenticatedRateLimit(c, next)
    }
    return next(c)
})

Error Handling

Predefined Errors

// Rate limit exceeded
var ErrRateLimitExceeded = slim.NewHTTPError(http.StatusTooManyRequests, "rate limit exceeded")

// Extractor error
var ErrExtractorError = slim.NewHTTPError(http.StatusForbidden, "error while extracting identifier")
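
These predefined errors can be returned directly from custom handlers instead of building responses by hand, for example:

s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: middleware.NewRateLimiterMemoryStore(20),
    ErrorHandler: func(c slim.Context, err error) error {
        return middleware.ErrExtractorError
    },
    DenyHandler: func(c slim.Context, identifier string, err error) error {
        return middleware.ErrRateLimitExceeded
    },
}))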

Performance Considerations

Memory Store

  • Suitable for single-machine deployment
  • Suitable for small to medium applications (< 10000 different identifiers)
  • Good concurrency performance below 100 requests/second

Distributed Storage

For distributed deployments, consider using Redis or other distributed storage:

// Implement the RateLimiterStore interface
type RateLimiterStore interface {
    Allow(identifier string) (bool, error)
}
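
As a simple illustration of the contract, here is a hypothetical store that allows a fixed number of requests per identifier for the lifetime of the process (no expiry or token refill, so it is not suitable for production):

// QuotaStore is an illustrative custom store satisfying RateLimiterStore
type QuotaStore struct {
    mu     sync.Mutex
    quota  int
    counts map[string]int
}

func NewQuotaStore(quota int) *QuotaStore {
    return &QuotaStore{quota: quota, counts: make(map[string]int)}
}

func (q *QuotaStore) Allow(identifier string) (bool, error) {
    q.mu.Lock()
    defer q.mu.Unlock()
    q.counts[identifier]++
    return q.counts[identifier] <= q.quota, nil
}

// Plug it in like any other store
s.Use(middleware.RateLimiterWithConfig(middleware.RateLimiterConfig{
    Store: NewQuotaStore(1000),
}))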
