Node.js WeatherAPI Tutorial: Axios, Caching & Rate Limits

Building Weather Apps with Node.js and WeatherAPI: A Complete Guide

Building weather-enabled applications has never been easier with WeatherAPI.com and Node.js. This tutorial walks you through creating a robust weather service that handles API calls efficiently, respects rate limits, and implements smart caching strategies.

Setting Up Your Node.js Project

First, initialize your project and install the required dependencies:

npm init -y
npm install axios node-cache dotenv

Create a .env file for your WeatherAPI key:

WEATHER_API_KEY=your_api_key_here
WEATHER_API_BASE_URL=https://api.weatherapi.com/v1

Don’t have an API key yet? Sign up for free and get 100,000 calls per month with no credit card required.

Creating the Weather Service Class

Let’s build a comprehensive weather service that handles multiple endpoints and implements best practices:

const axios = require('axios');
const NodeCache = require('node-cache');
require('dotenv').config();

class WeatherService {
  /**
   * Weather client for WeatherAPI.com with an in-memory response cache and
   * a sliding-window rate limiter.
   */
  constructor() {
    this.apiKey = process.env.WEATHER_API_KEY;
    this.baseURL = process.env.WEATHER_API_BASE_URL;
    this.cache = new NodeCache({ stdTTL: 600 }); // 10-minute default TTL

    // Sliding-window rate limiter. maxRequests sits below the assumed
    // 1000-requests/hour quota to leave a safety buffer — confirm the
    // actual quota against your WeatherAPI plan.
    this.rateLimit = {
      maxRequests: 900,
      windowMs: 60 * 60 * 1000, // 1 hour
      requests: [], // timestamps (ms) of requests made within the window
    };
  }

  /**
   * Returns true when another request can be made without exceeding the
   * sliding-window limit. Prunes timestamps that fell out of the window.
   * @returns {boolean}
   */
  canMakeRequest() {
    const now = Date.now();
    this.rateLimit.requests = this.rateLimit.requests.filter(
      (time) => now - time < this.rateLimit.windowMs
    );

    return this.rateLimit.requests.length < this.rateLimit.maxRequests;
  }

  /**
   * Performs a GET against `${baseURL}${endpoint}` and caches the response.
   * Cache hits are served without consuming rate-limit quota.
   * @param {string} endpoint - API path, e.g. '/current.json'.
   * @param {object} [params] - Query parameters; the API key is merged in.
   * @returns {Promise<object>} Parsed response body.
   * @throws {Error} When the local rate limit is exceeded or the HTTP
   *   request fails (wraps the API's own error message when available).
   */
  async makeRequest(endpoint, params = {}) {
    const cacheKey = `${endpoint}-${JSON.stringify(params)}`;

    // Use has() so a legitimately falsy cached value still counts as a hit.
    if (this.cache.has(cacheKey)) {
      console.log('Cache hit for:', cacheKey);
      return this.cache.get(cacheKey);
    }

    if (!this.canMakeRequest()) {
      throw new Error('Rate limit exceeded. Please try again later.');
    }

    try {
      const response = await axios.get(`${this.baseURL}${endpoint}`, {
        params: {
          key: this.apiKey,
          ...params,
        },
        timeout: 5000, // fail fast rather than hang on a slow upstream
      });

      // Only successful requests count against the quota.
      this.rateLimit.requests.push(Date.now());
      this.cache.set(cacheKey, response.data);

      return response.data;
    } catch (error) {
      console.error('WeatherAPI request failed:', error.message);
      // Preserve the original error (and its stack) via `cause`.
      throw new Error(
        `Weather API error: ${error.response?.data?.error?.message || error.message}`,
        { cause: error }
      );
    }
  }
}

Implementing Core Weather Methods

Add specific methods for different WeatherAPI endpoints:

// Get current weather
async getCurrentWeather(location) {
  return await this.makeRequest('/current.json', { q: location });
}

// Get weather forecast
async getForecast(location, days = 3) {
  if (days > 14) days = 14; // API limit
  return await this.makeRequest('/forecast.json', { 
    q: location, 
    days: days,
    aqi: 'yes',
    alerts: 'yes'
  });
}

// Get historical weather
async getHistoricalWeather(location, date) {
  return await this.makeRequest('/history.json', { 
    q: location, 
    dt: date 
  });
}

// Search locations
async searchLocations(query) {
  return await this.makeRequest('/search.json', { q: query });
}

Advanced Caching Strategies

Implement intelligent caching based on data volatility:

// Different TTL for different data types
getCacheKey(endpoint, params) {
  return `${endpoint}-${JSON.stringify(params)}`;
}

setCacheWithCustomTTL(key, data, endpoint) {
  let ttl;
  
  switch (endpoint) {
    case '/current.json':
      ttl = 300; // 5 minutes for current weather
      break;
    case '/forecast.json':
      ttl = 1800; // 30 minutes for forecasts
      break;
    case '/history.json':
      ttl = 86400; // 24 hours for historical data
      break;
    default:
      ttl = 600; // 10 minutes default
  }
  
  this.cache.set(key, data, ttl);
}

Error Handling and Retry Logic

Add robust error handling with exponential backoff:

async makeRequestWithRetry(endpoint, params = {}, maxRetries = 3) {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await this.makeRequest(endpoint, params);
    } catch (error) {
      if (attempt === maxRetries) throw error;
      
      const delay = Math.pow(2, attempt) * 1000; // Exponential backoff
      console.log(`Retry ${attempt} after ${delay}ms`);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
}

Usage Example

Here's how to use your weather service in practice:

const weather = new WeatherService();

/**
 * Demo driver: fetches current conditions, a 7-day forecast, and a
 * location search, logging results; a single catch reports any failure.
 */
async function getWeatherData() {
  try {
    const current = await weather.getCurrentWeather('London');
    console.log(`Current temp in London: ${current.current.temp_c}°C`);

    const forecast = await weather.getForecast('London', 7);
    const tomorrow = forecast.forecast.forecastday[1];
    console.log(`Tomorrow's high: ${tomorrow.day.maxtemp_c}°C`);

    const locations = await weather.searchLocations('New York');
    const names = locations.map((loc) => loc.name);
    console.log('Found locations:', names);
  } catch (error) {
    console.error('Weather service error:', error.message);
  }
}

getWeatherData();

Production Considerations

For production deployments, consider these enhancements:

  • Redis for distributed caching across multiple server instances
  • Queue management for handling high-volume requests
  • Monitoring and alerting for API usage and errors
  • Graceful degradation when API is unavailable

WeatherAPI.com's reliable infrastructure serves 850,000+ developers worldwide with an average response time of ~200ms, making it perfect for production applications.

Ready to build your weather-powered application? Get your free API key and start with 100,000 monthly requests at no cost.

Scroll to Top