Node.js

Node.js code examples for using Residential Proxy with axios, Cheerio, and Puppeteer.

Practical Node.js examples for integrating Residential Proxy into your scraping and automation projects.

Prerequisites

npm install axios cheerio puppeteer dotenv
  • axios: Promise-based HTTP client for making requests with built-in proxy support and interceptors.
  • cheerio: Server-side jQuery implementation for parsing and manipulating HTML responses.
  • puppeteer: Headless Chrome automation library for full browser simulation with proxy support.
  • dotenv: Loads proxy credentials from a .env file into process.env (used in the environment variables example).

1. Basic Static Proxy

Demonstrates the simplest proxy setup with axios using a static session. The same IP address is maintained across requests by including the sessid parameter in the proxy username.

const axios = require('axios');

// Static proxy configuration
const proxy = {
  host: 'network.mrproxy.com',
  port: 10000,
  auth: {
    username: 'user-country-us-sessid-session1',
    password: 'pass123'
  }
};

// Make a request
async function testProxy() {
  try {
    const response = await axios.get('https://api.ipify.org', { proxy });
    console.log('Your IP:', response.data);
  } catch (error) {
    console.error('Error:', error.message);
  }
}

testProxy();

Use Case: Simple proxy testing and scenarios where you need consistent IP addresses for maintaining login sessions or avoiding duplicate detection.
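
To confirm the session really is sticky, you can request your IP twice through the same sessid and compare the results. A minimal sketch reusing the static configuration above (api.ipify.org simply echoes the caller's IP):

const axios = require('axios');

// Same static-session config as above: the sessid keeps the exit IP pinned
const stickyProxy = {
  host: 'network.mrproxy.com',
  port: 10000,
  auth: {
    username: 'user-country-us-sessid-session1',
    password: 'pass123'
  }
};

async function verifyStickySession() {
  const first = await axios.get('https://api.ipify.org', { proxy: stickyProxy });
  const second = await axios.get('https://api.ipify.org', { proxy: stickyProxy });
  
  // With the same sessid, both requests should report the same IP
  console.log('Same IP across requests:', first.data === second.data);
}

verifyStickySession();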

2. Rotating Proxy with Axios

Shows how to combine rotating proxies with Cheerio for HTML parsing. Each request gets a fresh IP address, perfect for scraping multiple pages without detection.

const axios = require('axios');
const cheerio = require('cheerio');

// Rotating proxy - new IP per request
const rotatingProxy = {
  host: 'network.mrproxy.com',
  port: 10000,
  auth: {
    username: 'user-country-uk',
    password: 'pass123'
  }
};

async function scrapePage(url) {
  try {
    const response = await axios.get(url, { 
      proxy: rotatingProxy,
      timeout: 30000
    });
    
    const $ = cheerio.load(response.data);
    const title = $('h1').first().text();
    
    console.log(`Scraped: ${title}`);
    return response.data;
    
  } catch (error) {
    console.error(`Error scraping ${url}:`, error.message);
    return null;
  }
}

// Scrape multiple pages (each with different IP)
async function scrapeMultiple() {
  const urls = [
    'https://example.com/page1',
    'https://example.com/page2',
    'https://example.com/page3'
  ];
  
  for (const url of urls) {
    await scrapePage(url);
  }
}

scrapeMultiple();

Use Case: High-volume web scraping where IP rotation helps avoid rate limits and detection. Ideal for scraping e-commerce sites, job boards, or news sites.
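
To see the rotation in action, a quick check (a sketch using the same rotating credentials, again with api.ipify.org as the echo endpoint) is to fire a few requests in a row and log the exit IP of each; without a sessid, each one should differ:

const axios = require('axios');

// Same rotating config as above: no sessid, so every request gets a new exit IP
const rotatingProxy = {
  host: 'network.mrproxy.com',
  port: 10000,
  auth: {
    username: 'user-country-uk',
    password: 'pass123'
  }
};

async function showRotation(count = 3) {
  for (let i = 0; i < count; i++) {
    const response = await axios.get('https://api.ipify.org', { proxy: rotatingProxy });
    console.log(`Request ${i + 1} exited from ${response.data}`);
  }
}

showRotation();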

3. Multiple Sessions (Promise.all)

Demonstrates parallel scraping using Promise.all, distributing requests across a pool of static proxy sessions. Each worker session keeps its own consistent IP address for the requests routed through it.

const axios = require('axios');

// Create array of proxy configs with different sessions
const proxies = Array.from({ length: 5 }, (_, i) => ({
  host: 'network.mrproxy.com',
  port: 10000,
  auth: {
    username: `user-country-de-sessid-worker${i + 1}`,
    password: 'pass123'
  }
}));

async function scrapeWithProxy(proxy, url) {
  try {
    const response = await axios.get(url, { proxy, timeout: 30000 });
    return {
      status: response.status,
      length: response.data.length,
      url
    };
  } catch (error) {
    return { error: error.message, url };
  }
}

async function parallelScraping() {
  const urls = Array.from({ length: 10 }, (_, i) => 
    `https://example.com/page${i + 1}`
  );
  
  // Scrape in parallel using different sessions
  const promises = urls.map((url, index) => {
    const proxy = proxies[index % proxies.length];
    return scrapeWithProxy(proxy, url);
  });
  
  const results = await Promise.all(promises);
  console.log('Results:', results);
}

parallelScraping();

Use Case: High-performance scraping that needs maximum throughput while maintaining multiple distinct identities. Great for scraping large datasets quickly.
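
When the URL list grows, launching every request at once with Promise.all can overload the target site and your own connection pool. One way to cap concurrency, sketched below with plain batching and no extra libraries, is to process the list in chunks the size of the session pool (batchedScraping is a hypothetical helper that reuses scrapeWithProxy and proxies from the example above):

// Process URLs in batches so at most proxies.length requests run at once
async function batchedScraping(urls, proxies) {
  const results = [];
  
  for (let i = 0; i < urls.length; i += proxies.length) {
    const batch = urls.slice(i, i + proxies.length);
    const batchResults = await Promise.all(
      batch.map((url, j) => scrapeWithProxy(proxies[j], url))
    );
    results.push(...batchResults);
  }
  
  return results;
}

// batchedScraping(urls, proxies).then(results => console.log(results.length, 'pages done'));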

4. Puppeteer with Proxy

Shows how to use Residential Proxy with Puppeteer for full browser automation. This enables JavaScript rendering, screenshot capture, and interaction with dynamic content.

const puppeteer = require('puppeteer');

async function scrapeWithPuppeteer() {
  // Chromium ignores credentials embedded in the proxy URL,
  // so pass only host:port here and authenticate per page below
  const browser = await puppeteer.launch({
    headless: true,
    args: [
      '--proxy-server=network.mrproxy.com:10000',
      '--no-sandbox',
      '--disable-setuid-sandbox'
    ]
  });
  
  try {
    const page = await browser.newPage();
    
    // Supply the proxy credentials (answers the proxy's 407 challenge)
    await page.authenticate({
      username: 'user-country-jp-sessid-pup1',
      password: 'pass123'
    });
    
    // Navigate to page
    await page.goto('https://example.com', { waitUntil: 'networkidle2' });
    
    // Extract data
    const title = await page.title();
    console.log('Page title:', title);
    
    // Take screenshot
    await page.screenshot({ path: 'screenshot.png' });
    
  } finally {
    await browser.close();
  }
}

scrapeWithPuppeteer();

Use Case: Scraping Single Page Applications (SPAs), taking screenshots for monitoring, or automating complex user interactions through a proxy.
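
To verify that the browser traffic actually leaves through the proxy, you can load api.ipify.org in the page and read the body. A small sketch that assumes the same launch and page.authenticate setup as above, placed inside the try block:

// After page.authenticate(...), check which IP the browser exits from
await page.goto('https://api.ipify.org', { waitUntil: 'networkidle2' });
const exitIp = await page.evaluate(() => document.body.innerText.trim());
console.log('Browser exit IP:', exitIp);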

5. Error Handling & Retries

Implements robust error handling with exponential backoff retry logic. Essential for production applications that need to handle network failures gracefully.

const axios = require('axios');

const proxy = {
  host: 'network.mrproxy.com',
  port: 10000,
  auth: {
    username: 'user-country-fr',
    password: 'pass123'
  }
};

async function scrapeWithRetry(url, maxRetries = 3) {
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      const response = await axios.get(url, { 
        proxy, 
        timeout: 30000 
      });
      return response.data;
      
    } catch (error) {
      console.log(`Attempt ${attempt + 1} failed: ${error.message}`);
      
      if (attempt < maxRetries - 1) {
        const delay = Math.pow(2, attempt) * 1000;  // Exponential backoff
        await new Promise(resolve => setTimeout(resolve, delay));
      }
    }
  }
  
  console.log(`Failed after ${maxRetries} attempts`);
  return null;
}

// Use with retry logic
scrapeWithRetry('https://example.com')
  .then(data => console.log('Success:', data ? 'Got data' : 'Failed'));

Use Case: Production-ready scraping that must handle temporary network issues, server errors, and proxy failures without losing data or crashing.
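
In practice you usually only want to retry errors that have a chance of succeeding on the next attempt (timeouts, connection resets, 429s, 5xx responses) and fail fast on everything else. A sketch of such a check, based on the shape of axios errors (isRetryable is a hypothetical helper you would call inside the catch block above):

// Returns true for errors worth retrying through a fresh connection
function isRetryable(error) {
  if (!error.response) {
    // No response at all: timeout, DNS failure, connection reset, etc.
    return true;
  }
  const status = error.response.status;
  return status === 429 || status >= 500;
}

// Inside the catch block of scrapeWithRetry:
// if (!isRetryable(error)) throw error;  // don't burn attempts on 4xx errors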

6. Environment Variables

Demonstrates secure credential management using environment variables. This keeps sensitive proxy credentials out of your source code.

const axios = require('axios');
require('dotenv').config();

// Use environment variables for credentials
const proxy = {
  host: 'network.mrproxy.com',
  port: 10000,
  auth: {
    username: process.env.MRPROXY_USERNAME,
    password: process.env.MRPROXY_PASSWORD
  }
};

async function secureScrape(url) {
  try {
    const response = await axios.get(url, { proxy });
    return response.data;
  } catch (error) {
    console.error('Error:', error.message);
    return null;
  }
}

// .env file:
// MRPROXY_USERNAME=user-country-us-sessid-session1
// MRPROXY_PASSWORD=pass123

Use Case: Production deployments where credentials need to be kept secure and separate from code. Essential for CI/CD pipelines and team development.
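
It is also worth failing fast when the variables are missing, rather than sending requests with undefined credentials. A small sketch that validates the variables at startup and then calls secureScrape from the example above (the variable names match the .env sample):

// Fail fast if credentials were not loaded from the environment
const missing = ['MRPROXY_USERNAME', 'MRPROXY_PASSWORD']
  .filter(name => !process.env[name]);

if (missing.length > 0) {
  console.error(`Missing environment variables: ${missing.join(', ')}`);
  process.exit(1);
}

secureScrape('https://api.ipify.org')
  .then(data => console.log('Exit IP:', data));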