Node.js Reference

Core Node.js APIs for production: fs, path, http/https, streams, events, child_process, worker_threads, crypto, and the built-in test runner.

fs — file system (sync, async, stream)
import { readFile, writeFile, readdir, stat, mkdir, rm,
         copyFile, rename, watch } from "node:fs/promises";
import { createReadStream, createWriteStream } from "node:fs";

// Read / write
const content = await readFile("config.json", "utf8");
const data    = await readFile("image.png");          // Buffer
await writeFile("output.txt", "hello", "utf8");
await writeFile("output.txt", "more\n", { flag: "a" }); // append

// Directory
const entries = await readdir("./src");
const all     = await readdir("./src", { withFileTypes: true });
for (const entry of all) {
  if (entry.isDirectory()) console.log("dir:", entry.name);
  if (entry.isFile())      console.log("file:", entry.name);
}
await mkdir("./build/assets", { recursive: true });   // mkdir -p

// Stat
const s = await stat("./app.js");
s.size;          // bytes
s.mtimeMs;       // mtime as milliseconds
s.isFile();
s.isDirectory();
s.isSymbolicLink();

// Delete
await rm("./file.txt");
await rm("./dir", { recursive: true, force: true });  // rm -rf

// Copy / rename / move
await copyFile("src.txt", "dst.txt");
await rename("old.txt", "new.txt");   // rename OR move (cross-device needs copy+rm)

// Watch for changes
const watcher = watch("./src", { recursive: true });
for await (const { eventType, filename } of watcher) {
  console.log(eventType, filename);
}

// Streams — memory-efficient for large files
const readable = createReadStream("large.csv", { encoding: "utf8" });
const writable = createWriteStream("output.csv");
readable.pipe(writable);

// or with async iteration
for await (const chunk of createReadStream("file.txt", "utf8")) {
  handleChunk(chunk);   // handleChunk: your per-chunk processing
}
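
// Cross-device-safe move: a sketch, assuming the rename/copyFile/rm imports above.
// rename() throws EXDEV when src and dest sit on different filesystems.
async function moveFile(src, dest) {
  try {
    await rename(src, dest);
  } catch (err) {
    if (err.code !== "EXDEV") throw err;
    await copyFile(src, dest);
    await rm(src);
  }
}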
path — cross-platform path manipulation
import path from "node:path";
import { fileURLToPath } from "node:url";

// __dirname / __filename in ESM modules
const __filename = fileURLToPath(import.meta.url);
const __dirname  = path.dirname(__filename);

// Common operations
path.join("/home", "user", "docs", "../config.json")  // /home/user/config.json
path.resolve("relative/path")      // absolute path from cwd
path.resolve(__dirname, "data")     // absolute path relative to current file

path.dirname("/home/user/file.txt") // /home/user
path.basename("/home/user/file.txt")// file.txt
path.basename("/home/user/file.txt", ".txt")  // file
path.extname("app.min.js")         // .js
path.extname("archive.tar.gz")     // .gz

// Parse / format
const p = path.parse("/home/user/notes.txt");
// { root: '/', dir: '/home/user', base: 'notes.txt', ext: '.txt', name: 'notes' }

path.format({ dir: "/home/user", name: "notes", ext: ".txt" })
// /home/user/notes.txt

// Normalise (resolves .. and . without hitting filesystem)
path.normalize("./a/../b/./c")     // b/c

// Relative path between two absolute paths
path.relative("/home/user/a", "/home/user/b/c")  // ../b/c

// Platform-specific
path.sep      // '/' on POSIX, '\' on Windows
path.delimiter  // ':' on POSIX, ';' on Windows
path.posix.join(...)   // always use / regardless of OS
path.win32.join(...)
http/https — server and client
import http from "node:http";
import https from "node:https";

// HTTP server
const server = http.createServer((req, res) => {
  const { method, url, headers } = req;

  // Read body (setEncoding avoids splitting multibyte characters across chunks)
  req.setEncoding("utf8");
  let body = "";
  req.on("data", chunk => { body += chunk; });
  req.on("end", () => {
    const data = method === "POST" ? JSON.parse(body) : null;

    res.writeHead(200, { "Content-Type": "application/json" });
    res.end(JSON.stringify({ ok: true, data }));
  });
});

server.listen(3000, () => console.log("Listening on :3000"));
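
// Sketch: read the body with async iteration instead of "data"/"end" events
// (req is itself an async-iterable stream; chunks are Buffers by default)
const echo = http.createServer(async (req, res) => {
  const chunks = [];
  for await (const chunk of req) chunks.push(chunk);
  res.end(Buffer.concat(chunks));
});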

// HTTPS server
import fs from "node:fs";
const httpsServer = https.createServer({
  key:  fs.readFileSync("server.key"),
  cert: fs.readFileSync("server.crt"),
}, requestHandler);   // requestHandler: same (req, res) signature as above

// HTTP client (low-level — prefer fetch() in Node 18+)
https.get("https://api.example.com/data", (res) => {
  let data = "";
  res.on("data", chunk => { data += chunk; });
  res.on("end", () => console.log(JSON.parse(data)));
});

// fetch() — built-in since Node 18 (recommended)
const res  = await fetch("https://api.example.com/users");
if (!res.ok) throw new Error(`HTTP ${res.status}`);
const json = await res.json();

// POST with fetch
const res2 = await fetch("https://api.example.com/users", {
  method: "POST",
  headers: { "Content-Type": "application/json", "Authorization": `Bearer ${token}` },
  body: JSON.stringify({ name: "Alice" }),
  signal: AbortSignal.timeout(5000),   // 5s timeout
});

// Graceful shutdown
process.on("SIGTERM", () => {
  server.close(() => {
    console.log("Server closed");
    process.exit(0);
  });
});
EventEmitter — events pattern
import { EventEmitter } from "node:events";

class JobQueue extends EventEmitter {
  #jobs = [];

  add(job) {
    this.#jobs.push(job);
    this.emit("queued", job);
    return this;
  }

  async process() {
    for (const job of this.#jobs) {
      this.emit("start", job);
      try {
        const result = await job.run();
        this.emit("done", job, result);
      } catch (err) {
        this.emit("error", err, job);   // "error" events must be handled!
      }
    }
    this.emit("drain");
  }
}

const queue = new JobQueue();

queue
  .on("queued", job => console.log("Queued:", job.id))
  .on("done",   (job, result) => console.log("Done:", job.id, result))
  .on("error",  (err, job) => console.error("Failed:", job.id, err.message))
  .on("drain",  () => console.log("Queue empty"));

// once() — listen only for first occurrence
queue.once("drain", cleanup);

// off() — remove listener
const handler = (job) => console.log(job);
queue.on("start", handler);
queue.off("start", handler);   // or: removeListener

// EventEmitter.on() — async iteration (Node 16+)
const watcher = new EventEmitter();
for await (const [event] of EventEmitter.on(watcher, "change")) {
  console.log("Change:", event);
}

// Avoid memory leak warning (default limit = 10)
queue.setMaxListeners(20);
EventEmitter.defaultMaxListeners = 20;

Always handle the 'error' event on EventEmitters. An unhandled error event throws and crashes the process.
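// Minimal illustration: without an "error" listener, emit("error") throws
const em = new EventEmitter();
em.on("error", err => console.error("handled:", err.message));
em.emit("error", new Error("boom"));   // delivered; process keeps running
// Without the on("error") line above, this emit would crash the process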

Streams — readable, writable, transform
import { Readable, Writable, Transform } from "node:stream";
import { pipeline } from "node:stream/promises";
import { createReadStream, createWriteStream } from "node:fs";
import { createGzip } from "node:zlib";

// pipeline — handles teardown on error (always use over .pipe())
await pipeline(
  createReadStream("input.csv"),
  createGzip(),
  createWriteStream("output.csv.gz")
);

// Custom Readable — push data on demand
class CounterStream extends Readable {
  constructor(max) {
    super({ objectMode: true });  // push objects, not buffers
    this.current = 0;
    this.max = max;
  }
  _read() {
    if (this.current < this.max) {
      this.push(this.current++);
    } else {
      this.push(null);   // signal end of stream
    }
  }
}

// Readable.from — create stream from async generator (most common)
async function* fetchPages(url) {
  let next = url;
  while (next) {
    const { data, nextUrl } = await fetch(next).then(r => r.json());
    yield data;
    next = nextUrl;
  }
}
const stream = Readable.from(fetchPages("/api/records"));

// Custom Transform
class JSONParser extends Transform {
  constructor() { super({ readableObjectMode: true }); }
  _transform(chunk, _encoding, callback) {
    try {
      this.push(JSON.parse(chunk.toString()));
      callback();
    } catch (err) {
      callback(err);
    }
  }
}

// Async iteration over any readable stream
for await (const chunk of stream) {
  await processItem(chunk);
}

// Collect stream to buffer/string
const chunks = [];
for await (const chunk of readable) chunks.push(chunk);
const result = Buffer.concat(chunks).toString("utf8");
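
// Shorthand collectors from node:stream/consumers (Node 16.7+):
import { buffer, text, json } from "node:stream/consumers";
const asText = await text(createReadStream("file.txt"));
const asJson = await json(createReadStream("data.json"));
const asBuf  = await buffer(createReadStream("image.png"));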
child_process — spawning processes
import { exec, execFile, spawn, spawnSync } from "node:child_process";
import { promisify } from "node:util";

const execAsync = promisify(exec);

// exec — shell command, captures output (max 1MB by default)
const { stdout, stderr } = await execAsync("git log --oneline -10");
console.log(stdout.trim());

// Higher buffer, timeout
const { stdout: out } = await execAsync("npm audit --json", {
  maxBuffer: 10 * 1024 * 1024,   // 10MB
  timeout: 30_000,
  cwd: "/app",
  env: { ...process.env, CI: "1" },
});

// spawn — streaming output, no shell by default (safer)
const child = spawn("node", ["worker.js"], {
  stdio: ["inherit", "pipe", "pipe"],  // stdin | stdout | stderr
  env: { ...process.env, WORKER: "1" },
});

// Consume both pipes concurrently (sequential reads can deadlock on full buffers)
const pump = async (src, dest) => { for await (const chunk of src) dest.write(chunk); };
await Promise.all([
  pump(child.stdout, process.stdout),
  pump(child.stderr, process.stderr),
]);

const exitCode = await new Promise(resolve =>
  child.on("close", resolve)
);
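
// Alternative: events.once resolves with the event's arguments
// import { once } from "node:events";
// const [code] = await once(child, "close");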

// spawnSync — synchronous (for scripts, not servers)
const result = spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf8" });
console.log(result.stdout.trim());

// execFile — like exec but no shell — safer for untrusted input
const { stdout: hash } = await promisify(execFile)(
  "git", ["rev-parse", "HEAD"], { encoding: "utf8" }
);

// fork — spawn Node.js child with IPC channel
import { fork } from "node:child_process";
const worker = fork("./worker.js");
worker.send({ type: "start", data });
worker.on("message", msg => console.log("Worker:", msg));
worker.on("exit", code => console.log("Exit:", code));
crypto — hashing, signing, encryption
import { createHash, createHmac, randomBytes, randomUUID,
         scrypt, pbkdf2, createCipheriv, createDecipheriv,
         timingSafeEqual } from "node:crypto";
import { promisify } from "node:util";

const scryptAsync  = promisify(scrypt);
const pbkdf2Async  = promisify(pbkdf2);

// Hashing
createHash("sha256").update("data").digest("hex");
createHash("sha256").update(buffer).digest("base64");

// HMAC — keyed hash for message authentication
createHmac("sha256", secretKey).update(payload).digest("hex");

// Random values
randomBytes(32).toString("hex");      // 64-char hex string
randomUUID();                         // "110e8400-e29b-41d4-a716-446655440000"

// Password hashing (scrypt — prefer over bcrypt in Node)
const salt = randomBytes(32);
const hash = await scryptAsync(password, salt, 64);
const stored = `${salt.toString("hex")}:${hash.toString("hex")}`;

// Verify password (timingSafeEqual avoids timing side-channels)
const [saltHex, hashHex] = stored.split(":");
const verify = await scryptAsync(input, Buffer.from(saltHex, "hex"), 64);
const ok = timingSafeEqual(verify, Buffer.from(hashHex, "hex"));

// PBKDF2 (FIPS-compliant alternative)
const key = await pbkdf2Async(password, salt, 310_000, 32, "sha256");

// AES-256-GCM encryption (authenticated)
const key256  = randomBytes(32);
const iv      = randomBytes(12);
const cipher  = createCipheriv("aes-256-gcm", key256, iv);
const encrypted = Buffer.concat([cipher.update(plaintext), cipher.final()]);
const authTag   = cipher.getAuthTag();

const decipher  = createDecipheriv("aes-256-gcm", key256, iv);
decipher.setAuthTag(authTag);
const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);

// JWT signing (without libraries — just for reference)
// In practice, use jose or jsonwebtoken
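// Minimal HS256 sketch (no expiry or claim validation; uses createHmac from above):
const b64url = (s) => Buffer.from(s).toString("base64url");
function signJWT(payload, secret) {
  const header = b64url(JSON.stringify({ alg: "HS256", typ: "JWT" }));
  const body   = b64url(JSON.stringify(payload));
  const sig    = createHmac("sha256", secret)
    .update(`${header}.${body}`).digest("base64url");
  return `${header}.${body}.${sig}`;
}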
worker_threads — CPU-bound parallelism
// main.js
import { Worker, isMainThread, parentPort, workerData } from "node:worker_threads";

if (isMainThread) {
  // Launch workers
  const results = await Promise.all(
    chunks.map(chunk => new Promise((resolve, reject) => {
      const worker = new Worker(new URL(import.meta.url), {
        workerData: { chunk }
      });
      worker.on("message", resolve);
      worker.on("error",   reject);
      worker.on("exit", code => {
        if (code !== 0) reject(new Error(`Worker exited with code ${code}`));
      });
    }))
  );
} else {
  // Worker code — runs in separate thread
  const result = heavyComputation(workerData.chunk);
  parentPort.postMessage(result);
}

// Worker pool pattern (for reuse)
import { StaticPool } from "node-worker-threads-pool";  // npm package
import os from "node:os";

const pool = new StaticPool({
  size: os.cpus().length,
  task: "./worker.js",
});

const result = await pool.exec(inputData);

// SharedArrayBuffer — zero-copy shared memory between threads
const shared = new SharedArrayBuffer(4);
const view   = new Int32Array(shared);
Atomics.add(view, 0, 1);   // thread-safe increment
Atomics.load(view, 0);     // read

// MessageChannel — direct worker-to-worker communication
const { port1, port2 } = new MessageChannel();
worker.postMessage({ port: port2 }, [port2]);   // transfer ownership

Use worker_threads for CPU-bound work (image processing, parsing, crypto). For I/O-bound work, Node’s async event loop handles concurrency without threads.

Built-in test runner (Node 18+)
import { describe, it, before, after, beforeEach, afterEach } from "node:test";
import assert from "node:assert/strict";

describe("UserService", () => {
  let service;

  before(async () => {
    service = await UserService.create({ db: testDb });
  });

  after(async () => {
    await testDb.close();
  });

  beforeEach(async () => {
    await testDb.truncate("users");
  });

  it("creates a user", async () => {
    const user = await service.create({ name: "Alice", email: "alice@example.com" });
    assert.equal(user.name, "Alice");
    assert.match(user.email, /^alice@/);
    assert.ok(user.id > 0);
  });

  it("throws on duplicate email", async () => {
    await service.create({ name: "Alice", email: "alice@example.com" });
    await assert.rejects(
      service.create({ name: "Bob", email: "alice@example.com" }),
      { code: "DUPLICATE_EMAIL" }
    );
  });

  it("skipped test", { skip: "not implemented yet" }, async () => {});
  it("todo", { todo: "add validation" });
});

// Run: node --test
// Run specific: node --test --test-name-pattern="creates"
// Coverage: node --test --experimental-test-coverage
// Watch: node --test --watch

// Assert API
assert.equal(a, b)          // strict ===
assert.deepEqual(obj, exp)  // deep structural equality
assert.throws(() => fn())
assert.rejects(promise)
assert.ok(truthy)
assert.match(str, /regex/)
assert.doesNotThrow(() => fn())
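
// Mocking: node:test ships a mock helper (Node 19.1+); a sketch:
import { mock } from "node:test";
const log = mock.fn();
log("hi");
assert.equal(log.mock.callCount(), 1);
assert.deepEqual(log.mock.calls[0].arguments, ["hi"]);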
Process, environment, and signals
// Environment
process.env.NODE_ENV     // "production" | "development" | "test"
process.env.PORT ?? "3000"
process.cwd()            // current working directory
process.argv             // ["node", "script.js", "--arg", "value"]
process.argv.slice(2)    // user args only
process.version          // "v22.0.0"
process.platform         // "linux" | "darwin" | "win32"
process.arch             // "x64" | "arm64"
process.pid

// Exit
process.exit(0)          // success
process.exit(1)          // failure
process.exitCode = 1     // set code without exiting immediately

// Signals and graceful shutdown
let isShuttingDown = false;

async function shutdown(signal) {
  if (isShuttingDown) return;
  isShuttingDown = true;
  console.log(`Received ${signal}, shutting down...`);
  await closeConnections();
  process.exit(0);
}

process.on("SIGTERM", () => shutdown("SIGTERM"));   // Kubernetes sends this
process.on("SIGINT",  () => shutdown("SIGINT"));    // Ctrl+C

// Unhandled errors
process.on("uncaughtException", (err, origin) => {
  console.error("Uncaught exception:", err, origin);
  process.exit(1);   // always exit after uncaughtException
});

process.on("unhandledRejection", (reason, promise) => {
  console.error("Unhandled rejection at:", promise, "reason:", reason);
  process.exit(1);
});

// Stdout / stderr
process.stdout.write("no newline");
process.stderr.write("error output\n");
process.stdout.isTTY    // true if running in a terminal (false in CI)

// Memory
const mem = process.memoryUsage();
mem.rss        // resident set size (total memory used)
mem.heapUsed   // JS heap used
mem.heapTotal  // JS heap total
mem.external   // memory used by C++ objects

Track Node.js EOL dates and releases at ReleaseRun — free, live data.
