Using the Fetch API with Undici in Node.js
Introduction
Undici is an HTTP client library that powers the fetch API in Node.js. It was written from scratch and does not rely on the built-in HTTP client in Node.js. It includes a number of features that make it a good choice for high-performance applications.
For information on Undici's specification compliance, see the Undici documentation.
Basic GET Usage
// Like the browser fetch API, the default method is GET.
// fetch() is available globally in Node.js (powered by Undici).
async function main() {
  const response = await fetch('https://jsonplaceholder.typicode.com/posts');
  // Parse the response body as JSON.
  const data = await response.json();
  console.log(data);
  // returns something like:
  // {
  //   userId: 1,
  //   id: 1,
  //   title: 'sunt aut facere repellat provident occaecati excepturi optio reprehenderit',
  //   body: 'quia et suscipit\n' +
  //     'suscipit recusandae consequuntur expedita et cum\n' +
  //     'reprehenderit molestiae ut ut quas totam\n' +
  //     'nostrum rerum est autem sunt rem eveniet architecto'
  // }
}

main().catch(console.error);
Basic POST Usage
// Data sent from the client to the server
const body = {
  title: 'foo',
  body: 'bar',
  userId: 1,
};

// Sends a POST request with a JSON payload and logs the created resource.
async function main() {
  const response = await fetch('https://jsonplaceholder.typicode.com/posts', {
    method: 'POST',
    headers: {
      'User-Agent': 'undici-stream-example',
      // Tells the server the request body is JSON.
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  });
  const data = await response.json();
  console.log(data);
  // returns something like:
  // { title: 'foo', body: 'bar', userId: 1, id: 101 }
}

main().catch(console.error);
Customizing the Fetch API with Undici
Undici allows you to customize the Fetch API by providing options to the fetch
function. For example, you can set custom headers, set the request method, and set the request body. Here is an example of how you can customize the Fetch API with Undici:
The fetch function takes two arguments: the URL to fetch and an options object. The options object is the Request object that you can use to customize the request. The function returns a Promise that resolves to a Response object.
In the following example, we are sending a POST request to the Ollama API with a JSON payload. Ollama is a CLI tool that allows you to run LLMs (Large Language Models) on your local machine. After installing it, you can download and run a model with:
ollama run mistral
This will download the mistral model and run it on your local machine.
With a pool, you can reuse connections to the same server, which can improve performance. Here is an example of how you can use a pool with Undici:
import { Pool } from 'undici';

// A Pool reuses connections to the same origin, which can improve performance.
const ollamaPool = new Pool('http://localhost:11434', {
  connections: 10,
});

/**
 * Stream the completion of a prompt using the Ollama API.
 * @param {string} prompt - The prompt to complete.
 * @link https://github.com/ollama/ollama/blob/main/docs/api.md
 **/
async function streamOllamaCompletion(prompt) {
  const { statusCode, body } = await ollamaPool.request({
    path: '/api/generate',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ prompt, model: 'mistral' }),
  });

  // You can read about HTTP status codes here: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
  // 200 means the request was successful.
  if (statusCode !== 200) {
    throw new Error(`Ollama request failed with status ${statusCode}`);
  }

  let partial = '';
  // TextDecoder handles multi-byte characters split across chunks
  // when called with { stream: true }.
  const decoder = new TextDecoder();
  for await (const chunk of body) {
    partial += decoder.decode(chunk, { stream: true });
    console.log(partial);
  }

  console.log('Streaming complete.');
}

try {
  await streamOllamaCompletion('What is recursion?');
} catch (error) {
  console.error('Error calling Ollama:', error);
} finally {
  console.log('Closing Ollama pool.');
  ollamaPool.close();
}
Streaming Responses with Undici
Streams are a feature in Node.js that allows you to read and write chunks of data.
import { class Writable
Writable } from 'stream';
import { import stream
stream } from 'undici';
async function function fetchGitHubRepos(): Promise<void>
fetchGitHubRepos() {
const const url: "https://api.github.com/users/nodejs/repos"
url = 'https://api.github.com/users/nodejs/repos';
const { const statusCode: any
statusCode } = await import stream
stream(
const url: "https://api.github.com/users/nodejs/repos"
url,
{
method: string
method: 'GET',
headers: {
'User-Agent': string;
Accept: string;
}
headers: {
'User-Agent': 'undici-stream-example',
type Accept: string
Accept: 'application/json',
},
},
() => {
let let buffer: string
buffer = '';
return new new Writable(opts?: Stream.WritableOptions): Writable
Writable({
Stream.WritableOptions<Stream.Writable>.write?(this: Writable, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void
write(chunk: any
chunk, encoding: BufferEncoding
encoding, callback: (error?: Error | null) => void
callback) {
let buffer: string
buffer += chunk: any
chunk.toString();
try {
const const json: any
json = var JSON: JSON
JSON.JSON.parse(text: string, reviver?: (this: any, key: string, value: any) => any): any
parse(let buffer: string
buffer);
var console: Console
console.Console.log(message?: any, ...optionalParams: any[]): void (+1 overload)
log(
'Repository Names:',
const json: any
json.map(repo: any
repo => repo: any
repo.name)
);
let buffer: string
buffer = '';
} catch (function (local var) error: unknown
error) {
var console: Console
console.Console.error(message?: any, ...optionalParams: any[]): void (+1 overload)
error('Error parsing JSON:', function (local var) error: unknown
error);
}
callback: (error?: Error | null) => void
callback();
},
Stream.WritableOptions<Stream.Writable>.final?(this: Writable, callback: (error?: Error | null) => void): void
final(callback: (error?: Error | null) => void
callback) {
var console: Console
console.Console.log(message?: any, ...optionalParams: any[]): void (+1 overload)
log('Stream processing completed.');
callback: (error?: Error | null) => void
callback();
},
});
}
);
var console: Console
console.Console.log(message?: any, ...optionalParams: any[]): void (+1 overload)
log(`Response status: ${const statusCode: any
statusCode}`);
}
function fetchGitHubRepos(): Promise<void>
fetchGitHubRepos().Promise<void>.catch<void>(onrejected?: ((reason: any) => void | PromiseLike<void>) | null | undefined): Promise<void>
catch(var console: Console
console.Console.error(...data: any[]): void (+1 overload)
error);