These examples use the built-in fetch API (Node.js 18+); install node-fetch for older Node versions.
Setup
Copy
const API_KEY = 'ck_your_api_key';
const BASE_URL = 'https://api.crawlkit.com/api/v1';
// Helper function for all requests
async function crawlkit(endpoint, body) {
const response = await fetch(`${BASE_URL}${endpoint}`, {
method: 'POST',
headers: {
'Authorization': `ApiKey ${API_KEY}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(body),
});
return response.json();
}
Raw Crawl
Fetch a webpage and get its HTML content.

Basic Request
Copy
// Fetch the raw HTML of a page using default crawl options.
const result = await crawlkit('/crawl/raw', {
  url: 'https://example.com',
});

const { body, statusCode } = result.data;
console.log(body); // HTML content
console.log(statusCode); // 200
With Options
Copy
// Fetch a page with explicit crawl options instead of the defaults.
const crawlBody = {
  url: 'https://example.com',
  options: {
    timeout: 30000,
    followRedirects: true,
    maxRedirects: 5,
    headers: {
      'User-Agent': 'MyApp/1.0',
    },
  },
};
const result = await crawlkit('/crawl/raw', crawlBody);
Complete Example
Copy
// Fetch a page and return its HTML body, logging request metadata.
// Throws if the API reports failure or the network call errors.
async function fetchPage(url) {
  try {
    const result = await crawlkit('/crawl/raw', { url });

    // The API signals application-level failure via result.success.
    if (!result.success) {
      throw new Error(result.error.message);
    }

    const { finalUrl, statusCode, contentLength, timing, creditsRemaining, body } = result.data;
    console.log(`Fetched ${finalUrl}`);
    console.log(`Status: ${statusCode}`);
    console.log(`Size: ${contentLength} bytes`);
    console.log(`Time: ${timing.total}ms`);
    console.log(`Credits remaining: ${creditsRemaining}`);
    return body;
  } catch (error) {
    // Log with context, then let the caller decide how to recover.
    console.error('Error:', error.message);
    throw error;
  }
}
// Usage
const html = await fetchPage('https://example.com');
Web Search
Search the web using DuckDuckGo.

Basic Search
Copy
// Run a basic web search and print the ranked results.
const result = await crawlkit('/crawl/search', {
  query: 'web scraping python',
});

result.data.results.forEach((item) => {
  console.log(`${item.position}. ${item.title}`);
  console.log(`  ${item.url}`);
  console.log(`  ${item.snippet}\n`);
});
With Filters
Copy
// Search with additional filter options applied.
const searchBody = {
  query: 'web scraping python',
  options: {
    language: 'en-US',
    region: 'us-en',
    timeRange: 'm', // Last month
    maxResults: 20,
  },
};
const result = await crawlkit('/crawl/search', searchBody);
Complete Example
Copy
// Search the web and return a simplified list of results.
// Throws when the API reports failure.
async function search(query, options = {}) {
  const result = await crawlkit('/crawl/search', { query, options });

  if (!result.success) {
    throw new Error(result.error.message);
  }

  console.log(`Found ${result.data.totalResults} results for "${query}"`);

  // Keep only the fields callers typically need.
  return result.data.results.map(({ title, url, snippet }) => ({
    title,
    url,
    snippet,
  }));
}
// Usage
const results = await search('nodejs tutorial', { maxResults: 10 });
console.log(results);
Screenshot
Take a full-page screenshot of any website.

Basic Screenshot
Copy
// Capture a screenshot with default viewport settings.
const result = await crawlkit('/crawl/screenshot', {
  url: 'https://example.com',
});

console.log('Screenshot URL:', result.data.url);
With Options
Copy
// Capture a screenshot with an explicit viewport and wait condition.
const screenshotBody = {
  url: 'https://example.com',
  options: {
    width: 1920,
    height: 1080,
    timeout: 30000,
    waitForSelector: '.content-loaded',
  },
};
const result = await crawlkit('/crawl/screenshot', screenshotBody);
Download Screenshot
Copy
import fs from 'fs';

// Take a screenshot of `url` via the API and save the resulting image
// to `filename`. Throws if the API reports failure; returns `filename`.
async function takeScreenshot(url, filename) {
  const result = await crawlkit('/crawl/screenshot', { url });
  if (!result.success) {
    throw new Error(result.error.message);
  }
  // Download the image from the URL the API returned.
  const imageResponse = await fetch(result.data.url);
  const buffer = await imageResponse.arrayBuffer();
  fs.writeFileSync(filename, Buffer.from(buffer));
  // BUG FIX: original logged the broken placeholder `$(unknown)`;
  // interpolate the actual target filename instead.
  console.log(`Saved to ${filename}`);
  return filename;
}
// Usage
await takeScreenshot('https://example.com', 'screenshot.png');
Error Handling
Copy
// Crawl a URL, translating known API error codes into friendly
// console messages. Returns the response data, or null on API error.
async function safeCrawl(url) {
  const result = await crawlkit('/crawl/raw', { url });

  if (result.success) {
    return result.data;
  }

  const { code, message } = result.error;

  // Friendly messages for the error codes we know how to explain.
  const knownErrors = {
    INSUFFICIENT_CREDITS: 'Out of credits! Please purchase more.',
    INVALID_URL: 'Invalid URL provided.',
    TIMEOUT: 'Request timed out. Try increasing timeout.',
  };
  console.error(knownErrors[code] ?? `Error: ${message}`);
  return null;
}
Using with TypeScript
Copy
// Shape of a CrawlKit API response for the raw-crawl endpoint.
// Exactly one of `data` (on success) or `error` (on failure) is set.
interface CrawlResponse {
  success: boolean;
  data?: {
    url: string;
    finalUrl: string;
    statusCode: number;
    body: string;
    contentLength: number;
    timing: { total: number };
    creditsUsed: number;
    creditsRemaining: number;
  };
  error?: {
    code: string;
    message: string;
  };
}

// Typed wrapper: POST a JSON body and cast the parsed response to T.
async function crawlkit<T>(endpoint: string, body: object): Promise<T> {
  const headers = {
    'Authorization': `ApiKey ${API_KEY}`,
    'Content-Type': 'application/json',
  };
  const response = await fetch(`${BASE_URL}${endpoint}`, {
    method: 'POST',
    headers,
    body: JSON.stringify(body),
  });
  return response.json() as Promise<T>;
}
// Usage
const result = await crawlkit<CrawlResponse>('/crawl/raw', {
  url: 'https://example.com',
});