Pagination
Pagination
Section titled “Pagination”

GraphADV data endpoints support standard offset-based pagination using the limit and offset query parameters.
export BASE_URL="https://api.graphadv.com/v2"
export GRAPHADV_API_KEY="YOUR_API_KEY"

Basic Pagination
Section titled “Basic Pagination”

Use limit to control page size (max 100) and offset to skip records.
# First page (records 0-24)curl "$BASE_URL/companies?limit=25&offset=0" \ -H "X-Api-Key: $GRAPHADV_API_KEY"
# Second page (records 25-49)curl "$BASE_URL/companies?limit=25&offset=25" \ -H "X-Api-Key: $GRAPHADV_API_KEY"
# Third page (records 50-74)curl "$BASE_URL/companies?limit=25&offset=50" \ -H "X-Api-Key: $GRAPHADV_API_KEY"import osimport requests
BASE_URL = os.getenv("BASE_URL", "https://api.graphadv.com/v2")API_KEY = os.getenv("GRAPHADV_API_KEY")headers = {"X-Api-Key": API_KEY}
def get_page(offset=0, limit=25):
    """Fetch one page of companies from the GraphADV API.

    Args:
        offset: Number of records to skip (0-based).
        limit: Page size; the API caps this at 100.

    Returns:
        The decoded JSON response body (a dict).

    Raises:
        requests.HTTPError: If the API returns a 4xx/5xx status.
    """
    response = requests.get(
        f"{BASE_URL}/companies",
        headers=headers,
        params={"limit": limit, "offset": offset},
    )
    # Fail fast on errors instead of decoding an error body as page data.
    response.raise_for_status()
    return response.json()
# First pagepage1 = get_page(offset=0)
# Second pagepage2 = get_page(offset=25)
# Third pagepage3 = get_page(offset=50)const BASE_URL = process.env.BASE_URL ?? 'https://api.graphadv.com/v2';const API_KEY = process.env.GRAPHADV_API_KEY;
/**
 * Fetch one page of companies from the GraphADV API.
 *
 * @param {number} [offset=0] - Number of records to skip (0-based).
 * @param {number} [limit=25] - Page size; the API caps this at 100.
 * @returns {Promise<object>} The decoded JSON response body.
 * @throws {Error} If the API returns a non-2xx status.
 */
async function getPage(offset = 0, limit = 25) {
  const params = new URLSearchParams({ limit: String(limit), offset: String(offset) });
  const response = await fetch(`${BASE_URL}/companies?${params}`, {
    headers: { 'X-Api-Key': API_KEY },
  });
  // Fail fast on errors instead of parsing an error body as page data.
  if (!response.ok) {
    throw new Error(`GraphADV request failed: ${response.status}`);
  }
  return response.json();
}
// First pageconst page1 = await getPage(0);
// Second pageconst page2 = await getPage(25);
// Third pageconst page3 = await getPage(50);Paginate Through All Results
Section titled “Paginate Through All Results”Iterate through all pages until no more results are returned.
def fetch_all_companies(query=None, page_size=50):
    """Fetch all companies matching the query, paging until exhausted.

    Args:
        query: Optional search query forwarded to the API; omitted if falsy.
        page_size: Records requested per page (API caps limit at 100).

    Returns:
        A list of all company records across every page.

    Raises:
        requests.HTTPError: If any page request returns a 4xx/5xx status.
    """
    all_companies = []
    offset = 0

    while True:
        params = {"limit": page_size, "offset": offset}
        if query:
            params["query"] = query

        response = requests.get(
            f"{BASE_URL}/companies",
            headers=headers,
            params=params,
        )
        # Without this, an error response has no "data" key and the loop
        # would break silently, returning partial results as if complete.
        response.raise_for_status()
        page = response.json()
        companies = page.get("data", [])

        # An empty page means we have walked past the last record.
        if not companies:
            break

        all_companies.extend(companies)
        offset += page_size

        # Optional: stop early when the API reports the total row count.
        total = page.get("total_count")
        if total and offset >= total:
            break

    return all_companies
# Fetch all fintech companiescompanies = fetch_all_companies(query="fintech")print(f"Found {len(companies)} companies")async function fetchAllCompanies(query = null, pageSize = 50) { const allCompanies = []; let offset = 0;
while (true) { const params = new URLSearchParams({ limit: String(pageSize), offset: String(offset) }); if (query) params.set('query', query);
const response = await fetch(`${BASE_URL}/companies?${params}`, { headers: { 'X-Api-Key': API_KEY } }); const page = await response.json(); const companies = page.data ?? [];
if (companies.length === 0) break;
allCompanies.push(...companies); offset += pageSize;
// Optional: check total_count if available if (page.total_count && offset >= page.total_count) break; }
return allCompanies;}
// Fetch all fintech companiesconst companies = await fetchAllCompanies('fintech');console.log(`Found ${companies.length} companies`);Paginate Jobs
Section titled “Paginate Jobs”List enrichment jobs with pagination.
# Recent completed jobscurl "$BASE_URL/jobs?status=completed&limit=20" \ -H "X-Api-Key: $GRAPHADV_API_KEY"
# Jobs since a specific datecurl "$BASE_URL/jobs?since=2026-01-01T00:00:00Z&limit=50" \ -H "X-Api-Key: $GRAPHADV_API_KEY"# Get recent completed jobsjobs = requests.get( f"{BASE_URL}/jobs", headers=headers, params={"status": "completed", "limit": 20}).json()
for job in jobs.get("data", []): print(f"{job['job_id']}: {job['status']} at {job['completed_at']}")// Get recent completed jobsconst jobs = await fetch(`${BASE_URL}/jobs?status=completed&limit=20`, { headers: { 'X-Api-Key': API_KEY }}).then(r => r.json());
jobs.data?.forEach(job => { console.log(`${job.job_id}: ${job.status} at ${job.completed_at}`);});- Default limit: 25 records per page
- Maximum limit: 100 records per page
- Total count: Some endpoints return total_count in the response to help with pagination UI
- Data reads are free: Pagination through read endpoints doesn’t consume units
- Rate limits apply: Don’t paginate too aggressively; respect rate limits (100 req/min for reads)