
Data Loading

Learn how to load and use data in your VitePress site. Data loaders run at build time in Node.js, and the data they return is serialized as JSON into the site bundle.

Data Files

Creating Data Files

Create files ending in .data.js or .data.ts that export an object with a load() method:

javascript
// posts.data.js
export default {
  async load() {
    // Load data from API, files, etc.
    const posts = await fetch('https://api.example.com/posts')
      .then(res => res.json())
    
    return posts.map(post => ({
      title: post.title,
      url: post.url,
      excerpt: post.excerpt,
      date: post.date // keep dates as strings; loader output is serialized to JSON
    }))
  }
}
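
For local markdown content, VitePress also ships a createContentLoader helper that globs pages and exposes their url, frontmatter and excerpt, so you don't have to fetch or parse anything yourself. A minimal sketch, assuming posts live under posts/ and carry a date frontmatter field:

javascript
// posts-content.data.js
import { createContentLoader } from 'vitepress'

export default createContentLoader('posts/*.md', {
  excerpt: true,
  transform(rawData) {
    // rawData is an array of { url, frontmatter, excerpt } entries, one per matched page
    return rawData.sort(
      (a, b) => new Date(b.frontmatter.date) - new Date(a.frontmatter.date)
    )
  }
})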

Using Data in Components

Import the data named export that VitePress generates for each loader and use it in Vue components or markdown pages:

vue
<script setup>
import { data as posts } from './posts.data.js'
</script>

<template>
  <div>
    <article v-for="post in posts" :key="post.url">
      <h2>{{ post.title }}</h2>
      <p>{{ post.excerpt }}</p>
      <time>{{ new Date(post.date).toLocaleDateString() }}</time>
    </article>
  </div>
</template>

Data Sources

File System

Loaders run in Node.js, so they can read local files directly:

javascript
// docs.data.js
import { readFileSync } from 'fs'
import { resolve, dirname } from 'path'
import { fileURLToPath } from 'url'

// __dirname doesn't exist in ES modules, so derive it from import.meta.url
const __dirname = dirname(fileURLToPath(import.meta.url))

export default {
  load() {
    const dataPath = resolve(__dirname, '../data/docs.json')
    const rawData = readFileSync(dataPath, 'utf-8')
    return JSON.parse(rawData)
  }
}
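
By default a loader runs once and, during dev, won't notice when docs.json changes. Declaring the file in the loader's watch option tells VitePress to re-run load() on change and pass the matched file paths in. A sketch, assuming the same ../data location:

javascript
// watched-docs.data.js
import { readFileSync } from 'fs'

export default {
  // Re-run load() whenever a matched file changes during dev
  watch: ['../data/*.json'],
  load(watchedFiles) {
    // watchedFiles is the list of absolute paths matched by the glob above
    return watchedFiles.map(file => JSON.parse(readFileSync(file, 'utf-8')))
  }
}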

External APIs

Fetch data from external APIs:

javascript
// github.data.js
export default {
  async load() {
    const response = await fetch('https://api.github.com/repos/vuejs/vitepress')
    const repo = await response.json()
    
    return {
      name: repo.name,
      description: repo.description,
      stars: repo.stargazers_count,
      forks: repo.forks_count,
      lastUpdate: repo.updated_at // keep the ISO string; Date objects don't survive JSON serialization
    }
  }
}
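
Unauthenticated GitHub API requests are tightly rate-limited, which can make CI builds flaky. A hedged variant that sends a token when one is available (GITHUB_TOKEN is just an assumed environment variable name):

javascript
// github-authenticated.data.js
export default {
  async load() {
    const headers = process.env.GITHUB_TOKEN
      ? { Authorization: `Bearer ${process.env.GITHUB_TOKEN}` }
      : {}

    const response = await fetch('https://api.github.com/repos/vuejs/vitepress', { headers })
    if (!response.ok) {
      throw new Error(`GitHub API request failed: ${response.status}`)
    }
    return response.json()
  }
}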

Database Connections

Loaders run in Node.js at build time, so they can query a database directly; keep the credentials in environment variables:

javascript
// database.data.js
import { createConnection } from 'mysql2/promise'

export default {
  async load() {
    const connection = await createConnection({
      host: process.env.DB_HOST,
      user: process.env.DB_USER,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME
    })
    
    const [rows] = await connection.execute('SELECT * FROM articles')
    await connection.end()
    
    return rows
  }
}

Data Transformation

Processing Data

Transform data during loading:

javascript
// processed-posts.data.js
import { marked } from 'marked'

export default {
  async load() {
    const posts = await fetchPosts()
    
    return posts.map(post => ({
      ...post,
      html: marked(post.content),
      readingTime: calculateReadingTime(post.content),
      tags: post.tags.map(tag => tag.toLowerCase())
    }))
  }
}

function calculateReadingTime(content) {
  const wordsPerMinute = 200
  const wordCount = content.trim().split(/\s+/).length
  return Math.ceil(wordCount / wordsPerMinute)
}

Data Validation

Validate data structure:

javascript
// validated-data.data.js
import Joi from 'joi'

const schema = Joi.array().items(
  Joi.object({
    title: Joi.string().required(),
    date: Joi.date().required(),
    content: Joi.string().required()
  })
)

export default {
  async load() {
    const rawData = await fetchData()
    const { error, value } = schema.validate(rawData)
    
    if (error) {
      throw new Error(`Data validation failed: ${error.message}`)
    }
    
    return value
  }
}

Caching Strategies

Build-time Caching

Cache data during build:

javascript
// cached-data.data.js
import { readFileSync, writeFileSync, existsSync, statSync, mkdirSync } from 'fs'

const CACHE_FILE = '.cache/data.json'
const CACHE_DURATION = 1000 * 60 * 60 // 1 hour

export default {
  async load() {
    // Check if cache exists and is fresh
    if (existsSync(CACHE_FILE)) {
      const stats = statSync(CACHE_FILE)
      const age = Date.now() - stats.mtime.getTime()
      
      if (age < CACHE_DURATION) {
        const cached = readFileSync(CACHE_FILE, 'utf-8')
        return JSON.parse(cached)
      }
    }
    
    // Fetch fresh data
    const data = await fetchFreshData()
    
    // Cache the data (creating the cache directory if needed)
    mkdirSync('.cache', { recursive: true })
    writeFileSync(CACHE_FILE, JSON.stringify(data, null, 2))
    
    return data
  }
}

In-memory Caching

Keep data in memory so the loader doesn't refetch every time it re-runs (for example when watched files change during dev):

javascript
// runtime-cached.data.js
let cache = null
let lastFetch = 0
const CACHE_DURATION = 5 * 60 * 1000 // 5 minutes

export default {
  async load() {
    const now = Date.now()
    
    if (cache && (now - lastFetch) < CACHE_DURATION) {
      return cache
    }
    
    cache = await fetchData()
    lastFetch = now
    
    return cache
  }
}

Dynamic Data

Real-time Updates

Data loaders run only at build time, so a loader can capture at most a snapshot of the data; live updates have to be wired up on the client, inside a component. The loader just returns that initial snapshot:

javascript
// realtime-data.data.js
export default {
  async load() {
    // Loaders run at build time in Node, never in the browser,
    // so the most a loader can do is capture an initial snapshot
    return await fetchInitialData()
  }
}
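
Live updates then belong in the component. A minimal sketch, assuming the same wss://api.example.com/updates endpoint and JSON-encoded messages:

vue
<script setup>
import { ref, onMounted, onUnmounted } from 'vue'
import { data as initialData } from './realtime-data.data.js'

const items = ref(initialData)
let socket

onMounted(() => {
  // Browser-native WebSocket; this code only runs on the client
  socket = new WebSocket('wss://api.example.com/updates')
  socket.onmessage = (event) => {
    const update = JSON.parse(event.data)
    // Shallow merge; adapt this to your data shape
    items.value = { ...items.value, ...update }
  }
})

onUnmounted(() => {
  socket?.close()
})
</script>

<template>
  <pre>{{ items }}</pre>
</template>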

Incremental Updates

Implement incremental data loading:

javascript
// incremental-data.data.js
export default {
  async load() {
    // getLastUpdateTime, fetchUpdates, getExistingData and mergeData are
    // placeholders for your own helpers (e.g. backed by a local cache file)
    const lastUpdate = getLastUpdateTime()
    const updates = await fetchUpdates(lastUpdate)
    const existingData = getExistingData()

    return mergeData(existingData, updates)
  }
}

Error Handling

Graceful Degradation

Handle data loading errors:

javascript
// resilient-data.data.js
export default {
  async load() {
    try {
      return await fetchPrimaryData()
    } catch (primaryError) {
      console.warn('Primary data source failed:', primaryError)
      
      try {
        return await fetchFallbackData()
      } catch (fallbackError) {
        console.error('Fallback data source failed:', fallbackError)
        
        // Return default data
        return getDefaultData()
      }
    }
  }
}

Retry Logic

Implement retry mechanisms:

javascript
// retry-data.data.js
async function fetchWithRetry(url, maxRetries = 3) {
  for (let i = 0; i < maxRetries; i++) {
    try {
      const response = await fetch(url)
      if (response.ok) {
        return await response.json()
      }
      throw new Error(`HTTP ${response.status}`)
    } catch (error) {
      if (i === maxRetries - 1) throw error
      
      // Exponential backoff
      await new Promise(resolve => 
        setTimeout(resolve, Math.pow(2, i) * 1000)
      )
    }
  }
}

export default {
  async load() {
    return await fetchWithRetry('https://api.example.com/data')
  }
}

Performance Optimization

Lazy Loading

Loader output is serialized to JSON, so it can't carry functions. Load only the essential data at build time and return plain metadata (such as a URL) that the client can use to fetch the rest on demand; a component sketch follows the loader:

javascript
// lazy-data.data.js
export default {
  async load() {
    // Load only the essential data at build time
    const essentialData = await fetchEssentialData()

    return {
      items: essentialData,
      // Plain, serializable metadata; the browser fetches further pages itself
      nextPage: 'https://api.example.com/data?page=2'
    }
  }
}
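
A component can then page the rest in on the client. A sketch, assuming nextPage serves a JSON array of items with id and title fields:

vue
<script setup>
import { ref } from 'vue'
import { data } from './lazy-data.data.js'

const items = ref([...data.items])

// Fetch the next page in the browser, using the URL baked in at build time
async function loadMore() {
  if (!data.nextPage) return
  const res = await fetch(data.nextPage)
  items.value = items.value.concat(await res.json())
}
</script>

<template>
  <ul>
    <li v-for="item in items" :key="item.id">{{ item.title }}</li>
  </ul>
  <button @click="loadMore">Load more</button>
</template>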

Parallel Loading

Load data in parallel:

javascript
// parallel-data.data.js
export default {
  async load() {
    const [posts, authors, categories] = await Promise.all([
      fetchPosts(),
      fetchAuthors(),
      fetchCategories()
    ])
    
    return {
      posts,
      authors,
      categories
    }
  }
}

Best Practices

Data Structure

  • Keep data structures consistent
  • Use TypeScript for type safety
  • Validate data at boundaries
  • Handle missing or null data

Performance

  • Cache expensive operations
  • Use pagination for large datasets
  • Implement proper error handling
  • Monitor data loading performance

Security

  • Validate all external data
  • Sanitize user-generated content (see the sketch below)
  • Use environment variables for secrets
  • Implement rate limiting for APIs
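
For example, if a loader pulls user-submitted HTML (comments, reviews), run it through a sanitizer before it reaches a page. A sketch using the sanitize-html package; fetchComments() is a placeholder for your own fetch:

javascript
// comments.data.js
import sanitizeHtml from 'sanitize-html'

export default {
  async load() {
    const comments = await fetchComments()

    return comments.map(comment => ({
      ...comment,
      // Strip scripts and unexpected tags/attributes from user-submitted HTML
      html: sanitizeHtml(comment.html)
    }))
  }
}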
