Deploy to Cloudflare Workers

Deploy your Bunty application to Cloudflare's global edge network for ultra-low latency and automatic scaling.

Prerequisites

  • Cloudflare account
  • Wrangler CLI

Setup

Install Wrangler

npm install -g wrangler
# or
bun add -g wrangler

Login to Cloudflare

wrangler login

Configuration

Create wrangler.toml

name = "bunty-app"
main = "src/index.ts"
compatibility_date = "2024-01-01"

[build]
command = "bun run build"

[env.production]
name = "bunty-app-production"
route = "api.example.com/*"
vars = { ENVIRONMENT = "production" }

[env.staging]
name = "bunty-app-staging"
route = "staging-api.example.com/*"
vars = { ENVIRONMENT = "staging" }

# Bindings
[[kv_namespaces]]
binding = "CACHE"
id = "your-kv-namespace-id"

[[d1_databases]]
binding = "DB"
database_name = "bunty-db"
database_id = "your-database-id"

[[r2_buckets]]
binding = "STORAGE"
bucket_name = "bunty-storage"

Adapter for Cloudflare Workers

Create src/index.ts:

import { BuntyApplication } from '@bunty/common';
import { AppModule } from './app.module';

export default {
  async fetch(request: Request, env: any, ctx: ExecutionContext) {
    // Create the app instance for this request (see below for caching it across requests)
    const app = await BuntyApplication.create({
      module: AppModule,
      context: { env, ctx },
    });

    // Handle request
    const response = await app.handle(request);
    return response;
  },
};
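
Creating the application on every request adds startup overhead to each invocation. One option, assuming a Bunty app instance can safely serve multiple requests, is to cache it in a module-level variable (a sketch, not framework-verified):

import { BuntyApplication } from '@bunty/common';
import { AppModule } from './app.module';

// Reused across requests handled by the same isolate
let app: any;

export default {
  async fetch(request: Request, env: any, ctx: ExecutionContext) {
    // Lazily create the app on the first request this isolate serves.
    // Note: ctx is request-scoped, so only cache the app if it does not retain it.
    if (!app) {
      app = await BuntyApplication.create({
        module: AppModule,
        context: { env, ctx },
      });
    }
    return app.handle(request);
  },
};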

Environment Variables

# Set secrets
wrangler secret put DATABASE_URL
wrangler secret put JWT_SECRET

# Set variables
wrangler deploy --var ENVIRONMENT:production
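
Inside the Worker, both secrets and variables arrive as properties of the env object passed to the handler. A quick sketch reading the values set above:

export default {
  async fetch(request: Request, env: any, ctx: ExecutionContext) {
    // Secrets and [vars] entries are plain strings on `env`
    const databaseUrl: string = env.DATABASE_URL;
    const environment: string = env.ENVIRONMENT;

    return new Response(`env=${environment}, db configured=${Boolean(databaseUrl)}`);
  },
};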

Database with D1

Create D1 Database

# Create database
wrangler d1 create bunty-db

# Run migrations
wrangler d1 execute bunty-db --file=./schema.sql

# Local development
wrangler d1 execute bunty-db --local --file=./schema.sql
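
The commands above expect a schema.sql in the project root. A minimal example schema matching the users table used below:

-- schema.sql
CREATE TABLE IF NOT EXISTS users (
  id    INTEGER PRIMARY KEY AUTOINCREMENT,
  name  TEXT NOT NULL,
  email TEXT NOT NULL UNIQUE
);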

Use D1 in Your App

import { Injectable } from '@bunty/common';

@Injectable()
export class UserService {
  constructor(private readonly db: any) {} // D1 binding (env.DB)

  async getUsers() {
    const result = await this.db
      .prepare('SELECT * FROM users')
      .all();
    return result.results;
  }

  async createUser(name: string, email: string) {
    return await this.db
      .prepare('INSERT INTO users (name, email) VALUES (?, ?)')
      .bind(name, email)
      .run();
  }
}

KV Storage

Create KV Namespace

# Production namespace
wrangler kv:namespace create "CACHE"

# Preview namespace for development
wrangler kv:namespace create "CACHE" --preview

Use KV in Your App

import { Injectable } from '@bunty/common';

@Injectable()
export class CacheService {
  constructor(private readonly kv: any) {} // KV namespace binding (env.CACHE)

  async get(key: string) {
    return await this.kv.get(key, { type: 'json' });
  }

  async set(key: string, value: any, ttl?: number) {
    const options = ttl ? { expirationTtl: ttl } : {};
    await this.kv.put(key, JSON.stringify(value), options);
  }

  async delete(key: string) {
    await this.kv.delete(key);
  }
}

R2 Storage (S3-compatible)

Create R2 Bucket

wrangler r2 bucket create bunty-storage

Use R2 in Your App

import { Injectable } from '@bunty/common';

@Injectable()
export class StorageService {
  constructor(private readonly r2: any) {} // R2 bucket binding (env.STORAGE)

  async upload(key: string, file: File) {
    await this.r2.put(key, file.stream(), {
      httpMetadata: {
        contentType: file.type,
      },
    });
  }

  async download(key: string) {
    const object = await this.r2.get(key);
    if (!object) return null;
    return new Response(object.body);
  }

  async delete(key: string) {
    await this.r2.delete(key);
  }
}

Development

Local Development

# Start local dev server
wrangler dev

# With live reload
wrangler dev --live-reload

# Specify port
wrangler dev --port 3000

Deployment

Deploy to Production

# Deploy
wrangler deploy

# Deploy to specific environment
wrangler deploy --env production

# Dry run
wrangler deploy --dry-run

View Deployments

# List deployments
wrangler deployments list

# View specific deployment
wrangler deployments view <deployment-id>

# Rollback
wrangler rollback <deployment-id>

Custom Domains

Add Custom Domain

Add a route either in wrangler.toml or via the Cloudflare dashboard.

In wrangler.toml:

routes = [
  { pattern = "api.example.com/*", zone_name = "example.com" }
]

Cron Triggers

Add Scheduled Jobs

# wrangler.toml
[triggers]
crons = ["0 0 * * *"]  # Daily at midnight

Then export a scheduled handler alongside fetch in src/index.ts:

export default {
  async scheduled(event: ScheduledEvent, env: any, ctx: ExecutionContext) {
    // Run the scheduled task
    console.log('Running scheduled job');
  },
};

Durable Objects

Create Durable Object

export class Counter {
  state: DurableObjectState;

  constructor(state: DurableObjectState, env: any) {
    this.state = state;
  }

  async fetch(request: Request) {
    let count = (await this.state.storage.get('count')) || 0;
    count++;
    await this.state.storage.put('count', count);
    return new Response(count.toString());
  }
}

Configure in wrangler.toml

[[durable_objects.bindings]]
name = "COUNTER"
class_name = "Counter"
script_name = "bunty-app"

[[migrations]]
tag = "v1"
new_classes = ["Counter"]

Analytics and Monitoring

View Logs

# Stream logs from the deployed Worker
wrangler tail

# Pretty-printed output
wrangler tail --format pretty

Add Custom Metrics

export default {
  async fetch(request: Request, env: any, ctx: ExecutionContext) {
    const start = Date.now();

    // `app` is the Bunty application created in the adapter example above
    const response = await app.handle(request);

    const duration = Date.now() - start;
    // ANALYTICS is a Workers Analytics Engine binding (see wrangler.toml below)
    ctx.waitUntil(
      env.ANALYTICS.writeDataPoint({
        blobs: [request.url],
        doubles: [duration],
        indexes: [request.method],
      })
    );

    return response;
  },
};
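
The ANALYTICS binding used above is a Workers Analytics Engine dataset, declared in wrangler.toml (the dataset name here is an example):

[[analytics_engine_datasets]]
binding = "ANALYTICS"
dataset = "bunty_metrics"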

Limitations

Be aware of Cloudflare Workers limitations:

  • 128MB memory limit per isolate
  • CPU time limits: about 10ms per request on the free plan, up to 30s on paid plans
  • Limited subrequests per invocation (50 on the free plan)
  • No file system access
  • Cold starts are minimal, but don't rely on in-memory state surviving between requests

Best Practices

1. Use Edge Caching

const response = await app.handle(request);

// Copy the headers and add a caching directive; spreading a Response or
// Headers object would silently drop status and header values.
const headers = new Headers(response.headers);
headers.set('Cache-Control', 'public, max-age=300');

return new Response(response.body, {
  status: response.status,
  statusText: response.statusText,
  headers,
});

2. Implement Rate Limiting

import { Injectable } from '@bunty/common';

@Injectable()
export class RateLimiter {
  constructor(private readonly kv: any) {} // KV namespace binding (env.CACHE)

  // Note: KV is eventually consistent, so this is an approximate, best-effort limit
  async check(ip: string, limit: number = 100) {
    const key = `rate:${ip}`;
    const current = await this.kv.get(key);

    if (current && parseInt(current) >= limit) {
      throw new Error('Rate limit exceeded');
    }

    // Each write refreshes the 60-second expiration window
    await this.kv.put(key, (parseInt(current || '0') + 1).toString(), {
      expirationTtl: 60,
    });
  }
}
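
A sketch of applying the limiter at the edge, keyed on the CF-Connecting-IP header Cloudflare adds to incoming requests. How the RateLimiter is resolved depends on your Bunty setup; here it is constructed directly from the KV binding:

export default {
  async fetch(request: Request, env: any, ctx: ExecutionContext) {
    const limiter = new RateLimiter(env.CACHE);
    const ip = request.headers.get('CF-Connecting-IP') ?? 'unknown';

    try {
      await limiter.check(ip);
    } catch {
      return new Response('Too Many Requests', { status: 429 });
    }

    // `app` is the Bunty application from the adapter example
    return app.handle(request);
  },
};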

3. Handle Errors Gracefully

export default {
  async fetch(request: Request, env: any, ctx: ExecutionContext) {
    try {
      // `app` is the Bunty application from the adapter example
      return await app.handle(request);
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Internal Server Error';
      return new Response(JSON.stringify({ error: message }), {
        status: 500,
        headers: { 'Content-Type': 'application/json' },
      });
    }
  },
};

Cost Optimization

  • Use KV to cache expensive responses and cut compute time (see the sketch below)
  • Set Cache-Control headers so shareable responses are cached at the edge
  • Use D1 for relational data persistence instead of external databases
  • Monitor usage and billing in the Cloudflare dashboard
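
A minimal sketch of KV-backed response caching, assuming the CACHE binding from wrangler.toml and the app instance from the adapter example:

async function cachedFetch(request: Request, env: any): Promise<Response> {
  const key = `resp:${new URL(request.url).pathname}`;

  // Serve from KV when a cached copy exists
  const cached = await env.CACHE.get(key);
  if (cached) {
    return new Response(cached, {
      headers: { 'Content-Type': 'application/json', 'X-Cache': 'HIT' },
    });
  }

  // Otherwise compute the response and cache the body for 5 minutes
  const response = await app.handle(request);
  const body = await response.clone().text();
  await env.CACHE.put(key, body, { expirationTtl: 300 });
  return response;
}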
