Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .coderabbit.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ reviews:
collapse_walkthrough: false
profile: "chill"
high_level_summary: true
request_changes_workflow: true
request_changes_workflow: false
poem: false
in_progress_fortune: false
sequence_diagrams: false
Expand Down
19 changes: 8 additions & 11 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ concurrency: ${{ github.workflow }}-${{ github.ref }}

env:
WORKSPACES: create-db create-pg create-postgres
PACKAGES_DIR: packages
POSTHOG_API_HOST: ${{ secrets.POSTHOG_API_HOST }}
POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }}
CREATE_DB_WORKER_URL: ${{ secrets.CREATE_DB_WORKER_URL }}
Expand Down Expand Up @@ -46,8 +47,6 @@ jobs:

- name: 📦 Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 8

- name: 🔧 Setup Node.js
uses: actions/setup-node@v4
Expand Down Expand Up @@ -75,12 +74,12 @@ jobs:

for pkg in ${{ env.WORKSPACES }}; do
echo "Publishing $pkg to npm..."
cd "$pkg"
cd "${{ env.PACKAGES_DIR }}/$pkg"
export POSTHOG_API_HOST="${POSTHOG_API_HOST}"
export POSTHOG_API_KEY="${POSTHOG_API_KEY}"
export CREATE_DB_WORKER_URL="${CREATE_DB_WORKER_URL}"
export CLAIM_DB_WORKER_URL="${CLAIM_DB_WORKER_URL}"

# First try to publish
if ! pnpm publish --access public --no-git-checks; then
echo "Publish failed, trying to bump version and retry..."
Expand Down Expand Up @@ -127,8 +126,6 @@ jobs:

- name: 📦 Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 8

- name: 🔧 Setup Node.js
uses: actions/setup-node@v4
Expand All @@ -146,7 +143,7 @@ jobs:
- name: 📄 Copy README to child CLIs
run: |
for pkg in create-pg create-postgres; do
cp create-db/README.md "$pkg/README.md"
cp packages/create-db/README.md "packages/$pkg/README.md"
done

- name: 🔖 Create unique preview tag
Expand Down Expand Up @@ -175,7 +172,7 @@ jobs:
echo "Using POSTHOG_API_HOST=${POSTHOG_API_HOST}"

for pkg in ${{ env.WORKSPACES }}; do
cd "$pkg"
cd "${{ env.PACKAGES_DIR }}/$pkg"
export CREATE_DB_WORKER_URL
export CLAIM_DB_WORKER_URL
export POSTHOG_API_HOST="${POSTHOG_API_HOST}"
Expand Down Expand Up @@ -216,9 +213,9 @@ jobs:
npx create-postgres@pr${{ github.event.number }}
\`\`\`

**Worker URLs**
• Create-DB Worker: ${dbUrl}
• Claim-DB Worker: ${clUrl}
**Worker URLs**
• Create-DB Worker: ${dbUrl}
• Claim-DB Worker: ${clUrl}

> These will live as long as this PR exists under tag \`${tag}\`.`
});
20 changes: 6 additions & 14 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@ jobs:

- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 9

- name: Setup Node.js
uses: actions/setup-node@v4
Expand All @@ -29,25 +27,19 @@ jobs:

- name: Install Dependencies
run: pnpm install --frozen-lockfile
working-directory: ./claim-db-worker

- name: Run claim-db-worker tests
run: pnpm test
working-directory: ./claim-db-worker
run: pnpm turbo run test --filter=./apps/claim-db-worker...
env:
NODE_ENV: test

- name: Install create-db dependencies
run: pnpm install --frozen-lockfile
working-directory: ./create-db

- name: Build create-db
run: pnpm build
working-directory: ./create-db
- name: Run create-db-worker tests
run: pnpm turbo run test --filter=./apps/create-db-worker...
env:
NODE_ENV: test

- name: Run create-db tests
run: pnpm test
working-directory: ./create-db
run: pnpm turbo run test --filter=./packages/create-db...
env:
NODE_ENV: test
CREATE_DB_WORKER_URL: ${{ secrets.CREATE_DB_WORKER_URL }}
Expand Down
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# Turborepo
.turbo

# Node.js
node_modules/
dist/
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"deploy": "wrangler deploy",
"dev": "wrangler dev",
"start": "wrangler dev",
"test": "vitest",
"test": "vitest run",
"cf-typegen": "wrangler types"
},
"devDependencies": {
Expand Down
File renamed without changes.
File renamed without changes.
62 changes: 62 additions & 0 deletions apps/create-db-worker/test/analytics.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { PosthogEventCapture, EventCaptureError } from '../src/analytics';

// Unit tests for the PosthogEventCapture analytics client: env-var gating,
// request shape, host normalization, and error propagation.
describe('PosthogEventCapture', () => {
  const HOST = 'https://posthog.example.com';

  // Installs a spy on global fetch that resolves with a stubbed Response.
  const stubFetch = (init: ResponseInit) =>
    vi.spyOn(global, 'fetch').mockResolvedValue(new Response(null, init));

  beforeEach(() => {
    // Drop any fetch spies installed by the previous test.
    vi.restoreAllMocks();
  });

  it('does nothing when POSTHOG_API_HOST is missing', async () => {
    const mockedFetch = vi.spyOn(global, 'fetch');
    const client = new PosthogEventCapture({ POSTHOG_API_KEY: 'key' });
    await client.capture('test_event');
    expect(mockedFetch).not.toHaveBeenCalled();
  });

  it('does nothing when POSTHOG_API_KEY is missing', async () => {
    const mockedFetch = vi.spyOn(global, 'fetch');
    const client = new PosthogEventCapture({ POSTHOG_API_HOST: HOST });
    await client.capture('test_event');
    expect(mockedFetch).not.toHaveBeenCalled();
  });

  it('sends a POST request to the capture endpoint', async () => {
    const mockedFetch = stubFetch({ status: 200 });
    const client = new PosthogEventCapture({
      POSTHOG_API_HOST: HOST,
      POSTHOG_API_KEY: 'phc_testkey',
    });

    await client.capture('db_created', { region: 'us-east-1' });

    expect(mockedFetch).toHaveBeenCalledOnce();
    const [requestUrl, requestInit] = mockedFetch.mock.calls[0];
    expect(requestUrl).toBe(`${HOST}/capture`);
    expect(requestInit?.method).toBe('POST');

    // The payload must carry the event name, caller properties, and the
    // anonymous-capture flag.
    const payload = JSON.parse(requestInit?.body as string);
    expect(payload.event).toBe('db_created');
    expect(payload.properties.region).toBe('us-east-1');
    expect(payload.properties.$process_person_profile).toBe(false);
  });

  it('strips trailing slash from host', async () => {
    const mockedFetch = stubFetch({ status: 200 });
    const client = new PosthogEventCapture({
      POSTHOG_API_HOST: `${HOST}///`,
      POSTHOG_API_KEY: 'key',
    });
    await client.capture('event');
    const [requestUrl] = mockedFetch.mock.calls[0];
    expect(requestUrl).toBe(`${HOST}/capture`);
  });

  it('throws EventCaptureError on non-ok response', async () => {
    stubFetch({ status: 500, statusText: 'Internal Server Error' });
    const client = new PosthogEventCapture({
      POSTHOG_API_HOST: HOST,
      POSTHOG_API_KEY: 'key',
    });
    await expect(client.capture('event')).rejects.toBeInstanceOf(EventCaptureError);
  });
});
File renamed without changes.
76 changes: 76 additions & 0 deletions apps/create-db-worker/test/ttl.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
import { describe, it, expect } from 'vitest';
import { parseTtlMsInput, isTtlMsInRange, clampTtlMs, MIN_TTL_MS, MAX_TTL_MS } from '../src/ttl';

// parseTtlMsInput: accepts finite numbers (flooring fractions), rejects
// everything else by returning undefined.
describe('parseTtlMsInput', () => {
  const HOUR_MS = 3_600_000;

  it('returns the value for a valid finite number', () => {
    expect(parseTtlMsInput(HOUR_MS)).toBe(HOUR_MS);
  });

  it('floors float values', () => {
    expect(parseTtlMsInput(HOUR_MS + 0.9)).toBe(HOUR_MS);
  });

  // Non-numeric and non-finite inputs are all rejected identically.
  const rejected: Array<[string, unknown]> = [
    ['a string', '3600000'],
    ['null', null],
    ['undefined', undefined],
    ['Infinity', Infinity],
    ['NaN', NaN],
  ];
  for (const [label, input] of rejected) {
    it(`returns undefined for ${label}`, () => {
      expect(parseTtlMsInput(input)).toBeUndefined();
    });
  }
});

describe('isTtlMsInRange', () => {
it('returns true for the minimum value', () => {
expect(isTtlMsInRange(MIN_TTL_MS)).toBe(true);
});

it('returns true for the maximum value', () => {
expect(isTtlMsInRange(MAX_TTL_MS)).toBe(true);
});

it('returns true for a value within range', () => {
expect(isTtlMsInRange(3600000)).toBe(true);
});

it('returns false for a value below minimum', () => {
expect(isTtlMsInRange(MIN_TTL_MS - 1)).toBe(false);
});

it('returns false for a value above maximum', () => {
expect(isTtlMsInRange(MAX_TTL_MS + 1)).toBe(false);
});
});

describe('clampTtlMs', () => {
it('returns MAX_TTL_MS for undefined', () => {
expect(clampTtlMs(undefined)).toBe(MAX_TTL_MS);
});

it('returns MAX_TTL_MS for NaN', () => {
expect(clampTtlMs(NaN)).toBe(MAX_TTL_MS);
});

it('clamps to MIN_TTL_MS when below range', () => {
expect(clampTtlMs(1000)).toBe(MIN_TTL_MS);
});

it('clamps to MAX_TTL_MS when above range', () => {
expect(clampTtlMs(MAX_TTL_MS + 99999)).toBe(MAX_TTL_MS);
});

it('returns the value unchanged when within range', () => {
expect(clampTtlMs(3600000)).toBe(3600000);
});
});
Loading
Loading