diff --git a/.gitea/workflows/cloudflare-pages.yml b/.gitea/workflows/cloudflare-pages.yml
deleted file mode 100644
index a057fa2..0000000
--- a/.gitea/workflows/cloudflare-pages.yml
+++ /dev/null
@@ -1,123 +0,0 @@
-name: Deploy to Cloudflare Pages
-
-on:
- push:
- branches:
- - main
- workflow_dispatch:
-
-env:
- OAUTH_DIR: oauth
- KEEP_DEPLOYMENTS: 5
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
- permissions:
- contents: read
- deployments: write
-
- steps:
- - name: Checkout
- uses: actions/checkout@v4
-
- - name: Setup Node.js
- uses: actions/setup-node@v4
- with:
- node-version: '21'
-
- - name: Install dependencies
- run: |
- cd ${{ env.OAUTH_DIR }}
- npm install
-
- - name: Build OAuth app
- run: |
- cd ${{ env.OAUTH_DIR }}
- NODE_ENV=production npm run build
- - name: Copy OAuth build to static
- run: |
- rm -rf my-blog/static/assets
- cp -rf ${{ env.OAUTH_DIR }}/dist/* my-blog/static/
- cp ${{ env.OAUTH_DIR }}/dist/index.html my-blog/templates/oauth-assets.html
-
- - name: Cache ailog binary
- uses: actions/cache@v4
- with:
- path: ./bin
- key: ailog-bin-${{ runner.os }}
- restore-keys: |
- ailog-bin-${{ runner.os }}
-
- - name: Setup ailog binary
- run: |
- # Get expected version from Cargo.toml
- EXPECTED_VERSION=$(grep '^version' Cargo.toml | cut -d'"' -f2)
- echo "Expected version from Cargo.toml: $EXPECTED_VERSION"
-
- # Check current binary version if exists
- if [ -f "./bin/ailog" ]; then
- CURRENT_VERSION=$(./bin/ailog --version 2>/dev/null || echo "unknown")
- echo "Current binary version: $CURRENT_VERSION"
- else
- CURRENT_VERSION="none"
- echo "No binary found"
- fi
-
- # Check OS
- OS="${{ runner.os }}"
- echo "Runner OS: $OS"
-
- # Use pre-packaged binary if version matches or extract from tar.gz
- if [ "$CURRENT_VERSION" = "$EXPECTED_VERSION" ]; then
- echo "Binary is up to date"
- chmod +x ./bin/ailog
- elif [ "$OS" = "Linux" ] && [ -f "./bin/ailog-linux-x86_64.tar.gz" ]; then
- echo "Extracting ailog from pre-packaged tar.gz..."
- cd bin
- tar -xzf ailog-linux-x86_64.tar.gz
- chmod +x ailog
- cd ..
-
- # Verify extracted version
- EXTRACTED_VERSION=$(./bin/ailog --version 2>/dev/null || echo "unknown")
- echo "Extracted binary version: $EXTRACTED_VERSION"
-
- if [ "$EXTRACTED_VERSION" != "$EXPECTED_VERSION" ]; then
- echo "Warning: Binary version mismatch. Expected $EXPECTED_VERSION but got $EXTRACTED_VERSION"
- fi
- else
- echo "Error: No suitable binary found for OS: $OS"
- exit 1
- fi
-
- - name: Build site with ailog
- run: |
- cd my-blog
- ../bin/ailog build
-
- - name: List public directory
- run: |
- ls -la my-blog/public/
-
- - name: Deploy to Cloudflare Pages
- uses: cloudflare/pages-action@v1
- with:
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- projectName: ${{ secrets.CLOUDFLARE_PROJECT_NAME }}
- directory: my-blog/public
- wranglerVersion: '3'
-
- cleanup:
- needs: deploy
- runs-on: ubuntu-latest
- if: success()
- steps:
- - name: Cleanup old deployments
- run: |
- curl -X PATCH \
- "https://api.cloudflare.com/client/v4/accounts/${{ secrets.CLOUDFLARE_ACCOUNT_ID }}/pages/projects/${{ secrets.CLOUDFLARE_PROJECT_NAME }}" \
- -H "Authorization: Bearer ${{ secrets.CLOUDFLARE_API_TOKEN }}" \
- -H "Content-Type: application/json" \
- -d "{ \"deployment_configs\": { \"production\": { \"deployment_retention\": ${{ env.KEEP_DEPLOYMENTS }} } } }"
\ No newline at end of file
diff --git a/.gitea/workflows/deploy.yml b/.gitea/workflows/deploy.yml
deleted file mode 100644
index 4586d25..0000000
--- a/.gitea/workflows/deploy.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-name: Deploy to Cloudflare Pages
-
-on:
- push:
- branches: [main]
- pull_request:
- branches: [main]
-
-jobs:
- build-and-deploy:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout
- uses: actions/checkout@v4
-
- - name: Setup Rust
- uses: actions-rs/toolchain@v1
- with:
- toolchain: stable
- override: true
-
- - name: Setup Node.js
- uses: actions/setup-node@v4
- with:
- node-version: '20'
-
- - name: Build ailog
- run: |
- cargo build --release
-
- - name: Build OAuth app
- run: |
- cd oauth
- npm install
- npm run build
-
- - name: Copy OAuth assets
- run: |
- cp -r oauth/dist/* my-blog/static/
-
- - name: Generate site with ailog
- run: |
- ./target/release/ailog generate --input content --output my-blog/public
-
- - name: Deploy to Cloudflare Pages
- uses: cloudflare/pages-action@v1
- with:
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- projectName: syui-ai
- directory: my-blog/public
- gitHubToken: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.gitea/workflows/example-usage.yml b/.gitea/workflows/example-usage.yml
deleted file mode 100644
index ddc7bff..0000000
--- a/.gitea/workflows/example-usage.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: Example ailog usage
-
-on:
- workflow_dispatch: # Manual trigger for testing
-
-jobs:
- build-with-ailog-action:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout
- uses: actions/checkout@v4
-
- - name: Build with ailog action
- uses: ai/log@v1 # This will reference this repository
- with:
- content-dir: 'content'
- output-dir: 'public'
- ai-integration: true
- atproto-integration: true
-
- - name: Deploy to Cloudflare Pages
- uses: cloudflare/pages-action@v1
- with:
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- projectName: my-blog
- directory: public
\ No newline at end of file
diff --git a/.gitea/workflows/release.yml b/.gitea/workflows/release.yml
deleted file mode 100644
index 30224a8..0000000
--- a/.gitea/workflows/release.yml
+++ /dev/null
@@ -1,193 +0,0 @@
-name: Release
-
-on:
- push:
- tags:
- - 'v*'
- workflow_dispatch:
- inputs:
- tag:
- description: 'Release tag (e.g., v1.0.0)'
- required: true
- default: 'v0.1.0'
-
-permissions:
- contents: write
- actions: read
-
-env:
- CARGO_TERM_COLOR: always
- OPENSSL_STATIC: true
- OPENSSL_VENDOR: true
-
-jobs:
- build:
- name: Build ${{ matrix.target }}
- runs-on: ${{ matrix.os }}
- timeout-minutes: 60
- strategy:
- matrix:
- include:
- - target: x86_64-unknown-linux-gnu
- os: ubuntu-latest
- artifact_name: ailog
- asset_name: ailog-linux-x86_64
- - target: aarch64-unknown-linux-gnu
- os: ubuntu-latest
- artifact_name: ailog
- asset_name: ailog-linux-aarch64
- - target: x86_64-apple-darwin
- os: macos-latest
- artifact_name: ailog
- asset_name: ailog-macos-x86_64
- - target: aarch64-apple-darwin
- os: macos-latest
- artifact_name: ailog
- asset_name: ailog-macos-aarch64
-
- steps:
- - uses: actions/checkout@v4
-
- - name: Setup Rust
- uses: dtolnay/rust-toolchain@stable
- with:
- targets: ${{ matrix.target }}
-
- - name: Install cross-compilation tools (Linux)
- if: matrix.os == 'ubuntu-latest' && matrix.target == 'aarch64-unknown-linux-gnu'
- run: |
- sudo apt-get update
- sudo apt-get install -y gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu
-
- - name: Configure cross-compilation (Linux ARM64)
- if: matrix.target == 'aarch64-unknown-linux-gnu'
- run: |
- echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config.toml
- echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config.toml
-
- - name: Cache cargo registry
- uses: actions/cache@v4
- with:
- path: |
- ~/.cargo/registry
- ~/.cargo/git
- key: ${{ runner.os }}-${{ matrix.target }}-cargo-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache target directory
- uses: actions/cache@v4
- with:
- path: target
- key: ${{ runner.os }}-${{ matrix.target }}-target-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Build
- run: cargo build --release --target ${{ matrix.target }}
-
- - name: Prepare binary
- shell: bash
- run: |
- cd target/${{ matrix.target }}/release
-
- # Use appropriate strip command for cross-compilation
- if [[ "${{ matrix.target }}" == "aarch64-unknown-linux-gnu" ]]; then
- aarch64-linux-gnu-strip ${{ matrix.artifact_name }} || echo "Strip failed, continuing..."
- elif [[ "${{ matrix.os }}" == "windows-latest" ]]; then
- strip ${{ matrix.artifact_name }} || echo "Strip failed, continuing..."
- else
- strip ${{ matrix.artifact_name }} || echo "Strip failed, continuing..."
- fi
-
- # Create archive
- if [[ "${{ matrix.target }}" == *"windows"* ]]; then
- 7z a ../../../${{ matrix.asset_name }}.zip ${{ matrix.artifact_name }}
- else
- tar czvf ../../../${{ matrix.asset_name }}.tar.gz ${{ matrix.artifact_name }}
- fi
-
- - name: Upload binary
- uses: actions/upload-artifact@v4
- with:
- name: ${{ matrix.asset_name }}
- path: ${{ matrix.asset_name }}.tar.gz
-
- release:
- name: Create Release
- needs: build
- runs-on: ubuntu-latest
- permissions:
- contents: write
- actions: read
- steps:
- - uses: actions/checkout@v4
-
- - name: Download all artifacts
- uses: actions/download-artifact@v4
- with:
- path: artifacts
-
- - name: Generate release notes
- run: |
- echo "## What's Changed" > release_notes.md
- echo "" >> release_notes.md
- echo "### Features" >> release_notes.md
- echo "- AI-powered static blog generator" >> release_notes.md
- echo "- AtProto OAuth integration" >> release_notes.md
- echo "- Automatic translation support" >> release_notes.md
- echo "- AI comment system" >> release_notes.md
- echo "" >> release_notes.md
- echo "### Platforms" >> release_notes.md
- echo "- Linux (x86_64, aarch64)" >> release_notes.md
- echo "- macOS (Intel, Apple Silicon)" >> release_notes.md
- echo "" >> release_notes.md
- echo "### Installation" >> release_notes.md
- echo "\`\`\`bash" >> release_notes.md
- echo "# Linux/macOS" >> release_notes.md
- echo "tar -xzf ailog-linux-x86_64.tar.gz" >> release_notes.md
- echo "chmod +x ailog" >> release_notes.md
- echo "sudo mv ailog /usr/local/bin/" >> release_notes.md
- echo "" >> release_notes.md
- echo "\`\`\`" >> release_notes.md
-
- - name: Get tag name
- id: tag_name
- run: |
- if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
- echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT
- else
- echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- fi
-
- - name: Create Release with Gitea API
- run: |
- # Prepare release files
- mkdir -p release
- find artifacts -name "*.tar.gz" -exec cp {} release/ \;
-
- # Create release via Gitea API
- RELEASE_RESPONSE=$(curl -X POST \
- "${{ github.server_url }}/api/v1/repos/${{ github.repository }}/releases" \
- -H "Authorization: token ${{ github.token }}" \
- -H "Content-Type: application/json" \
- -d '{
- "tag_name": "${{ steps.tag_name.outputs.tag }}",
- "name": "ailog ${{ steps.tag_name.outputs.tag }}",
- "body": "'"$(cat release_notes.md | sed 's/"/\\"/g' | tr '\n' ' ')"'",
- "draft": false,
- "prerelease": '"$(if echo "${{ steps.tag_name.outputs.tag }}" | grep -E "(alpha|beta|rc)"; then echo "true"; else echo "false"; fi)"'
- }')
-
- # Get release ID
- RELEASE_ID=$(echo "$RELEASE_RESPONSE" | jq -r '.id')
- echo "Created release with ID: $RELEASE_ID"
-
- # Upload release assets
- for file in release/*.tar.gz; do
- if [ -f "$file" ]; then
- filename=$(basename "$file")
- echo "Uploading $filename..."
- curl -X POST \
- "${{ github.server_url }}/api/v1/repos/${{ github.repository }}/releases/$RELEASE_ID/assets?name=$filename" \
- -H "Authorization: token ${{ github.token }}" \
- -H "Content-Type: application/octet-stream" \
- --data-binary @"$file"
- fi
- done
\ No newline at end of file
diff --git a/.github/workflows/cloudflare-pages.yml b/.github/workflows/cloudflare-pages.yml
deleted file mode 100644
index 0b83abe..0000000
--- a/.github/workflows/cloudflare-pages.yml
+++ /dev/null
@@ -1,169 +0,0 @@
-name: Deploy to Cloudflare Pages
-
-on:
- push:
- branches:
- - main
- workflow_dispatch:
-
-env:
- OAUTH_DIR: oauth
- KEEP_DEPLOYMENTS: 5
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
- permissions:
- contents: read
- deployments: write
-
- steps:
- - name: Checkout
- uses: actions/checkout@v4
-
- - name: Setup Node.js
- uses: actions/setup-node@v4
- with:
- node-version: '25'
-
- - name: Install dependencies
- run: |
- cd ${{ env.OAUTH_DIR }}
- npm install
-
- - name: Build OAuth app
- run: |
- cd ${{ env.OAUTH_DIR }}
- NODE_ENV=production npm run build
- - name: Copy OAuth build to static
- run: |
- rm -rf my-blog/static/assets
- cp -rf ${{ env.OAUTH_DIR }}/dist/* my-blog/static/
- cp ${{ env.OAUTH_DIR }}/dist/index.html my-blog/templates/oauth-assets.html
-
- - name: Build PDS app
- run: |
- cd pds
- npm install
- npm run build
-
- - name: Copy PDS build to static
- run: |
- rm -rf my-blog/static/pds
- cp -rf pds/dist my-blog/static/pds
-
- - name: Cache ailog binary
- uses: actions/cache@v4
- with:
- path: ./bin
- key: ailog-bin-${{ runner.os }}-v${{ hashFiles('Cargo.toml') }}
- restore-keys: |
- ailog-bin-${{ runner.os }}-v
-
- - name: Setup ailog binary
- run: |
- # Get expected version from Cargo.toml
- EXPECTED_VERSION=$(grep '^version' Cargo.toml | cut -d'"' -f2)
- echo "Expected version from Cargo.toml: $EXPECTED_VERSION"
-
- # Check current binary version if exists
- if [ -f "./bin/ailog" ]; then
- CURRENT_VERSION=$(./bin/ailog --version 2>/dev/null || echo "unknown")
- echo "Current binary version: $CURRENT_VERSION"
- else
- CURRENT_VERSION="none"
- echo "No binary found"
- fi
-
- # Check OS
- OS="${{ runner.os }}"
- echo "Runner OS: $OS"
-
- # Use pre-packaged binary if version matches or extract from tar.gz
- if [ "$CURRENT_VERSION" = "$EXPECTED_VERSION" ]; then
- echo "Binary is up to date"
- chmod +x ./bin/ailog
- elif [ "$OS" = "Linux" ] && [ -f "./bin/ailog-linux-x86_64.tar.gz" ]; then
- echo "Extracting ailog from pre-packaged tar.gz..."
- cd bin
- tar -xzf ailog-linux-x86_64.tar.gz
- chmod +x ailog
- cd ..
-
- # Verify extracted version
- EXTRACTED_VERSION=$(./bin/ailog --version 2>/dev/null || echo "unknown")
- echo "Extracted binary version: $EXTRACTED_VERSION"
-
- if [ "$EXTRACTED_VERSION" != "$EXPECTED_VERSION" ]; then
- echo "Warning: Binary version mismatch. Expected $EXPECTED_VERSION but got $EXTRACTED_VERSION"
- fi
- else
- echo "Error: No suitable binary found for OS: $OS"
- exit 1
- fi
-
- - name: Build site with ailog
- run: |
- cd my-blog
- ../bin/ailog build
-
- - name: List public directory
- run: |
- ls -la my-blog/public/
-
- - name: Deploy to Cloudflare Pages
- uses: cloudflare/pages-action@v1
- with:
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- projectName: ${{ secrets.CLOUDFLARE_PROJECT_NAME }}
- directory: my-blog/public
- gitHubToken: ${{ secrets.GITHUB_TOKEN }}
- wranglerVersion: '3'
-
- cleanup:
- needs: deploy
- runs-on: ubuntu-latest
- if: success()
- steps:
- - name: Cleanup old deployments
- run: |
- curl -X PATCH \
- "https://api.cloudflare.com/client/v4/accounts/${{ secrets.CLOUDFLARE_ACCOUNT_ID }}/pages/projects/${{ secrets.CLOUDFLARE_PROJECT_NAME }}" \
- -H "Authorization: Bearer ${{ secrets.CLOUDFLARE_API_TOKEN }}" \
- -H "Content-Type: application/json" \
- -d "{ \"deployment_configs\": { \"production\": { \"deployment_retention\": ${{ env.KEEP_DEPLOYMENTS }} } } }"
- # Get all deployments
- DEPLOYMENTS=$(curl -s -X GET \
- "https://api.cloudflare.com/client/v4/accounts/${{ secrets.CLOUDFLARE_ACCOUNT_ID }}/pages/projects/${{ secrets.CLOUDFLARE_PROJECT_NAME }}/deployments" \
- -H "Authorization: Bearer ${{ secrets.CLOUDFLARE_API_TOKEN }}" \
- -H "Content-Type: application/json")
-
- # Extract deployment IDs (skip the latest N deployments)
- DEPLOYMENT_IDS=$(echo "$DEPLOYMENTS" | jq -r ".result | sort_by(.created_on) | reverse | .[${{ env.KEEP_DEPLOYMENTS }}:] | .[].id // empty")
-
- if [ -z "$DEPLOYMENT_IDS" ]; then
- echo "No old deployments to delete"
- exit 0
- fi
-
- # Delete old deployments
- for ID in $DEPLOYMENT_IDS; do
- echo "Deleting deployment: $ID"
- RESPONSE=$(curl -s -X DELETE \
- "https://api.cloudflare.com/client/v4/accounts/${{ secrets.CLOUDFLARE_ACCOUNT_ID }}/pages/projects/${{ secrets.CLOUDFLARE_PROJECT_NAME }}/deployments/$ID" \
- -H "Authorization: Bearer ${{ secrets.CLOUDFLARE_API_TOKEN }}" \
- -H "Content-Type: application/json")
-
- SUCCESS=$(echo "$RESPONSE" | jq -r '.success')
- if [ "$SUCCESS" = "true" ]; then
- echo "Successfully deleted deployment: $ID"
- else
- echo "Failed to delete deployment: $ID"
- echo "$RESPONSE" | jq .
- fi
-
- sleep 1 # Rate limiting
- done
-
- echo "Cleanup completed!"
diff --git a/.github/workflows/disabled/gh-pages-fast.yml b/.github/workflows/disabled/gh-pages-fast.yml
deleted file mode 100644
index 078d518..0000000
--- a/.github/workflows/disabled/gh-pages-fast.yml
+++ /dev/null
@@ -1,92 +0,0 @@
-name: github pages (fast)
-
-on:
- push:
- branches:
- - main
- paths-ignore:
- - 'src/**'
- - 'Cargo.toml'
- - 'Cargo.lock'
-
-jobs:
- build-deploy:
- runs-on: ubuntu-latest
- permissions:
- contents: write
- pages: write
- id-token: write
- steps:
- - uses: actions/checkout@v4
-
- - name: Cache ailog binary
- uses: actions/cache@v4
- with:
- path: ./bin
- key: ailog-bin-${{ runner.os }}
- restore-keys: |
- ailog-bin-${{ runner.os }}
-
- - name: Setup ailog binary
- run: |
- # Get expected version from Cargo.toml
- EXPECTED_VERSION=$(grep '^version' Cargo.toml | cut -d'"' -f2)
- echo "Expected version from Cargo.toml: $EXPECTED_VERSION"
-
- # Check current binary version if exists
- if [ -f "./bin/ailog" ]; then
- CURRENT_VERSION=$(./bin/ailog --version 2>/dev/null || echo "unknown")
- echo "Current binary version: $CURRENT_VERSION"
- else
- CURRENT_VERSION="none"
- echo "No binary found"
- fi
-
- # Check OS
- OS="${{ runner.os }}"
- echo "Runner OS: $OS"
-
- # Use pre-packaged binary if version matches or extract from tar.gz
- if [ "$CURRENT_VERSION" = "$EXPECTED_VERSION" ]; then
- echo "Binary is up to date"
- chmod +x ./bin/ailog
- elif [ "$OS" = "Linux" ] && [ -f "./bin/ailog-linux-x86_64.tar.gz" ]; then
- echo "Extracting ailog from pre-packaged tar.gz..."
- cd bin
- tar -xzf ailog-linux-x86_64.tar.gz
- chmod +x ailog
- cd ..
-
- # Verify extracted version
- EXTRACTED_VERSION=$(./bin/ailog --version 2>/dev/null || echo "unknown")
- echo "Extracted binary version: $EXTRACTED_VERSION"
-
- if [ "$EXTRACTED_VERSION" != "$EXPECTED_VERSION" ]; then
- echo "Warning: Binary version mismatch. Expected $EXPECTED_VERSION but got $EXTRACTED_VERSION"
- fi
- else
- echo "Error: No suitable binary found for OS: $OS"
- exit 1
- fi
-
- - name: Setup Hugo
- uses: peaceiris/actions-hugo@v3
- with:
- hugo-version: "0.139.2"
- extended: true
-
- - name: Build with ailog
- env:
- TZ: "Asia/Tokyo"
- run: |
- # Use pre-built ailog binary instead of cargo build
- cd my-blog
- ../bin/ailog build
- touch ./public/.nojekyll
-
- - name: Deploy
- uses: peaceiris/actions-gh-pages@v3
- with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- publish_dir: ./my-blog/public
- publish_branch: gh-pages
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index 0ebe469..0000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,170 +0,0 @@
-name: Release
-
-on:
- push:
- tags:
- - 'v*'
- workflow_dispatch:
- inputs:
- tag:
- description: 'Release tag (e.g., v1.0.0)'
- required: true
- default: 'v0.1.0'
-
-permissions:
- contents: write
- actions: read
-
-env:
- CARGO_TERM_COLOR: always
- OPENSSL_STATIC: true
- OPENSSL_VENDOR: true
-
-jobs:
- build:
- name: Build ${{ matrix.target }}
- runs-on: ${{ matrix.os }}
- timeout-minutes: 60
- strategy:
- matrix:
- include:
- - target: x86_64-unknown-linux-gnu
- os: ubuntu-latest
- artifact_name: ailog
- asset_name: ailog-linux-x86_64
- - target: aarch64-unknown-linux-gnu
- os: ubuntu-latest
- artifact_name: ailog
- asset_name: ailog-linux-aarch64
- - target: x86_64-apple-darwin
- os: macos-latest
- artifact_name: ailog
- asset_name: ailog-macos-x86_64
- - target: aarch64-apple-darwin
- os: macos-latest
- artifact_name: ailog
- asset_name: ailog-macos-aarch64
-
- steps:
- - uses: actions/checkout@v4
-
-
- - name: Setup Rust
- uses: dtolnay/rust-toolchain@stable
- with:
- targets: ${{ matrix.target }}
-
- - name: Install cross-compilation tools (Linux)
- if: matrix.os == 'ubuntu-latest' && matrix.target == 'aarch64-unknown-linux-gnu'
- run: |
- sudo apt-get update
- sudo apt-get install -y gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu
-
- - name: Configure cross-compilation (Linux ARM64)
- if: matrix.target == 'aarch64-unknown-linux-gnu'
- run: |
- echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config.toml
- echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config.toml
-
- - name: Cache cargo registry
- uses: actions/cache@v4
- with:
- path: |
- ~/.cargo/registry
- ~/.cargo/git
- key: ${{ runner.os }}-${{ matrix.target }}-cargo-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache target directory
- uses: actions/cache@v4
- with:
- path: target
- key: ${{ runner.os }}-${{ matrix.target }}-target-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Build
- run: cargo build --release --target ${{ matrix.target }}
-
- - name: Prepare binary
- shell: bash
- run: |
- cd target/${{ matrix.target }}/release
-
- # Use appropriate strip command for cross-compilation
- if [[ "${{ matrix.target }}" == "aarch64-unknown-linux-gnu" ]]; then
- aarch64-linux-gnu-strip ${{ matrix.artifact_name }} || echo "Strip failed, continuing..."
- elif [[ "${{ matrix.os }}" == "windows-latest" ]]; then
- strip ${{ matrix.artifact_name }} || echo "Strip failed, continuing..."
- else
- strip ${{ matrix.artifact_name }} || echo "Strip failed, continuing..."
- fi
-
- # Create archive
- if [[ "${{ matrix.target }}" == *"windows"* ]]; then
- 7z a ../../../${{ matrix.asset_name }}.zip ${{ matrix.artifact_name }}
- else
- tar czvf ../../../${{ matrix.asset_name }}.tar.gz ${{ matrix.artifact_name }}
- fi
-
- - name: Upload binary
- uses: actions/upload-artifact@v4
- with:
- name: ${{ matrix.asset_name }}
- path: ${{ matrix.asset_name }}.tar.gz
-
- release:
- name: Create Release
- needs: build
- runs-on: ubuntu-latest
- permissions:
- contents: write
- actions: read
- steps:
- - uses: actions/checkout@v4
-
- - name: Download all artifacts
- uses: actions/download-artifact@v4
- with:
- path: artifacts
-
- - name: Generate release notes
- run: |
- echo "## What's Changed" > release_notes.md
- echo "" >> release_notes.md
- echo "### Features" >> release_notes.md
- echo "- AI-powered static blog generator" >> release_notes.md
- echo "- AtProto OAuth integration" >> release_notes.md
- echo "- Automatic translation support" >> release_notes.md
- echo "- AI comment system" >> release_notes.md
- echo "" >> release_notes.md
- echo "### Platforms" >> release_notes.md
- echo "- Linux (x86_64, aarch64)" >> release_notes.md
- echo "- macOS (Intel, Apple Silicon)" >> release_notes.md
- echo "" >> release_notes.md
- echo "### Installation" >> release_notes.md
- echo "\`\`\`bash" >> release_notes.md
- echo "# Linux/macOS" >> release_notes.md
- echo "tar -xzf ailog-linux-x86_64.tar.gz" >> release_notes.md
- echo "chmod +x ailog" >> release_notes.md
- echo "sudo mv ailog /usr/local/bin/" >> release_notes.md
- echo "" >> release_notes.md
- echo "\`\`\`" >> release_notes.md
-
- - name: Get tag name
- id: tag_name
- run: |
- if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
- echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT
- else
- echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- fi
-
- - name: Create Release
- uses: softprops/action-gh-release@v1
- with:
- tag_name: ${{ steps.tag_name.outputs.tag }}
- name: ailog ${{ steps.tag_name.outputs.tag }}
- body_path: release_notes.md
- draft: false
- prerelease: ${{ contains(steps.tag_name.outputs.tag, 'alpha') || contains(steps.tag_name.outputs.tag, 'beta') || contains(steps.tag_name.outputs.tag, 'rc') }}
- files: artifacts/*/ailog-*.tar.gz
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 36985de..0062c02 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,12 +1,5 @@
-/target
-/Cargo.lock
-/public
-/dist
-/repos
-/pds/dist
-.DS_Store
-.config
+dist
.claude
node_modules
-package-lock.json
-claude.md
+package-lock.json
+repos
diff --git a/Cargo.toml b/Cargo.toml
deleted file mode 100644
index c264792..0000000
--- a/Cargo.toml
+++ /dev/null
@@ -1,21 +0,0 @@
-[package]
-name = "ailog"
-version = "0.3.5"
-edition = "2021"
-authors = ["syui"]
-description = "static site generator for atproto"
-license = "MIT"
-
-[dependencies]
-clap = { version = "4.5", features = ["derive"] }
-pulldown-cmark = "0.11"
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-tokio = { version = "1.40", features = ["rt-multi-thread", "macros", "fs"] }
-anyhow = "1.0"
-reqwest = { version = "0.12", features = ["json", "rustls-tls"], default-features = false }
-dirs = "5.0"
-chrono = "0.4"
-walkdir = "2.5"
-axum = "0.7"
-tower-http = { version = "0.5", features = ["fs"] }
diff --git a/README.md b/README.md
deleted file mode 100644
index ba87f8c..0000000
--- a/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# ailog
-
-`bundle: ai.syui.log`
-
diff --git a/content/post/2026-01-08-test.md b/content/post/2026-01-08-test.md
deleted file mode 100644
index faed222..0000000
--- a/content/post/2026-01-08-test.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# Test Post (Updated!)
-
-This is a test blog post for ailog.
-
-## Features
-
-- atproto integration
-- Static site generation
-- at browser support
-- Hash-based rkey (TID: 3mbvk36vj2k2y)
-
-Let's see how it renders!
-
-**Updated:** This post was updated to test the mapping.json feature.
diff --git a/index.html b/index.html
new file mode 100644
index 0000000..952b65c
--- /dev/null
+++ b/index.html
@@ -0,0 +1,21 @@
+
+
+
+
+
+ ailog
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lexicons/ai/syui/log/comment.json b/lexicons/ai/syui/log/comment.json
deleted file mode 100644
index 45d093f..0000000
--- a/lexicons/ai/syui/log/comment.json
+++ /dev/null
@@ -1,36 +0,0 @@
-{
- "lexicon": 1,
- "id": "ai.syui.log.comment",
- "defs": {
- "main": {
- "type": "record",
- "description": "Record containing a comment.",
- "key": "tid",
- "record": {
- "type": "object",
- "required": ["content", "createdAt", "post"],
- "properties": {
- "content": {
- "type": "string",
- "maxLength": 100000,
- "maxGraphemes": 10000,
- "description": "The content of the comment."
- },
- "createdAt": {
- "type": "string",
- "format": "datetime",
- "description": "Client-declared timestamp when this comment was originally created."
- },
- "parent": {
- "type": "ref",
- "ref": "com.atproto.repo.strongRef"
- },
- "post": {
- "type": "ref",
- "ref": "com.atproto.repo.strongRef"
- }
- }
- }
- }
- }
-}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..f2af4a8
--- /dev/null
+++ b/package.json
@@ -0,0 +1,19 @@
+{
+ "name": "ailog",
+ "version": "0.1.0",
+ "description": "AT Protocol site generator",
+ "type": "module",
+ "scripts": {
+ "dev": "vite",
+ "build": "tsc && vite build",
+ "preview": "vite preview"
+ },
+ "dependencies": {
+ "@atproto/api": "^0.15.8",
+ "@atproto/oauth-client-browser": "^0.3.39"
+ },
+ "devDependencies": {
+ "typescript": "^5.7.0",
+ "vite": "^6.0.0"
+ }
+}
diff --git a/pds/index.html b/pds/index.html
deleted file mode 100644
index f46e33f..0000000
--- a/pds/index.html
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
- AT URI Browser - syui.ai
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pds/package.json b/pds/package.json
deleted file mode 100644
index 750a10d..0000000
--- a/pds/package.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "name": "pds-browser",
- "version": "0.3.4",
- "description": "AT Protocol browser for ai.log",
- "main": "index.js",
- "type": "module",
- "scripts": {
- "dev": "vite",
- "build": "vite build",
- "preview": "vite preview"
- },
- "license": "MIT",
- "dependencies": {
- "@atproto/api": "^0.13.0",
- "@atproto/did": "^0.1.0",
- "@atproto/lexicon": "^0.4.0",
- "@atproto/syntax": "^0.3.0",
- "react": "^18.2.0",
- "react-dom": "^18.2.0"
- },
- "devDependencies": {
- "@types/react": "^18.0.37",
- "@types/react-dom": "^18.0.11",
- "@vitejs/plugin-react": "^4.0.0",
- "vite": "^5.0.0"
- }
-}
diff --git a/pds/src/App.css b/pds/src/App.css
deleted file mode 100644
index db4b156..0000000
--- a/pds/src/App.css
+++ /dev/null
@@ -1,463 +0,0 @@
-body {
- font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
- margin: 0;
- padding: 20px;
- background-color: #f5f5f5;
- line-height: 1.6;
-}
-
-.container {
- max-width: 1200px;
- margin: 0 auto;
- background: white;
- padding: 30px;
- border-radius: 10px;
- box-shadow: 0 2px 10px rgba(0,0,0,0.1);
-}
-
-h1 {
- color: #333;
- margin-bottom: 30px;
- border-bottom: 3px solid #007acc;
- padding-bottom: 10px;
-}
-
-.test-section {
- margin-bottom: 30px;
- padding: 20px;
- background: #f8f9fa;
- border-radius: 8px;
- border-left: 4px solid #007acc;
-}
-
-.test-uris {
- background: #fff;
- padding: 15px;
- border-radius: 5px;
- border: 1px solid #ddd;
- margin: 15px 0;
-}
-
-.at-uri {
- font-family: 'Monaco', 'Consolas', monospace;
- background: #f4f4f4;
- padding: 8px 12px;
- border-radius: 4px;
- margin: 10px 0;
- display: block;
- word-break: break-all;
- cursor: pointer;
- transition: background-color 0.2s;
-}
-
-.at-uri:hover {
- background: #e8e8e8;
-}
-
-.instructions {
- background: #e8f4f8;
- padding: 15px;
- border-radius: 5px;
- margin: 15px 0;
-}
-
-.instructions ol {
- margin: 10px 0;
- padding-left: 20px;
-}
-
-.back-link {
- display: inline-block;
- margin-top: 20px;
- color: #007acc;
- text-decoration: none;
- font-weight: bold;
-}
-
-.back-link:hover {
- text-decoration: underline;
-}
-
-/* AT Browser Modal Styles */
-.at-uri-modal-overlay {
- position: fixed;
- top: 0;
- left: 0;
- right: 0;
- bottom: 0;
- background-color: rgba(0, 0, 0, 0.5);
- display: flex;
- align-items: center;
- justify-content: center;
- z-index: 1000;
-}
-
-.at-uri-modal-content {
- background-color: white;
- border-radius: 8px;
- max-width: 800px;
- max-height: 600px;
- width: 90%;
- height: 80%;
- overflow: auto;
- position: relative;
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
-}
-
-.at-uri-modal-close {
- position: absolute;
- top: 10px;
- right: 10px;
- background: none;
- border: none;
- font-size: 20px;
- cursor: pointer;
- z-index: 1001;
- padding: 5px 10px;
-}
-
-/* AT URI Link Styles */
-[data-at-uri] {
- color: #1976d2;
- cursor: pointer;
- text-decoration: underline;
-}
-
-[data-at-uri]:hover {
- color: #1565c0;
-}
-
-/* Handle Browser Styles */
-.handle-browser {
- margin-bottom: 30px;
-}
-
-.handle-form {
- display: flex;
- gap: 10px;
- margin-bottom: 20px;
-}
-
-.handle-input {
- flex: 1;
- padding: 12px 16px;
- font-size: 16px;
- border: 2px solid #ddd;
- border-radius: 6px;
- font-family: 'Monaco', 'Consolas', monospace;
-}
-
-.handle-input:focus {
- outline: none;
- border-color: #007acc;
-}
-
-.handle-input:disabled {
- background: #f5f5f5;
- cursor: not-allowed;
-}
-
-.handle-button {
- padding: 12px 24px;
- font-size: 16px;
- background: #007acc;
- color: white;
- border: none;
- border-radius: 6px;
- cursor: pointer;
- font-weight: 500;
- transition: background 0.2s;
-}
-
-.handle-button:hover:not(:disabled) {
- background: #005a9e;
-}
-
-.handle-button:disabled {
- background: #ccc;
- cursor: not-allowed;
-}
-
-.error-message {
- background: #fee;
- padding: 12px 16px;
- border-radius: 6px;
- margin-bottom: 20px;
- color: #c33;
- border-left: 4px solid #c33;
-}
-
-.debug-info {
- background: #f0f0f0;
- padding: 12px 16px;
- border-radius: 6px;
- margin-bottom: 20px;
- border-left: 4px solid #666;
-}
-
-.debug-info h3 {
- margin-top: 0;
- color: #333;
- font-size: 14px;
-}
-
-.debug-info pre {
- background: white;
- padding: 8px;
- border-radius: 4px;
- font-size: 12px;
- overflow-x: auto;
- margin: 0;
-}
-
-
-.record-item {
- display: flex;
- justify-content: space-between;
- align-items: center;
- width: 100%;
- padding: 12px;
- background: white;
- border: none;
- cursor: pointer;
- text-align: left;
- transition: background 0.2s;
- border-radius: 4px;
- margin: 4px 0;
-}
-
-.record-item:hover {
- background: #e8f4f8;
-}
-
-.record-title {
- font-size: 16px;
- color: #007acc;
- font-weight: 500;
-}
-
-.record-date {
- color: #666;
- font-size: 14px;
-}
-
-.record-detail {
- background: white;
- padding: 20px;
- border-radius: 8px;
- border: 1px solid #ddd;
-}
-
-.back-button {
- padding: 8px 16px;
- margin-bottom: 16px;
- background: #f5f5f5;
- border: 1px solid #ddd;
- border-radius: 4px;
- cursor: pointer;
- font-size: 14px;
- color: #666;
- transition: background 0.2s;
-}
-
-.back-button:hover {
- background: #e8e8e8;
-}
-
-.record-detail h2 {
- margin-top: 0;
- color: #333;
-}
-
-.record-meta {
- margin-bottom: 20px;
- padding-bottom: 12px;
- border-bottom: 1px solid #eee;
-}
-
-.record-meta p {
- margin: 8px 0;
- color: #666;
- font-size: 14px;
-}
-
-.record-meta code {
- background: #f4f4f4;
- padding: 2px 6px;
- border-radius: 3px;
- font-family: 'Monaco', 'Consolas', monospace;
- font-size: 12px;
-}
-
-.record-content {
- line-height: 1.8;
-}
-
-.record-content pre {
- white-space: pre-wrap;
- word-wrap: break-word;
- font-family: inherit;
- margin: 0;
- color: #333;
-}
-
-.services-list {
- margin-top: 20px;
- background: #f8f9fa;
- padding: 20px;
- border-radius: 8px;
-}
-
-.services-list h2 {
- margin-top: 0;
- margin-bottom: 16px;
- color: #333;
- font-size: 20px;
-}
-
-.services-list ul {
- list-style: none;
- padding: 0;
- margin: 0;
-}
-
-.services-list li {
- border-bottom: 1px solid #ddd;
-}
-
-.services-list li:last-child {
- border-bottom: none;
-}
-
-.service-item {
- display: flex;
- align-items: center;
- gap: 12px;
- width: 100%;
- padding: 16px;
- background: white;
- border: none;
- cursor: pointer;
- text-align: left;
- transition: background 0.2s;
- border-radius: 4px;
- margin: 4px 0;
-}
-
-.service-item:hover {
- background: #e8f4f8;
-}
-
-.service-icon {
- width: 24px;
- height: 24px;
- border-radius: 4px;
- flex-shrink: 0;
-}
-
-.service-name {
- font-size: 16px;
- color: #007acc;
- font-weight: 500;
- font-family: 'Monaco', 'Consolas', monospace;
- flex: 1;
-}
-
-.service-count {
- color: #666;
- font-size: 14px;
- background: #e8e8e8;
- padding: 4px 12px;
- border-radius: 12px;
-}
-
-.collections-list {
- margin-top: 20px;
- background: #f8f9fa;
- padding: 20px;
- border-radius: 8px;
-}
-
-.collections-list h2 {
- margin-top: 0;
- margin-bottom: 16px;
- color: #333;
- font-size: 20px;
-}
-
-.collections-list ul {
- list-style: none;
- padding: 0;
- margin: 0;
-}
-
-.collections-list li {
- border-bottom: 1px solid #ddd;
-}
-
-.collections-list li:last-child {
- border-bottom: none;
-}
-
-.collection-item {
- display: flex;
- justify-content: space-between;
- align-items: center;
- width: 100%;
- padding: 16px;
- background: white;
- border: none;
- cursor: pointer;
- text-align: left;
- transition: background 0.2s;
- border-radius: 4px;
- margin: 4px 0;
-}
-
-.collection-item:hover {
- background: #e8f4f8;
-}
-
-.collection-name {
- font-size: 16px;
- color: #007acc;
- font-weight: 500;
- font-family: 'Monaco', 'Consolas', monospace;
-}
-
-.collection-count {
- color: #666;
- font-size: 14px;
- background: #e8e8e8;
- padding: 4px 12px;
- border-radius: 12px;
-}
-
-.records-view {
- margin-top: 20px;
- background: white;
- padding: 20px;
- border-radius: 8px;
- border: 1px solid #ddd;
-}
-
-.records-view h2 {
- margin-top: 0;
- margin-bottom: 16px;
- color: #333;
- font-size: 20px;
-}
-
-.records-view .records-list {
- list-style: none;
- padding: 0;
- margin: 0;
-}
-
-.records-view .records-list li {
- border-bottom: 1px solid #eee;
-}
-
-.records-view .records-list li:last-child {
- border-bottom: none;
-}
\ No newline at end of file
diff --git a/pds/src/App.jsx b/pds/src/App.jsx
deleted file mode 100644
index 28e6bfe..0000000
--- a/pds/src/App.jsx
+++ /dev/null
@@ -1,32 +0,0 @@
-import React, { useState } from 'react'
-import { AtUriBrowser } from './components/AtUriBrowser.jsx'
-import { HandleBrowser } from './components/HandleBrowser.jsx'
-import './App.css'
-
-function App() {
- return (
-
-
-
AT Protocol Browser
-
-
-
-
-
AT URI について
-
AT URIは、AT Protocolで使用される統一リソース識別子です。この形式により、分散ソーシャルネットワーク上のコンテンツを一意に識別できます。
-
-
対応PDS環境
-
- - bsky.social - メインのBlueskyネットワーク
- - syu.is - 独立したPDS環境
- - plc.directory + plc.syu.is - DID解決
-
-
-
-
← ブログに戻る
-
-
- )
-}
-
-export default App
\ No newline at end of file
diff --git a/pds/src/components/AtUriBrowser.jsx b/pds/src/components/AtUriBrowser.jsx
deleted file mode 100644
index 2836050..0000000
--- a/pds/src/components/AtUriBrowser.jsx
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * AT URI Browser Component
- * Copyright (c) 2025 ai.log
- * MIT License
- */
-
-import React, { useState, useEffect } from 'react'
-import { AtUriModal } from './AtUriModal.jsx'
-import { isAtUri } from '../lib/atproto.js'
-
-export function AtUriBrowser({ children }) {
- const [modalUri, setModalUri] = useState(null)
-
- useEffect(() => {
- const handleAtUriClick = (e) => {
- const target = e.target
-
- // Check if clicked element has at-uri data attribute
- if (target.dataset.atUri) {
- e.preventDefault()
- setModalUri(target.dataset.atUri)
- return
- }
-
- // Check if clicked element contains at-uri text
- const text = target.textContent
- if (text && isAtUri(text)) {
- e.preventDefault()
- setModalUri(text)
- return
- }
-
- // Check if parent element has at-uri
- const parent = target.parentElement
- if (parent && parent.dataset.atUri) {
- e.preventDefault()
- setModalUri(parent.dataset.atUri)
- return
- }
- }
-
- document.addEventListener('click', handleAtUriClick)
-
- return () => {
- document.removeEventListener('click', handleAtUriClick)
- }
- }, [])
-
- const handleAtUriClick = (uri) => {
- setModalUri(uri)
- }
-
- const handleCloseModal = () => {
- setModalUri(null)
- }
-
- return (
- <>
- {children}
-
- >
- )
-}
-
-// Utility function to wrap at-uri text with clickable spans
-export const wrapAtUris = (text) => {
- const atUriRegex = /at:\/\/[^\s]+/g
- return text.replace(atUriRegex, (match) => {
- return `${match}`
- })
-}
\ No newline at end of file
diff --git a/pds/src/components/AtUriJson.jsx b/pds/src/components/AtUriJson.jsx
deleted file mode 100644
index 280b829..0000000
--- a/pds/src/components/AtUriJson.jsx
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Based on frontpage/atproto-browser
- * Copyright (c) 2025 The Frontpage Authors
- * MIT License
- */
-
-import React from 'react'
-import { isDid } from '@atproto/did'
-import { parseAtUri, isAtUri } from '../lib/atproto.js'
-
-const JSONString = ({ data, onAtUriClick }) => {
- const handleClick = (uri) => {
- if (onAtUriClick) {
- onAtUriClick(uri)
- }
- }
-
- return (
-
- {isAtUri(data) ? (
- <>
- "
- handleClick(data)}
- style={{
- color: 'blue',
- cursor: 'pointer',
- textDecoration: 'underline'
- }}
- >
- {data}
-
- "
- >
- ) : isDid(data) ? (
- <>
- "
- handleClick(`at://${data}`)}
- style={{
- color: 'blue',
- cursor: 'pointer',
- textDecoration: 'underline'
- }}
- >
- {data}
-
- "
- >
- ) : URL.canParse(data) ? (
- <>
- "
-
- {data}
-
- "
- >
- ) : (
- `"${data}"`
- )}
-
- )
-}
-
-const JSONValue = ({ data, onAtUriClick }) => {
- if (data === null) {
- return null
- }
-
- if (typeof data === 'string') {
- return
- }
-
- if (typeof data === 'number') {
- return {data}
- }
-
- if (typeof data === 'boolean') {
- return {data.toString()}
- }
-
- if (Array.isArray(data)) {
- return (
-
- [
- {data.map((item, index) => (
-
-
- {index < data.length - 1 && ','}
-
- ))}
- ]
-
- )
- }
-
- if (typeof data === 'object') {
- return (
-
- {'{'}
- {Object.entries(data).map(([key, value], index, entries) => (
-
- "{key}":
- {index < entries.length - 1 && ','}
-
- ))}
- {'}'}
-
- )
- }
-
- return {String(data)}
-}
-
-export default function AtUriJson({ data, onAtUriClick }) {
- return (
-
-
-
- )
-}
\ No newline at end of file
diff --git a/pds/src/components/AtUriModal.jsx b/pds/src/components/AtUriModal.jsx
deleted file mode 100644
index 21e12af..0000000
--- a/pds/src/components/AtUriModal.jsx
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * AT URI Modal Component
- * Copyright (c) 2025 ai.log
- * MIT License
- */
-
-import React, { useEffect } from 'react'
-import AtUriViewer from './AtUriViewer.jsx'
-
-export function AtUriModal({ uri, onClose, onAtUriClick }) {
- useEffect(() => {
- const handleEscape = (e) => {
- if (e.key === 'Escape') {
- onClose()
- }
- }
-
- const handleClickOutside = (e) => {
- if (e.target.classList.contains('at-uri-modal-overlay')) {
- onClose()
- }
- }
-
- document.addEventListener('keydown', handleEscape)
- document.addEventListener('click', handleClickOutside)
-
- return () => {
- document.removeEventListener('keydown', handleEscape)
- document.removeEventListener('click', handleClickOutside)
- }
- }, [onClose])
-
- if (!uri) return null
-
- return (
-
- )
-}
\ No newline at end of file
diff --git a/pds/src/components/AtUriViewer.jsx b/pds/src/components/AtUriViewer.jsx
deleted file mode 100644
index d5e8bec..0000000
--- a/pds/src/components/AtUriViewer.jsx
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Based on frontpage/atproto-browser
- * Copyright (c) 2025 The Frontpage Authors
- * MIT License
- */
-
-import React, { useState, useEffect } from 'react'
-import { parseAtUri, getRecord } from '../lib/atproto.js'
-import AtUriJson from './AtUriJson.jsx'
-
-export default function AtUriViewer({ uri, onAtUriClick }) {
- const [record, setRecord] = useState(null)
- const [loading, setLoading] = useState(true)
- const [error, setError] = useState(null)
-
- useEffect(() => {
- const loadRecord = async () => {
- if (!uri) return
-
- setLoading(true)
- setError(null)
-
- try {
- const atUri = parseAtUri(uri)
- if (!atUri) {
- throw new Error('Invalid AT URI')
- }
-
-
- const result = await getRecord(atUri.hostname, atUri.collection, atUri.rkey)
-
-
- if (!result.success) {
- throw new Error(result.error)
- }
-
- setRecord(result.data)
- } catch (err) {
- setError(err.message)
- } finally {
- setLoading(false)
- }
- }
-
- loadRecord()
- }, [uri])
-
- if (loading) {
- return (
-
- )
- }
-
- if (error) {
- return (
-
-
Error: {error}
-
- URI: {uri}
-
-
- デバッグ情報: このAT URIは有効ではないか、レコードが存在しません。
-
-
- )
- }
-
- if (!record) {
- return (
-
- )
- }
-
- const atUri = parseAtUri(uri)
-
- return (
-
-
-
AT URI Record
-
- {uri}
-
-
- DID: {atUri.hostname} | Collection: {atUri.collection} | RKey: {atUri.rkey}
-
-
-
-
-
- )
-}
\ No newline at end of file
diff --git a/pds/src/components/HandleBrowser.jsx b/pds/src/components/HandleBrowser.jsx
deleted file mode 100644
index 8a3ff01..0000000
--- a/pds/src/components/HandleBrowser.jsx
+++ /dev/null
@@ -1,247 +0,0 @@
-import React, { useState } from 'react'
-import { listAllCollections } from '../lib/atproto.js'
-
-const getServiceIcon = (service) => {
- // Known domain mappings
- const domainMap = {
- 'app.bsky': 'bsky.app',
- 'chat.bsky': 'bsky.app',
- 'ai.syui': 'syui.ai',
- 'tools.ozone': 'ozone.tools',
- 'com.atproto': 'atproto.com'
- }
-
- // If in map, use it
- if (domainMap[service]) {
- return `https://www.google.com/s2/favicons?domain=${domainMap[service]}&sz=32`
- }
-
- // Otherwise, try to infer domain from service name
- // Format: prefix.domain → domain.tld (e.g., app.bsky → bsky.app)
- const parts = service.split('.')
- if (parts.length >= 2) {
- // Take last 2 parts and reverse
- const domain = parts.slice(-2).reverse().join('.')
- return `https://www.google.com/s2/favicons?domain=${domain}&sz=32`
- }
-
- // Fallback: use service as-is
- return `https://www.google.com/s2/favicons?domain=${service}&sz=32`
-}
-
-const groupCollectionsByService = (collections) => {
- const services = {}
-
- collections.forEach(col => {
- const parts = col.collection.split('.')
- const service = parts.slice(0, 2).join('.')
-
- if (!services[service]) {
- services[service] = []
- }
- services[service].push(col)
- })
-
- return services
-}
-
-export function HandleBrowser() {
- const [handle, setHandle] = useState('')
- const [loading, setLoading] = useState(false)
- const [error, setError] = useState(null)
- const [collections, setCollections] = useState([])
- const [services, setServices] = useState({})
- const [expandedService, setExpandedService] = useState(null)
- const [expandedCollection, setExpandedCollection] = useState(null)
- const [selectedRecord, setSelectedRecord] = useState(null)
- const [debugInfo, setDebugInfo] = useState(null)
-
- const handleSubmit = async (e) => {
- e.preventDefault()
-
- if (!handle) return
-
- setLoading(true)
- setError(null)
- setCollections([])
- setServices({})
- setExpandedService(null)
- setExpandedCollection(null)
- setSelectedRecord(null)
- setDebugInfo(null)
-
- try {
- const result = await listAllCollections(handle)
-
- const totalRecords = result.collections?.reduce((sum, c) => sum + c.records.length, 0) || 0
-
- setDebugInfo({
- handle,
- success: result.success,
- pdsUrl: result.pdsUrl,
- collectionCount: result.collections?.length || 0,
- totalRecords
- })
-
- if (!result.success) {
- throw new Error(result.error)
- }
-
- if (result.collections.length === 0) {
- setError('No collections found for this handle')
- } else {
- setCollections(result.collections)
- const grouped = groupCollectionsByService(result.collections)
- setServices(grouped)
- }
- } catch (err) {
- setError(`Failed to load: ${err.message}`)
- } finally {
- setLoading(false)
- }
- }
-
- const handleServiceClick = (service) => {
- setExpandedService(service)
- setExpandedCollection(null)
- setSelectedRecord(null)
- }
-
- const handleBackToServices = () => {
- setExpandedService(null)
- setExpandedCollection(null)
- setSelectedRecord(null)
- }
-
- const handleCollectionClick = (collection) => {
- setExpandedCollection(collection)
- setSelectedRecord(null)
- }
-
- const handleBackToCollections = () => {
- setExpandedCollection(null)
- setSelectedRecord(null)
- }
-
- const handleRecordClick = (record) => {
- setSelectedRecord(record)
- }
-
- const handleBackToRecords = () => {
- setSelectedRecord(null)
- }
-
- return (
-
-
-
- {error && (
-
- )}
-
- {debugInfo && (
-
-
Debug Info
-
{JSON.stringify(debugInfo, null, 2)}
-
- )}
-
- {selectedRecord ? (
-
-
-
{selectedRecord.uri.split('/').pop()}
-
-
URI: {selectedRecord.uri}
- {selectedRecord.value.createdAt && (
-
Created: {new Date(selectedRecord.value.createdAt).toLocaleString()}
- )}
-
-
-
{JSON.stringify(selectedRecord.value, null, 2)}
-
-
- ) : expandedCollection ? (
-
-
-
{expandedCollection.collection} ({expandedCollection.records.length})
-
- {expandedCollection.records.map((record) => {
- const rkey = record.uri.split('/').pop()
- return (
- -
-
-
- )
- })}
-
-
- ) : expandedService ? (
-
-
-
{expandedService} ({services[expandedService].length})
-
- {services[expandedService].map((collectionGroup) => (
- -
-
-
- ))}
-
-
- ) : Object.keys(services).length > 0 ? (
-
-
Services ({Object.keys(services).length})
-
- {Object.keys(services).map((service) => {
- const totalRecords = services[service].reduce((sum, col) => sum + col.records.length, 0)
- return (
- -
-
-
- )
- })}
-
-
- ) : null}
-
- )
-}
diff --git a/pds/src/config.js b/pds/src/config.js
deleted file mode 100644
index 244aeb9..0000000
--- a/pds/src/config.js
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * AT Protocol Configuration for syu.is environment
- */
-
-export const AT_PROTOCOL_CONFIG = {
- // Primary PDS environment (syu.is)
- primary: {
- pds: 'https://syu.is',
- plc: 'https://plc.syu.is',
- bsky: 'https://bsky.syu.is',
- web: 'https://web.syu.is'
- },
-
- // Fallback PDS environment (bsky.social)
- fallback: {
- pds: 'https://bsky.social',
- plc: 'https://plc.directory',
- bsky: 'https://public.api.bsky.app',
- web: 'https://bsky.app'
- }
-}
-
-export const getPDSConfig = (pds) => {
- // Map PDS URL to appropriate config
- if (pds.includes('syu.is')) {
- return AT_PROTOCOL_CONFIG.primary
- } else if (pds.includes('bsky.social')) {
- return AT_PROTOCOL_CONFIG.fallback
- }
-
- // Default to primary for unknown PDS
- return AT_PROTOCOL_CONFIG.primary
-}
\ No newline at end of file
diff --git a/pds/src/index.js b/pds/src/index.js
deleted file mode 100644
index 2e82f25..0000000
--- a/pds/src/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Based on frontpage/atproto-browser
- * Copyright (c) 2025 The Frontpage Authors
- * MIT License
- */
-
-export { AtUriBrowser } from './components/AtUriBrowser.jsx'
-export { AtUriModal } from './components/AtUriModal.jsx'
-export { default as AtUriViewer } from './components/AtUriViewer.jsx'
\ No newline at end of file
diff --git a/pds/src/lib/atproto.js b/pds/src/lib/atproto.js
deleted file mode 100644
index 201c4a9..0000000
--- a/pds/src/lib/atproto.js
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Based on frontpage/atproto-browser
- * Copyright (c) 2025 The Frontpage Authors
- * MIT License
- */
-
-import { AtpBaseClient } from '@atproto/api'
-import { AtUri } from '@atproto/syntax'
-import { isDid } from '@atproto/did'
-import { AT_PROTOCOL_CONFIG } from '../config.js'
-
-// Identity resolution cache
-const identityCache = new Map()
-
-// Create AT Protocol client
-export const createAtpClient = (pds) => {
- return new AtpBaseClient({
- service: pds.startsWith('http') ? pds : `https://${pds}`
- })
-}
-
-// Resolve identity (DID/Handle)
-export const resolveIdentity = async (identifier) => {
- if (identityCache.has(identifier)) {
- return identityCache.get(identifier)
- }
-
- try {
- let did = identifier
-
- // If it's a handle, resolve to DID
- if (!isDid(identifier)) {
- // Try syu.is first, then fallback to bsky.social
- let resolved = false
-
- try {
- const client = createAtpClient(AT_PROTOCOL_CONFIG.primary.pds)
- const response = await client.com.atproto.repo.describeRepo({ repo: identifier })
- did = response.data.did
- resolved = true
- } catch (error) {
- }
-
- if (!resolved) {
- try {
- const client = createAtpClient(AT_PROTOCOL_CONFIG.fallback.pds)
- const response = await client.com.atproto.repo.describeRepo({ repo: identifier })
- did = response.data.did
- } catch (error) {
- throw new Error(`Failed to resolve handle: ${identifier}`)
- }
- }
- }
-
- // Get DID document to find PDS
- // Try plc.syu.is first, then fallback to plc.directory
- let didDoc = null
- let plcResponse = null
-
- try {
- plcResponse = await fetch(`${AT_PROTOCOL_CONFIG.primary.plc}/${did}`)
- if (plcResponse.ok) {
- didDoc = await plcResponse.json()
- }
- } catch (error) {
- }
-
- // If plc.syu.is fails, try plc.directory
- if (!didDoc) {
- try {
- plcResponse = await fetch(`${AT_PROTOCOL_CONFIG.fallback.plc}/${did}`)
- if (plcResponse.ok) {
- didDoc = await plcResponse.json()
- }
- } catch (error) {
- }
- }
-
- if (!didDoc) {
- throw new Error(`Failed to resolve DID document from any PLC server`)
- }
-
- // Find PDS service endpoint
- const pdsService = didDoc.service?.find(service =>
- service.type === 'AtprotoPersonalDataServer' ||
- service.id === '#atproto_pds'
- )
-
- if (!pdsService) {
- throw new Error('No PDS service found in DID document')
- }
-
- const result = {
- success: true,
- didDocument: didDoc,
- pdsUrl: pdsService.serviceEndpoint
- }
-
- identityCache.set(identifier, result)
- return result
- } catch (error) {
- const result = {
- success: false,
- error: error.message
- }
- identityCache.set(identifier, result)
- return result
- }
-}
-
-// Get record from AT Protocol
-export const getRecord = async (did, collection, rkey) => {
- try {
- const identityResult = await resolveIdentity(did)
-
- if (!identityResult.success) {
- return { success: false, error: identityResult.error }
- }
-
- const pdsUrl = identityResult.pdsUrl
-
- const client = createAtpClient(pdsUrl)
-
- const response = await client.com.atproto.repo.getRecord({
- repo: did,
- collection,
- rkey
- })
-
- return {
- success: true,
- data: response.data,
- pdsUrl
- }
- } catch (error) {
- return {
- success: false,
- error: error.message
- }
- }
-}
-
-// Parse AT URI
-export const parseAtUri = (uri) => {
- try {
- return new AtUri(uri)
- } catch (error) {
- return null
- }
-}
-
-// Check if string is AT URI
-export const isAtUri = (str) => {
- return str.startsWith('at://') && str.split(' ').length === 1
-}
-
-// List records from AT Protocol
-export const listRecords = async (identifier, collection) => {
- try {
- const identityResult = await resolveIdentity(identifier)
-
- if (!identityResult.success) {
- return { success: false, error: identityResult.error }
- }
-
- const did = identityResult.didDocument.id
- const pdsUrl = identityResult.pdsUrl
-
- const client = createAtpClient(pdsUrl)
-
- const response = await client.com.atproto.repo.listRecords({
- repo: did,
- collection,
- limit: 100
- })
-
- return {
- success: true,
- records: response.data.records || [],
- pdsUrl
- }
- } catch (error) {
- return {
- success: false,
- error: error.message
- }
- }
-}
-
-// List all collections for a user
-export const listAllCollections = async (identifier) => {
- try {
- const identityResult = await resolveIdentity(identifier)
-
- if (!identityResult.success) {
- return { success: false, error: identityResult.error }
- }
-
- const did = identityResult.didDocument.id
- const pdsUrl = identityResult.pdsUrl
-
- const client = createAtpClient(pdsUrl)
-
- // Get collections list from describeRepo
- const repoDesc = await client.com.atproto.repo.describeRepo({
- repo: did
- })
-
- const collections = repoDesc.data.collections || []
-
- if (collections.length === 0) {
- return {
- success: true,
- collections: [],
- pdsUrl
- }
- }
-
- const allRecords = []
-
- for (const collection of collections) {
- try {
- const response = await client.com.atproto.repo.listRecords({
- repo: did,
- collection,
- limit: 100
- })
-
- if (response.data.records && response.data.records.length > 0) {
- allRecords.push({
- collection,
- records: response.data.records
- })
- }
- } catch (err) {
- // Collection doesn't exist or is empty, skip
- }
- }
-
- return {
- success: true,
- collections: allRecords,
- pdsUrl
- }
- } catch (error) {
- return {
- success: false,
- error: error.message
- }
- }
-}
\ No newline at end of file
diff --git a/pds/src/main.jsx b/pds/src/main.jsx
deleted file mode 100644
index 6161d18..0000000
--- a/pds/src/main.jsx
+++ /dev/null
@@ -1,9 +0,0 @@
-import React from 'react'
-import ReactDOM from 'react-dom/client'
-import App from './App.jsx'
-
-ReactDOM.createRoot(document.getElementById('root')).render(
-
-
- ,
-)
\ No newline at end of file
diff --git a/pds/vite.config.js b/pds/vite.config.js
deleted file mode 100644
index e21c23d..0000000
--- a/pds/vite.config.js
+++ /dev/null
@@ -1,10 +0,0 @@
-import { defineConfig } from 'vite'
-import react from '@vitejs/plugin-react'
-
-export default defineConfig({
- plugins: [react()],
- base: '/pds/',
- define: {
- 'process.env.NODE_ENV': JSON.stringify('production')
- }
-})
\ No newline at end of file
diff --git a/lexicons/ai/syui/log/post.json b/public/.well-known/lexicon/ai.syui.log.post.json
similarity index 100%
rename from lexicons/ai/syui/log/post.json
rename to public/.well-known/lexicon/ai.syui.log.post.json
diff --git a/public/client-metadata.json b/public/client-metadata.json
new file mode 100644
index 0000000..701ade0
--- /dev/null
+++ b/public/client-metadata.json
@@ -0,0 +1,15 @@
+{
+ "client_id": "https://syui.ai/client-metadata.json",
+ "client_name": "ailog",
+ "client_uri": "https://syui.ai",
+ "logo_uri": "https://syui.ai/favicon.ico",
+ "tos_uri": "https://syui.ai/tos",
+ "policy_uri": "https://syui.ai/policy",
+ "redirect_uris": ["https://syui.ai/"],
+ "scope": "atproto transition:generic",
+ "grant_types": ["authorization_code", "refresh_token"],
+ "response_types": ["code"],
+ "token_endpoint_auth_method": "none",
+ "application_type": "web",
+ "dpop_bound_access_tokens": true
+}
diff --git a/public/config.json b/public/config.json
new file mode 100644
index 0000000..1c93226
--- /dev/null
+++ b/public/config.json
@@ -0,0 +1,6 @@
+{
+ "title": "ailog",
+ "handle": "syui.ai",
+ "collection": "ai.syui.log.post",
+ "network": "bsky.social"
+}
diff --git a/public/networks.json b/public/networks.json
new file mode 100644
index 0000000..f806e10
--- /dev/null
+++ b/public/networks.json
@@ -0,0 +1,10 @@
+{
+ "bsky.social": {
+ "plc": "https://plc.directory",
+ "bsky": "https://public.api.bsky.app"
+ },
+ "syu.is": {
+ "plc": "https://plc.syu.is",
+ "bsky": "https://bsky.syu.is"
+ }
+}
diff --git a/src/build.rs b/src/build.rs
deleted file mode 100644
index 2ef7a87..0000000
--- a/src/build.rs
+++ /dev/null
@@ -1,211 +0,0 @@
-use anyhow::{Context, Result};
-use pulldown_cmark::{html, Parser};
-use serde::{Deserialize, Serialize};
-use std::fs;
-
-use crate::config::Config;
-
-#[derive(Debug, Deserialize)]
-#[allow(dead_code)]
-struct ListRecordsResponse {
- records: Vec,
- cursor: Option,
-}
-
-#[derive(Debug, Deserialize, Clone)]
-#[allow(dead_code)]
-struct Record {
- uri: String,
- cid: String,
- value: PostRecord,
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone)]
-struct PostRecord {
- title: String,
- content: String,
- #[serde(rename = "createdAt")]
- created_at: String,
-}
-
-pub async fn execute() -> Result<()> {
- let mut config = Config::load()?;
-
- // Refresh session before API calls
- crate::refresh::refresh_session(&mut config).await?;
-
- println!("Building static site from atproto records...");
-
- let pds_url = format!("https://{}", config.pds);
- let client = reqwest::Client::new();
-
- // List records
- let list_url = format!(
- "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=ai.syui.log.post&limit=100",
- pds_url, config.did
- );
-
- let res: ListRecordsResponse = client
- .get(&list_url)
- .send()
- .await
- .context("Failed to list records")?
- .json()
- .await
- .context("Failed to parse listRecords response")?;
-
- println!("Found {} posts", res.records.len());
-
- // Create output directory
- fs::create_dir_all("./public")?;
- fs::create_dir_all("./public/posts")?;
-
- // Generate index.html
- let mut index_html = String::from(
- r#"
-
-
-
- Blog Posts
-
-
-
-
- Posts
-
-"#,
- );
-
- for record in &res.records {
- let rkey = record.uri.split('/').last().unwrap();
- index_html.push_str(&format!(
- r#" - {}
-"#,
- rkey, record.value.title
- ));
-
- // Generate individual post page
- let parser = Parser::new(&record.value.content);
- let mut html_output = String::new();
- html::push_html(&mut html_output, parser);
-
- let post_html = format!(
- r#"
-
-
-
- {}
-
-
- {}
- {}
- ← Back to list
-
-"#,
- record.value.title, record.value.title, html_output
- );
-
- fs::write(format!("./public/posts/{}.html", rkey), post_html)?;
- println!(" ✓ Generated: posts/{}.html", rkey);
- }
-
- index_html.push_str(
- r#"
-
-"#,
- );
-
- fs::write("./public/index.html", index_html)?;
- println!(" ✓ Generated: index.html");
-
- // Build browser app
- println!("\nBuilding AT Browser...");
- build_browser().await?;
-
- println!("\nDone! Site generated in ./public/");
- println!(" - Blog: ./public/index.html");
- println!(" - PDS Browser: ./public/pds/index.html");
- Ok(())
-}
-
-async fn build_browser() -> Result<()> {
- use std::process::Command;
-
- let browser_dir = "./pds";
-
- // Check if pds directory exists
- if !std::path::Path::new(browser_dir).exists() {
- println!(" ⚠ PDS directory not found, skipping");
- return Ok(());
- }
-
- // Run npm install if node_modules doesn't exist
- if !std::path::Path::new(&format!("{}/node_modules", browser_dir)).exists() {
- println!(" → Running npm install...");
- let status = Command::new("npm")
- .arg("install")
- .current_dir(browser_dir)
- .status()
- .context("Failed to run npm install")?;
-
- if !status.success() {
- anyhow::bail!("npm install failed");
- }
- }
-
- // Run npm run build
- println!(" → Running npm run build...");
- let status = Command::new("npm")
- .arg("run")
- .arg("build")
- .current_dir(browser_dir)
- .status()
- .context("Failed to run npm run build")?;
-
- if !status.success() {
- anyhow::bail!("npm run build failed");
- }
-
- // Copy dist to public/pds
- let dist_dir = format!("{}/dist", browser_dir);
- let target_dir = "./public/pds";
-
- if std::path::Path::new(&dist_dir).exists() {
- fs::create_dir_all(target_dir)?;
- copy_dir_all(&dist_dir, target_dir)?;
- println!(" ✓ PDS browser deployed to ./public/pds/");
- } else {
- println!(" ⚠ dist directory not found");
- }
-
- Ok(())
-}
-
-fn copy_dir_all(src: &str, dst: &str) -> Result<()> {
- use walkdir::WalkDir;
-
- for entry in WalkDir::new(src) {
- let entry = entry?;
- let path = entry.path();
- let relative = path.strip_prefix(src)?;
- let target = std::path::Path::new(dst).join(relative);
-
- if path.is_dir() {
- fs::create_dir_all(&target)?;
- } else {
- if let Some(parent) = target.parent() {
- fs::create_dir_all(parent)?;
- }
- fs::copy(path, &target)?;
- }
- }
-
- Ok(())
-}
diff --git a/src/components/atbrowser.ts b/src/components/atbrowser.ts
new file mode 100644
index 0000000..1723a17
--- /dev/null
+++ b/src/components/atbrowser.ts
@@ -0,0 +1,140 @@
+import { describeRepo, listRecordsRaw, getRecordRaw, fetchLexicon, resolveHandle, getServiceInfo } from '../lib/api.js'
+
+function extractRkey(uri: string): string {
+ const parts = uri.split('/')
+ return parts[parts.length - 1]
+}
+
+function formatDate(dateStr: string): string {
+ const date = new Date(dateStr)
+ return date.toLocaleDateString('ja-JP', {
+ year: 'numeric',
+ month: '2-digit',
+ day: '2-digit',
+ })
+}
+
+function escapeHtml(str: string): string {
+ return str
+ .replace(/&/g, '&amp;')
+ .replace(/</g, '&lt;')
+ .replace(/>/g, '&gt;')
+ .replace(/"/g, '&quot;')
+}
+
+async function renderCollections(did: string, handle: string): Promise<string> {
+ const collections = await describeRepo(did)
+
+ if (collections.length === 0) {
+ return 'No collections found
'
+ }
+
+ const items = collections.map(col => {
+ const service = getServiceInfo(col)
+ const favicon = service ? `
` : ''
+ const serviceName = service ? `${service.name}` : ''
+
+ return `
+
+
+ ${favicon}
+ ${col}
+ ${serviceName}
+
+
+ `
+ }).join('')
+
+ return `
+
+ `
+}
+
+async function renderRecordList(did: string, handle: string, collection: string): Promise<string> {
+ const records = await listRecordsRaw(did, collection)
+
+ if (records.length === 0) {
+ return 'No records found
'
+ }
+
+ const items = records.map(rec => {
+ const rkey = extractRkey(rec.uri)
+ const preview = rec.value.title || rec.value.text?.slice(0, 50) || rkey
+ return `
+
+
+ ${rkey}
+ ${preview}
+
+
+ `
+ }).join('')
+
+ return `
+
+
${collection}
+
${records.length} records
+
+
+ `
+}
+
+async function renderRecordDetail(did: string, handle: string, collection: string, rkey: string): Promise<string> {
+ const record = await getRecordRaw(did, collection, rkey)
+
+ if (!record) {
+ return 'Record not found
'
+ }
+
+ const lexicon = await fetchLexicon(collection)
+ const schemaStatus = lexicon ? 'verified' : 'none'
+ const schemaLabel = lexicon ? '✓ Schema' : '○ No schema'
+ const json = JSON.stringify(record, null, 2)
+
+ return `
+
+ `
+}
+
+export async function mountAtBrowser(
+ container: HTMLElement,
+ handle: string,
+ collection: string | null,
+ rkey: string | null
+): Promise<void> {
+ container.innerHTML = 'Loading...
'
+
+ try {
+ const did = handle.startsWith('did:') ? handle : await resolveHandle(handle)
+
+ let content: string
+ let nav = ''
+
+ if (collection && rkey) {
+ nav = `← Back`
+ content = await renderRecordDetail(did, handle, collection, rkey)
+ } else if (collection) {
+ nav = `← Collections`
+ content = await renderRecordList(did, handle, collection)
+ } else {
+ content = await renderCollections(did, handle)
+ }
+
+ container.innerHTML = nav + content
+ } catch (err) {
+ container.innerHTML = `Failed to load: ${err}
`
+ }
+}
diff --git a/src/components/browser.ts b/src/components/browser.ts
new file mode 100644
index 0000000..ac53b3b
--- /dev/null
+++ b/src/components/browser.ts
@@ -0,0 +1,89 @@
+export function renderHeader(currentHandle: string, isLoggedIn: boolean, userHandle?: string): string {
+ const loginBtn = isLoggedIn
+ ? ``
+ : ``
+
+ return `
+
+ `
+}
+
+export interface HeaderCallbacks {
+ onBrowse: (handle: string) => void
+ onLogin: () => void
+ onLogout: () => void
+}
+
+export function mountHeader(
+ container: HTMLElement,
+ currentHandle: string,
+ isLoggedIn: boolean,
+ userHandle: string | undefined,
+ callbacks: HeaderCallbacks
+): void {
+ container.innerHTML = renderHeader(currentHandle, isLoggedIn, userHandle)
+
+ const form = document.getElementById('header-form') as HTMLFormElement
+ const input = document.getElementById('header-input') as HTMLInputElement
+
+ form.addEventListener('submit', (e) => {
+ e.preventDefault()
+ const handle = input.value.trim()
+ if (handle) {
+ callbacks.onBrowse(handle)
+ }
+ })
+
+ if (isLoggedIn) {
+ const userBtn = document.getElementById('user-btn')
+ userBtn?.addEventListener('click', async (e) => {
+ e.preventDefault()
+ e.stopPropagation()
+ if (confirm('Logout?')) {
+ await callbacks.onLogout()
+ }
+ })
+ } else {
+ const loginBtn = document.getElementById('login-btn')
+ loginBtn?.addEventListener('click', (e) => {
+ e.preventDefault()
+ e.stopPropagation()
+ callbacks.onLogin()
+ })
+ }
+}
+
+// Keep old function for compatibility
+export function mountBrowser(
+ container: HTMLElement,
+ currentHandle: string,
+ onSubmit: (handle: string) => void
+): void {
+ mountHeader(container, currentHandle, false, undefined, {
+ onBrowse: onSubmit,
+ onLogin: () => {},
+ onLogout: () => {}
+ })
+}
diff --git a/src/components/postform.ts b/src/components/postform.ts
new file mode 100644
index 0000000..9950040
--- /dev/null
+++ b/src/components/postform.ts
@@ -0,0 +1,74 @@
+import { createPost } from '../lib/auth.js'
+
+export function renderPostForm(collection: string): string {
+ return `
+
+ `
+}
+
+export function mountPostForm(
+ container: HTMLElement,
+ collection: string,
+ onSuccess: () => void
+): void {
+ container.innerHTML = renderPostForm(collection)
+
+ const form = document.getElementById('post-form') as HTMLFormElement
+ const titleInput = document.getElementById('post-title') as HTMLInputElement
+ const bodyInput = document.getElementById('post-body') as HTMLTextAreaElement
+ const submitBtn = document.getElementById('post-submit') as HTMLButtonElement
+ const statusEl = document.getElementById('post-status') as HTMLDivElement
+
+ form.addEventListener('submit', async (e) => {
+ e.preventDefault()
+
+ const title = titleInput.value.trim()
+ const body = bodyInput.value.trim()
+
+ if (!title || !body) return
+
+ submitBtn.disabled = true
+ submitBtn.textContent = 'Posting...'
+ statusEl.innerHTML = ''
+
+ try {
+ const result = await createPost(collection, title, body)
+ if (result) {
+ statusEl.innerHTML = `Posted successfully!`
+ titleInput.value = ''
+ bodyInput.value = ''
+ setTimeout(() => {
+ onSuccess()
+ }, 1000)
+ }
+ } catch (err) {
+ statusEl.innerHTML = `Error: ${err}`
+ } finally {
+ submitBtn.disabled = false
+ submitBtn.textContent = 'Post'
+ }
+ })
+}
diff --git a/src/components/posts.ts b/src/components/posts.ts
new file mode 100644
index 0000000..817d146
--- /dev/null
+++ b/src/components/posts.ts
@@ -0,0 +1,54 @@
+import type { BlogPost } from '../types.js'
+
+function formatDate(dateStr: string): string {
+ const date = new Date(dateStr)
+ return date.toLocaleDateString('ja-JP', {
+ year: 'numeric',
+ month: '2-digit',
+ day: '2-digit',
+ })
+}
+
+function escapeHtml(str: string): string {
+ return str
+ .replace(/&/g, '&amp;')
+ .replace(/</g, '&lt;')
+ .replace(/>/g, '&gt;')
+ .replace(/"/g, '&quot;')
+}
+
+export function mountPostList(container: HTMLElement, posts: BlogPost[]): void {
+ if (posts.length === 0) {
+ container.innerHTML = 'No posts yet
'
+ return
+ }
+
+ const html = posts.map(post => {
+ const rkey = post.uri.split('/').pop()
+ return `
+
+
+ ${escapeHtml(post.title)}
+ ${formatDate(post.createdAt)}
+
+
+ `
+ }).join('')
+
+ container.innerHTML = ``
+}
+
+export function mountPostDetail(container: HTMLElement, post: BlogPost, handle: string): void {
+ container.innerHTML = `
+
+
+ ${escapeHtml(post.content)}
+
+
+ `
+}
diff --git a/src/components/profile.ts b/src/components/profile.ts
new file mode 100644
index 0000000..6ba2aad
--- /dev/null
+++ b/src/components/profile.ts
@@ -0,0 +1,18 @@
+import type { Profile } from '../types.js'
+
+export function renderProfile(profile: Profile): string {
+ return `
+
+ ${profile.avatar ? `

` : ''}
+
+
${profile.displayName || profile.handle}
+
@${profile.handle}
+ ${profile.description ? `
${profile.description}
` : ''}
+
+
+ `
+}
+
+export function mountProfile(container: HTMLElement, profile: Profile): void {
+ container.innerHTML = renderProfile(profile)
+}
diff --git a/src/config.rs b/src/config.rs
deleted file mode 100644
index 41c8d7a..0000000
--- a/src/config.rs
+++ /dev/null
@@ -1,71 +0,0 @@
-use anyhow::{Context, Result};
-use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
-use std::path::PathBuf;
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct Config {
- pub pds: String,
- pub handle: String,
- pub did: String,
- pub access_jwt: String,
- pub refresh_jwt: String,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct RecordMapping {
- pub rkey: String,
- pub uri: String,
- pub cid: String,
-}
-
-pub type Mapping = HashMap<String, RecordMapping>;
-
-impl Config {
- pub fn config_path() -> Result<PathBuf> {
- let home = dirs::home_dir().context("Failed to get home directory")?;
- let config_dir = home.join(".config/syui/ai/log");
- std::fs::create_dir_all(&config_dir)?;
- Ok(config_dir.join("config.json"))
- }
-
- pub fn mapping_path() -> Result<PathBuf> {
- let home = dirs::home_dir().context("Failed to get home directory")?;
- let config_dir = home.join(".config/syui/ai/log");
- std::fs::create_dir_all(&config_dir)?;
- Ok(config_dir.join("mapping.json"))
- }
-
- pub fn load() -> Result<Self> {
- let path = Self::config_path()?;
- let content = std::fs::read_to_string(&path)
- .context("Failed to read config file. Please run 'ailog login' first.")?;
- let config: Config = serde_json::from_str(&content)?;
- Ok(config)
- }
-
- pub fn save(&self) -> Result<()> {
- let path = Self::config_path()?;
- let content = serde_json::to_string_pretty(self)?;
- std::fs::write(&path, content)?;
- println!("Config saved to: {}", path.display());
- Ok(())
- }
-
- pub fn load_mapping() -> Result<Mapping> {
- let path = Self::mapping_path()?;
- if !path.exists() {
- return Ok(HashMap::new());
- }
- let content = std::fs::read_to_string(&path)?;
- let mapping: Mapping = serde_json::from_str(&content)?;
- Ok(mapping)
- }
-
- pub fn save_mapping(mapping: &Mapping) -> Result<()> {
- let path = Self::mapping_path()?;
- let content = serde_json::to_string_pretty(mapping)?;
- std::fs::write(&path, content)?;
- Ok(())
- }
-}
diff --git a/src/delete.rs b/src/delete.rs
deleted file mode 100644
index e66eac6..0000000
--- a/src/delete.rs
+++ /dev/null
@@ -1,89 +0,0 @@
-use anyhow::{Context, Result};
-use serde::{Deserialize, Serialize};
-
-use crate::config::Config;
-
-#[derive(Debug, Serialize)]
-struct DeleteRecordRequest {
- repo: String,
- collection: String,
- rkey: String,
-}
-
-#[derive(Debug, Deserialize)]
-#[allow(dead_code)]
-struct ListRecordsResponse {
- records: Vec<Record>,
- cursor: Option<String>,
-}
-
-#[derive(Debug, Deserialize)]
-#[allow(dead_code)]
-struct Record {
- uri: String,
-}
-
-pub async fn execute() -> Result<()> {
- let mut config = Config::load()?;
-
- // Refresh session before API calls
- crate::refresh::refresh_session(&mut config).await?;
-
- let mut mapping = Config::load_mapping()?;
- println!("Deleting all records from ai.syui.log.post...");
-
- let pds_url = format!("https://{}", config.pds);
- let client = reqwest::Client::new();
-
- // List all records
- let list_url = format!(
- "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=ai.syui.log.post&limit=100",
- pds_url, config.did
- );
-
- let res: ListRecordsResponse = client
- .get(&list_url)
- .send()
- .await
- .context("Failed to list records")?
- .json()
- .await
- .context("Failed to parse listRecords response")?;
-
- if res.records.is_empty() {
- println!("No records to delete.");
- return Ok(());
- }
-
- println!("Found {} records to delete", res.records.len());
-
- // Delete each record
- for record in &res.records {
- let rkey = record.uri.split('/').last().unwrap();
-
- let delete_req = DeleteRecordRequest {
- repo: config.did.clone(),
- collection: "ai.syui.log.post".to_string(),
- rkey: rkey.to_string(),
- };
-
- let delete_url = format!("{}/xrpc/com.atproto.repo.deleteRecord", pds_url);
- client
- .post(&delete_url)
- .header("Authorization", format!("Bearer {}", config.access_jwt))
- .json(&delete_req)
- .send()
- .await
- .context("Failed to delete record")?;
-
- println!(" ✓ Deleted: {}", rkey);
- }
-
- // Clear mapping (all records deleted)
- mapping.clear();
- Config::save_mapping(&mapping)?;
- println!("Mapping cleared.");
-
- println!("Done! All records deleted.");
- Ok(())
-}
diff --git a/src/lib/api.ts b/src/lib/api.ts
new file mode 100644
index 0000000..c7bda89
--- /dev/null
+++ b/src/lib/api.ts
@@ -0,0 +1,217 @@
+import { AtpAgent } from '@atproto/api'
+import type { Profile, BlogPost, NetworkConfig } from '../types.js'
+
+const agents: Map<string, AtpAgent> = new Map()
+
+let networkConfig: NetworkConfig | null = null
+
+export function setNetworkConfig(config: NetworkConfig): void {
+ networkConfig = config
+}
+
+function getPlc(): string {
+ return networkConfig?.plc || 'https://plc.directory'
+}
+
+function getBsky(): string {
+ return networkConfig?.bsky || 'https://public.api.bsky.app'
+}
+
+export function getAgent(service: string): AtpAgent {
+ if (!agents.has(service)) {
+ agents.set(service, new AtpAgent({ service }))
+ }
+ return agents.get(service)!
+}
+
+export async function resolvePds(did: string): Promise<string> {
+ const res = await fetch(`${getPlc()}/${did}`)
+ const doc = await res.json()
+ const service = doc.service?.find((s: any) => s.type === 'AtprotoPersonalDataServer')
+ return service?.serviceEndpoint || getBsky()
+}
+
+export async function resolveHandle(handle: string): Promise<string> {
+ const agent = getAgent(getBsky())
+ const res = await agent.resolveHandle({ handle })
+ return res.data.did
+}
+
+export async function getProfile(actor: string): Promise<Profile> {
+ const agent = getAgent(getBsky())
+ const res = await agent.getProfile({ actor })
+ return {
+ did: res.data.did,
+ handle: res.data.handle,
+ displayName: res.data.displayName,
+ description: res.data.description,
+ avatar: res.data.avatar,
+ banner: res.data.banner,
+ }
+}
+
+export async function listRecords(
+ did: string,
+ collection: string,
+ limit = 50
+): Promise<BlogPost[]> {
+ const pds = await resolvePds(did)
+ const agent = getAgent(pds)
+ const res = await agent.com.atproto.repo.listRecords({
+ repo: did,
+ collection,
+ limit,
+ })
+
+ return res.data.records.map((record: any) => ({
+ uri: record.uri,
+ cid: record.cid,
+ title: record.value.title || '',
+ content: record.value.content || '',
+ createdAt: record.value.createdAt || '',
+ }))
+}
+
+export async function getRecord(
+ did: string,
+ collection: string,
+ rkey: string
+): Promise<BlogPost | null> {
+ const pds = await resolvePds(did)
+ const agent = getAgent(pds)
+ try {
+ const res = await agent.com.atproto.repo.getRecord({
+ repo: did,
+ collection,
+ rkey,
+ })
+ return {
+ uri: res.data.uri,
+ cid: res.data.cid || '',
+ title: (res.data.value as any).title || '',
+ content: (res.data.value as any).content || '',
+ createdAt: (res.data.value as any).createdAt || '',
+ }
+ } catch {
+ return null
+ }
+}
+
+export async function describeRepo(did: string): Promise<string[]> {
+ const pds = await resolvePds(did)
+ const agent = getAgent(pds)
+ const res = await agent.com.atproto.repo.describeRepo({ repo: did })
+ return res.data.collections || []
+}
+
+export async function listRecordsRaw(
+ did: string,
+ collection: string,
+ limit = 100
+): Promise<any[]> {
+ const pds = await resolvePds(did)
+ const agent = getAgent(pds)
+ const res = await agent.com.atproto.repo.listRecords({
+ repo: did,
+ collection,
+ limit,
+ })
+ return res.data.records
+}
+
+export async function getRecordRaw(
+ did: string,
+ collection: string,
+ rkey: string
+): Promise<any | null> {
+ const pds = await resolvePds(did)
+ const agent = getAgent(pds)
+ try {
+ const res = await agent.com.atproto.repo.getRecord({
+ repo: did,
+ collection,
+ rkey,
+ })
+ return res.data
+ } catch {
+ return null
+ }
+}
+
+// Known lexicon prefixes that have schemas
+const KNOWN_LEXICON_PREFIXES = [
+ 'app.bsky.',
+ 'chat.bsky.',
+ 'com.atproto.',
+ 'sh.tangled.',
+ 'pub.leaflet.',
+ 'blue.linkat.',
+ 'fyi.unravel.frontpage.',
+ 'com.whtwnd.',
+ 'com.shinolabs.pinksea.',
+]
+
+export function hasKnownSchema(nsid: string): boolean {
+ return KNOWN_LEXICON_PREFIXES.some(prefix => nsid.startsWith(prefix))
+}
+
+export async function fetchLexicon(nsid: string): Promise<any | null> {
+ // Check if it's a known lexicon first
+ if (hasKnownSchema(nsid)) {
+ return { id: nsid, known: true }
+ }
+
+ // Extract authority from NSID (e.g., "ai.syui.log.post" -> "syui.ai")
+ const parts = nsid.split('.')
+ if (parts.length < 3) return null
+
+ const authority = parts.slice(0, 2).reverse().join('.')
+ const url = `https://${authority}/.well-known/lexicon/${nsid}.json`
+
+ try {
+ const res = await fetch(url)
+ if (!res.ok) return null
+ return await res.json()
+ } catch {
+ return null
+ }
+}
+
+// Known service mappings for collections
+const SERVICE_MAP: Record<string, { name: string; domain: string; icon?: string }> = {
+ 'app.bsky': { name: 'Bluesky', domain: 'bsky.app', icon: 'https://bsky.app/static/favicon-32x32.png' },
+ 'ai.syui': { name: 'syui.ai', domain: 'syui.ai' },
+ 'com.whtwnd': { name: 'WhiteWind', domain: 'whtwnd.com' },
+ 'fyi.unravel.frontpage': { name: 'Frontpage', domain: 'frontpage.fyi' },
+ 'com.shinolabs.pinksea': { name: 'PinkSea', domain: 'pinksea.art' },
+ 'blue.linkat': { name: 'Linkat', domain: 'linkat.blue' },
+ 'sh.tangled': { name: 'Tangled', domain: 'tangled.sh' },
+ 'pub.leaflet': { name: 'Leaflet', domain: 'leaflet.pub' },
+ 'chat.bsky': { name: 'Bluesky Chat', domain: 'bsky.app' },
+}
+
+export function getServiceInfo(collection: string): { name: string; domain: string; favicon: string } | null {
+ // Try to find matching service prefix
+ for (const [prefix, info] of Object.entries(SERVICE_MAP)) {
+ if (collection.startsWith(prefix)) {
+ return {
+ name: info.name,
+ domain: info.domain,
+ favicon: info.icon || `https://www.google.com/s2/favicons?domain=${info.domain}&sz=32`
+ }
+ }
+ }
+
+ // Fallback: extract domain from first 2 parts of NSID
+ const parts = collection.split('.')
+ if (parts.length >= 2) {
+ const domain = parts.slice(0, 2).reverse().join('.')
+ return {
+ name: domain,
+ domain: domain,
+ favicon: `https://www.google.com/s2/favicons?domain=${domain}&sz=32`
+ }
+ }
+
+ return null
+}
diff --git a/src/lib/auth.ts b/src/lib/auth.ts
new file mode 100644
index 0000000..7c4342f
--- /dev/null
+++ b/src/lib/auth.ts
@@ -0,0 +1,147 @@
+import { BrowserOAuthClient } from '@atproto/oauth-client-browser'
+import { Agent } from '@atproto/api'
+import type { NetworkConfig } from '../types.js'
+
+let oauthClient: BrowserOAuthClient | null = null
+let agent: Agent | null = null
+let currentNetworkConfig: NetworkConfig | null = null
+
+export interface AuthSession {
+ did: string
+ handle: string
+ agent: Agent
+}
+
+export function setAuthNetworkConfig(config: NetworkConfig): void {
+ currentNetworkConfig = config
+ // Reset client when network changes
+ oauthClient = null
+}
+
+export async function initOAuthClient(): Promise<BrowserOAuthClient> {
+ if (oauthClient) return oauthClient
+
+ const handleResolver = currentNetworkConfig?.bsky || 'https://bsky.social'
+ const plcDirectoryUrl = currentNetworkConfig?.plc || 'https://plc.directory'
+
+ oauthClient = await BrowserOAuthClient.load({
+ clientId: getClientId(),
+ handleResolver,
+ plcDirectoryUrl,
+ })
+
+ return oauthClient
+}
+
+function getClientId(): string {
+ const host = window.location.host
+ // For localhost development
+ if (host.includes('localhost') || host.includes('127.0.0.1')) {
+ // client_id must start with http://localhost, redirect_uri must use 127.0.0.1
+ const port = window.location.port || '3000'
+ const redirectUri = `http://127.0.0.1:${port}/`
+ return `http://localhost?redirect_uri=${encodeURIComponent(redirectUri)}&scope=${encodeURIComponent('atproto transition:generic')}`
+ }
+ // For production, use the client-metadata.json
+ return `${window.location.origin}/client-metadata.json`
+}
+
+export async function login(handle: string): Promise<void> {
+ const client = await initOAuthClient()
+ await client.signIn(handle, {
+ scope: 'atproto transition:generic',
+ })
+}
+
+export async function handleOAuthCallback(): Promise<AuthSession | null> {
+ const params = new URLSearchParams(window.location.search)
+ if (!params.has('code') && !params.has('state')) {
+ return null
+ }
+
+ try {
+ const client = await initOAuthClient()
+ const result = await client.callback(params)
+
+ agent = new Agent(result.session)
+
+ // Get profile to get handle
+ const profile = await agent.getProfile({ actor: result.session.did })
+
+ // Clear URL params
+ window.history.replaceState({}, '', window.location.pathname)
+
+ return {
+ did: result.session.did,
+ handle: profile.data.handle,
+ agent,
+ }
+ } catch (err) {
+ console.error('OAuth callback error:', err)
+ return null
+ }
+}
+
+export async function restoreSession(): Promise<AuthSession | null> {
+ try {
+ const client = await initOAuthClient()
+ const result = await client.init()
+
+ if (result?.session) {
+ agent = new Agent(result.session)
+ const profile = await agent.getProfile({ actor: result.session.did })
+
+ return {
+ did: result.session.did,
+ handle: profile.data.handle,
+ agent,
+ }
+ }
+ } catch (err) {
+ console.error('Session restore error:', err)
+ }
+ return null
+}
+
+export async function logout(): Promise<void> {
+ // Clear all storage
+ sessionStorage.clear()
+ localStorage.clear()
+
+ // Clear IndexedDB (used by OAuth client)
+ const databases = await indexedDB.databases()
+ for (const db of databases) {
+ if (db.name) {
+ indexedDB.deleteDatabase(db.name)
+ }
+ }
+
+ agent = null
+ oauthClient = null
+}
+
+export function getAgent(): Agent | null {
+ return agent
+}
+
+export async function createPost(collection: string, title: string, content: string): Promise<{ uri: string; cid: string } | null> {
+ if (!agent) return null
+
+ try {
+ const result = await agent.com.atproto.repo.createRecord({
+ repo: agent.assertDid,
+ collection,
+ record: {
+ $type: collection,
+ title,
+ content,
+ createdAt: new Date().toISOString(),
+ },
+ })
+
+ return { uri: result.data.uri, cid: result.data.cid }
+ } catch (err) {
+ console.error('Create post error:', err)
+ throw err
+ }
+}
diff --git a/src/login.rs b/src/login.rs
deleted file mode 100644
index 4d50b08..0000000
--- a/src/login.rs
+++ /dev/null
@@ -1,83 +0,0 @@
-use anyhow::{Context, Result};
-use serde::{Deserialize, Serialize};
-
-use crate::config::Config;
-
-#[derive(Debug, Serialize)]
-struct CreateSessionRequest {
- identifier: String,
- password: String,
-}
-
-#[derive(Debug, Deserialize)]
-#[allow(dead_code)]
-struct CreateSessionResponse {
- #[serde(rename = "accessJwt")]
- access_jwt: String,
- #[serde(rename = "refreshJwt")]
- refresh_jwt: String,
- handle: String,
- did: String,
-}
-
-#[derive(Debug, Deserialize)]
-#[allow(dead_code)]
-struct DescribeRepoResponse {
- handle: String,
- did: String,
-}
-
-pub async fn execute(handle: &str, password: &str, pds: &str) -> Result<()> {
- println!("Logging in as {} to {}...", handle, pds);
-
- // Resolve handle to DID
- let pds_url = format!("https://{}", pds);
- let describe_url = format!(
- "{}/xrpc/com.atproto.repo.describeRepo?repo={}",
- pds_url, handle
- );
-
- let client = reqwest::Client::new();
- let describe_res: DescribeRepoResponse = client
- .get(&describe_url)
- .send()
- .await
- .context("Failed to resolve handle")?
- .json()
- .await
- .context("Failed to parse describeRepo response")?;
-
- println!("Resolved handle to DID: {}", describe_res.did);
-
- // Create session
- let session_url = format!("{}/xrpc/com.atproto.server.createSession", pds_url);
- let session_req = CreateSessionRequest {
- identifier: handle.to_string(),
- password: password.to_string(),
- };
-
- let session_res: CreateSessionResponse = client
- .post(&session_url)
- .json(&session_req)
- .send()
- .await
- .context("Failed to create session")?
- .json()
- .await
- .context("Failed to parse createSession response")?;
-
- println!("Successfully authenticated!");
-
- // Save config
- let config = Config {
- pds: pds.to_string(),
- handle: handle.to_string(),
- did: session_res.did,
- access_jwt: session_res.access_jwt,
- refresh_jwt: session_res.refresh_jwt,
- };
-
- config.save()?;
-
- Ok(())
-}
diff --git a/src/main.rs b/src/main.rs
deleted file mode 100644
index 6bcafbb..0000000
--- a/src/main.rs
+++ /dev/null
@@ -1,75 +0,0 @@
-use anyhow::Result;
-use clap::{Parser, Subcommand};
-
-mod config;
-mod login;
-mod post;
-mod build;
-mod delete;
-mod refresh;
-mod serve;
-
-#[derive(Parser)]
-#[command(name = "ailog")]
-#[command(about = "A simple static blog generator with atproto integration")]
-struct Cli {
- #[command(subcommand)]
- command: Commands,
-}
-
-#[derive(Subcommand)]
-enum Commands {
- /// Login to atproto PDS
- #[command(alias = "l")]
- Login {
- /// Handle (e.g., ai.syui.ai)
- handle: String,
- /// Password
- #[arg(short, long)]
- password: String,
- /// PDS server (e.g., syu.is, bsky.social)
- #[arg(short = 's', long, default_value = "syu.is")]
- pds: String,
- },
- /// Post markdown files to atproto
- #[command(alias = "p")]
- Post,
- /// Build static site from atproto records
- #[command(alias = "b")]
- Build,
- /// Delete all records from atproto
- #[command(alias = "d")]
- Delete,
- /// Start local preview server
- #[command(alias = "s")]
- Serve {
- /// Port number
- #[arg(short, long, default_value = "3000")]
- port: u16,
- },
-}
-
-#[tokio::main]
-async fn main() -> Result<()> {
- let cli = Cli::parse();
-
- match cli.command {
- Commands::Login { handle, password, pds } => {
- login::execute(&handle, &password, &pds).await?;
- }
- Commands::Post => {
- post::execute().await?;
- }
- Commands::Build => {
- build::execute().await?;
- }
- Commands::Delete => {
- delete::execute().await?;
- }
- Commands::Serve { port } => {
- serve::execute(port).await?;
- }
- }
-
- Ok(())
-}
diff --git a/src/main.ts b/src/main.ts
new file mode 100644
index 0000000..8881432
--- /dev/null
+++ b/src/main.ts
@@ -0,0 +1,158 @@
+import { getProfile, listRecords, getRecord, setNetworkConfig } from './lib/api.js'
+import { login, logout, restoreSession, handleOAuthCallback, setAuthNetworkConfig, type AuthSession } from './lib/auth.js'
+import { mountProfile } from './components/profile.js'
+import { mountPostList, mountPostDetail } from './components/posts.js'
+import { mountHeader } from './components/browser.js'
+import { mountAtBrowser } from './components/atbrowser.js'
+import { mountPostForm } from './components/postform.js'
+import type { AppConfig, Networks } from './types.js'
+
+let authSession: AuthSession | null = null
+
+async function loadConfig(): Promise<AppConfig> {
+ const res = await fetch('/config.json')
+ return res.json()
+}
+
+async function loadNetworks(): Promise<Networks> {
+ const res = await fetch('/networks.json')
+ return res.json()
+}
+
+function renderFooter(handle: string): string {
+ const parts = handle.split('.')
+ const username = parts[0] || handle
+ return `
+
+ `
+}
+
+function renderTabs(handle: string, mode: string | null, isLoggedIn: boolean): string {
+ const blogActive = !mode || mode === 'blog' ? 'active' : ''
+ const browserActive = mode === 'browser' ? 'active' : ''
+ const postActive = mode === 'post' ? 'active' : ''
+
+ let tabs = `
+ Blog
+ Browser
+ `
+
+ if (isLoggedIn) {
+ tabs += `Post`
+ }
+
+ return `${tabs}
`
+}
+
+async function init(): Promise<void> {
+ const [config, networks] = await Promise.all([loadConfig(), loadNetworks()])
+
+ // Set page title
+ document.title = config.title || 'ailog'
+
+ // Set network config
+ const networkConfig = networks[config.network]
+ if (networkConfig) {
+ setNetworkConfig(networkConfig)
+ setAuthNetworkConfig(networkConfig)
+ }
+
+ // Handle OAuth callback
+ const callbackSession = await handleOAuthCallback()
+ if (callbackSession) {
+ authSession = callbackSession
+ } else {
+ // Try to restore existing session
+ authSession = await restoreSession()
+ }
+
+ const params = new URLSearchParams(window.location.search)
+ const mode = params.get('mode')
+ const rkey = params.get('rkey')
+ const collection = params.get('collection')
+ const handle = params.get('handle') || config.handle
+
+ const profileEl = document.getElementById('profile')
+ const contentEl = document.getElementById('content')
+ const headerEl = document.getElementById('header')
+ const footerEl = document.getElementById('footer')
+
+ if (!profileEl || !contentEl || !headerEl) return
+
+ // Footer
+ if (footerEl) {
+ footerEl.innerHTML = renderFooter(config.handle)
+ }
+
+ const isLoggedIn = !!authSession
+
+ // Header with login
+ mountHeader(headerEl, handle, isLoggedIn, authSession?.handle, {
+ onBrowse: (newHandle) => {
+ const currentMode = params.get('mode')
+ if (currentMode === 'browser') {
+ window.location.href = `?mode=browser&handle=${newHandle}`
+ } else {
+ window.location.href = `?handle=${newHandle}`
+ }
+ },
+ onLogin: async () => {
+ const inputHandle = (document.getElementById('header-input') as HTMLInputElement)?.value || handle
+ try {
+ await login(inputHandle)
+ } catch (err) {
+ console.error('Login error:', err)
+ alert('Login failed: ' + err)
+ }
+ },
+ onLogout: async () => {
+ await logout()
+ window.location.reload()
+ }
+ })
+
+ // Post mode (requires login)
+ if (mode === 'post' && isLoggedIn) {
+ profileEl.innerHTML = renderTabs(handle, mode, isLoggedIn)
+ mountPostForm(contentEl, config.collection, () => {
+ window.location.href = `?handle=${handle}`
+ })
+ return
+ }
+
+ // AT Browser mode
+ if (mode === 'browser') {
+ profileEl.innerHTML = renderTabs(handle, mode, isLoggedIn)
+ await mountAtBrowser(contentEl, handle, collection, rkey)
+ return
+ }
+
+ // Blog mode (default)
+ try {
+ const profile = await getProfile(handle)
+
+ profileEl.innerHTML = renderTabs(handle, mode, isLoggedIn)
+ const profileContentEl = document.createElement('div')
+ profileEl.appendChild(profileContentEl)
+ mountProfile(profileContentEl, profile)
+
+ if (rkey) {
+ const post = await getRecord(profile.did, config.collection, rkey)
+ if (post) {
+ mountPostDetail(contentEl, post, handle)
+ } else {
+ contentEl.innerHTML = 'Post not found
'
+ }
+ } else {
+ const posts = await listRecords(profile.did, config.collection)
+ mountPostList(contentEl, posts)
+ }
+ } catch (err) {
+ console.error(err)
+ contentEl.innerHTML = `Failed to load: ${err}
`
+ }
+}
+
+init()
diff --git a/src/post.rs b/src/post.rs
deleted file mode 100644
index 6ceb97e..0000000
--- a/src/post.rs
+++ /dev/null
@@ -1,172 +0,0 @@
-use anyhow::{Context, Result};
-use serde::{Deserialize, Serialize};
-use walkdir::WalkDir;
-
-use crate::config::{Config, RecordMapping};
-
-#[derive(Debug, Serialize)]
-struct PutRecordRequest {
- repo: String,
- collection: String,
- #[serde(skip_serializing_if = "Option::is_none")]
-    rkey: Option<String>,
- record: PostRecord,
-}
-
-#[derive(Debug, Serialize, Clone)]
-struct PostRecord {
- #[serde(rename = "$type")]
- schema_type: String,
- title: String,
- content: String,
- #[serde(rename = "createdAt")]
- created_at: String,
-}
-
-#[derive(Debug, Deserialize)]
-#[allow(dead_code)]
-struct PutRecordResponse {
- uri: String,
- cid: String,
- #[serde(default)]
-    commit: Option<serde_json::Value>,
- #[serde(rename = "validationStatus", default)]
-    validation_status: Option<String>,
-}
-
-pub async fn execute() -> Result<()> {
- let mut config = Config::load()?;
-
- // Refresh session before API calls
- crate::refresh::refresh_session(&mut config).await?;
-
- let mut mapping = Config::load_mapping()?;
- println!("Posting markdown files from ./content/post/...");
-
- let pds_url = format!("https://{}", config.pds);
- let client = reqwest::Client::new();
-
- // Walk through ./content/post/
- for entry in WalkDir::new("./content/post")
- .into_iter()
- .filter_map(|e| e.ok())
- .filter(|e| e.path().extension().and_then(|s| s.to_str()) == Some("md"))
- {
- let path = entry.path();
- let filename = path
- .file_name()
- .and_then(|s| s.to_str())
- .context("Invalid filename")?
- .to_string();
-
- println!("Processing: {}", filename);
-
- let content = std::fs::read_to_string(path)?;
-
- // Use filename as title (simplified)
- let title = path
- .file_stem()
- .and_then(|s| s.to_str())
- .unwrap_or("Untitled");
-
- // Check if this file already has a mapping
- let existing_rkey = mapping.get(&filename).map(|m| m.rkey.clone());
-
- // Create record
- let record = PostRecord {
- schema_type: "ai.syui.log.post".to_string(),
- title: title.to_string(),
- content,
- created_at: chrono::Utc::now().to_rfc3339(),
- };
-
- let res: PutRecordResponse = if let Some(rkey) = existing_rkey.clone() {
- // Update existing record with putRecord
- let put_req = PutRecordRequest {
- repo: config.did.clone(),
- collection: "ai.syui.log.post".to_string(),
- rkey: Some(rkey),
- record: record.clone(),
- };
-
- let put_url = format!("{}/xrpc/com.atproto.repo.putRecord", pds_url);
- let response = client
- .post(&put_url)
- .header("Authorization", format!("Bearer {}", config.access_jwt))
- .json(&put_req)
- .send()
- .await
- .context("Failed to put record")?;
-
- let status = response.status();
- let body_text = response.text().await?;
-
- if !status.is_success() {
- eprintln!("Error response ({}): {}", status, body_text);
- anyhow::bail!("API returned error: {}", body_text);
- }
-
- serde_json::from_str(&body_text)
- .context(format!("Failed to parse putRecord response. Body: {}", body_text))?
- } else {
- // Create new record with createRecord (auto-generates TID)
- #[derive(Serialize)]
- struct CreateRecordRequest {
- repo: String,
- collection: String,
- record: PostRecord,
- }
-
- let create_req = CreateRecordRequest {
- repo: config.did.clone(),
- collection: "ai.syui.log.post".to_string(),
- record,
- };
-
- let create_url = format!("{}/xrpc/com.atproto.repo.createRecord", pds_url);
- let response = client
- .post(&create_url)
- .header("Authorization", format!("Bearer {}", config.access_jwt))
- .json(&create_req)
- .send()
- .await
- .context("Failed to create record")?;
-
- let status = response.status();
- let body_text = response.text().await?;
-
- if !status.is_success() {
- eprintln!("Error response ({}): {}", status, body_text);
- anyhow::bail!("API returned error: {}", body_text);
- }
-
- serde_json::from_str(&body_text)
- .context(format!("Failed to parse createRecord response. Body: {}", body_text))?
- };
-
- // Extract rkey from URI
- let rkey = res.uri.split('/').last().unwrap().to_string();
-
- // Update mapping
- mapping.insert(
- filename.clone(),
- RecordMapping {
- rkey: rkey.clone(),
- uri: res.uri.clone(),
- cid: res.cid.clone(),
- },
- );
-
- if existing_rkey.is_some() {
- println!(" ✓ Updated: {} ({})", title, rkey);
- } else {
- println!(" ✓ Created: {} ({})", title, rkey);
- }
- }
-
- // Save mapping
- Config::save_mapping(&mapping)?;
- println!("Mapping saved to: {}", Config::mapping_path()?.display());
- println!("Done!");
- Ok(())
-}
diff --git a/src/refresh.rs b/src/refresh.rs
deleted file mode 100644
index 878854b..0000000
--- a/src/refresh.rs
+++ /dev/null
@@ -1,50 +0,0 @@
-use anyhow::{Context, Result};
-use serde::Deserialize;
-
-use crate::config::Config;
-
-#[derive(Debug, Deserialize)]
-#[allow(dead_code)]
-struct RefreshSessionResponse {
- #[serde(rename = "accessJwt")]
- access_jwt: String,
- #[serde(rename = "refreshJwt")]
- refresh_jwt: String,
- handle: String,
- did: String,
-}
-
-pub async fn refresh_session(config: &mut Config) -> Result<()> {
- let pds_url = format!("https://{}", config.pds);
- let refresh_url = format!("{}/xrpc/com.atproto.server.refreshSession", pds_url);
-
- let client = reqwest::Client::new();
- let response = client
- .post(&refresh_url)
- .header("Authorization", format!("Bearer {}", config.refresh_jwt))
- .send()
- .await
- .context("Failed to refresh session")?;
-
- let status = response.status();
- let body_text = response.text().await?;
-
- if !status.is_success() {
- eprintln!("Refresh session failed ({}): {}", status, body_text);
- anyhow::bail!("Failed to refresh session. Please run 'ailog login' again.");
- }
-
- let res: RefreshSessionResponse = serde_json::from_str(&body_text)
- .context(format!("Failed to parse refreshSession response. Body: {}", body_text))?;
-
- // Update config with new tokens
- config.access_jwt = res.access_jwt;
- config.refresh_jwt = res.refresh_jwt;
-
- // Save updated config (silent)
- let path = Config::config_path()?;
- let content = serde_json::to_string_pretty(config)?;
- std::fs::write(&path, content)?;
-
- Ok(())
-}
diff --git a/src/serve.rs b/src/serve.rs
deleted file mode 100644
index 0104094..0000000
--- a/src/serve.rs
+++ /dev/null
@@ -1,29 +0,0 @@
-use anyhow::Result;
-use axum::Router;
-use std::net::SocketAddr;
-use tower_http::services::ServeDir;
-
-pub async fn execute(port: u16) -> Result<()> {
- let public_dir = "./public";
-
- // Check if public directory exists
- if !std::path::Path::new(public_dir).exists() {
- anyhow::bail!("Public directory not found. Run 'ailog build' first.");
- }
-
- println!("Starting server...");
- println!(" → Serving: {}", public_dir);
- println!(" → Address: http://localhost:{}", port);
- println!(" → Blog: http://localhost:{}/", port);
- println!(" → AT Browser: http://localhost:{}/at/", port);
- println!("\nPress Ctrl+C to stop");
-
- let app = Router::new().nest_service("/", ServeDir::new(public_dir));
-
- let addr = SocketAddr::from(([127, 0, 0, 1], port));
- let listener = tokio::net::TcpListener::bind(addr).await?;
-
- axum::serve(listener, app).await?;
-
- Ok(())
-}
diff --git a/src/styles/main.css b/src/styles/main.css
new file mode 100644
index 0000000..974f718
--- /dev/null
+++ b/src/styles/main.css
@@ -0,0 +1,557 @@
+* {
+ box-sizing: border-box;
+ margin: 0;
+ padding: 0;
+}
+
+body {
+ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Helvetica, Arial, sans-serif;
+ line-height: 1.6;
+ color: #1a1a1a;
+ background: #fff;
+}
+
+#app {
+ max-width: 800px;
+ margin: 0 auto;
+ padding: 20px;
+}
+
+/* Dark mode */
+@media (prefers-color-scheme: dark) {
+ body {
+ background: #0a0a0a;
+ color: #e0e0e0;
+ }
+ .profile {
+ background: #1a1a1a;
+ }
+ .post-item {
+ border-color: #333;
+ }
+ .post-link:hover {
+ background: #1a1a1a;
+ }
+ .browser-input {
+ background: #1a1a1a;
+ border-color: #333;
+ color: #e0e0e0;
+ }
+}
+
+/* Header */
+#header {
+ margin-bottom: 24px;
+}
+
+.header-form {
+ display: flex;
+ gap: 8px;
+ align-items: center;
+}
+
+.header-input {
+ flex: 1;
+ padding: 8px 12px;
+ border: 1px solid #ddd;
+ border-radius: 6px;
+ font-size: 14px;
+}
+
+.header-btn {
+ width: 36px;
+ height: 36px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ background: #f0f0f0;
+ color: #333;
+ border: 1px solid #ddd;
+ border-radius: 6px;
+ cursor: pointer;
+ font-size: 16px;
+ font-weight: bold;
+}
+
+.header-btn:hover {
+ background: #e0e0e0;
+}
+
+.header-btn.at-btn {
+ background: #0066cc;
+ color: #fff;
+ border-color: #0066cc;
+}
+
+.header-btn.at-btn:hover {
+ background: #0052a3;
+}
+
+.header-btn.login-btn {
+ color: #666;
+}
+
+.header-btn.user-btn {
+ background: #0066cc;
+ color: #fff;
+ border-color: #0066cc;
+}
+
+/* Post Form */
+.post-form-container {
+ padding: 20px 0;
+}
+
+.post-form-container h3 {
+ font-size: 18px;
+ margin-bottom: 16px;
+}
+
+.post-form {
+ display: flex;
+ flex-direction: column;
+ gap: 12px;
+}
+
+.post-form-title {
+ padding: 10px 12px;
+ border: 1px solid #ddd;
+ border-radius: 6px;
+ font-size: 16px;
+}
+
+.post-form-body {
+ padding: 10px 12px;
+ border: 1px solid #ddd;
+ border-radius: 6px;
+ font-size: 14px;
+ resize: vertical;
+ min-height: 120px;
+ font-family: inherit;
+}
+
+.post-form-footer {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+}
+
+.post-form-collection {
+ font-size: 12px;
+ color: #888;
+ font-family: monospace;
+}
+
+.post-form-btn {
+ padding: 10px 24px;
+ background: #0066cc;
+ color: #fff;
+ border: none;
+ border-radius: 6px;
+ font-size: 14px;
+ cursor: pointer;
+}
+
+.post-form-btn:hover {
+ background: #0052a3;
+}
+
+.post-form-btn:disabled {
+ background: #ccc;
+ cursor: not-allowed;
+}
+
+.post-status {
+ margin-top: 12px;
+}
+
+.post-success {
+ color: #155724;
+}
+
+.post-error {
+ color: #dc3545;
+}
+
+/* Profile */
+.profile {
+ display: flex;
+ gap: 16px;
+ padding: 20px;
+ background: #f5f5f5;
+ border-radius: 12px;
+ margin-bottom: 24px;
+}
+
+.profile-avatar {
+ width: 80px;
+ height: 80px;
+ border-radius: 50%;
+ object-fit: cover;
+}
+
+.profile-info {
+ flex: 1;
+}
+
+.profile-name {
+ font-size: 20px;
+ font-weight: 600;
+ margin-bottom: 4px;
+}
+
+.profile-handle {
+ font-size: 14px;
+ color: #666;
+ margin-bottom: 8px;
+}
+
+.profile-desc {
+ font-size: 14px;
+ color: #444;
+}
+
+/* Post List */
+.post-list {
+ list-style: none;
+}
+
+.post-item {
+ border-bottom: 1px solid #eee;
+}
+
+.post-link {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 16px 8px;
+ text-decoration: none;
+ color: inherit;
+}
+
+.post-link:hover {
+ background: #f9f9f9;
+}
+
+.post-title {
+ font-weight: 500;
+}
+
+.post-date {
+ font-size: 13px;
+ color: #888;
+}
+
+/* Post Detail */
+.post-detail {
+ padding: 20px 0;
+}
+
+.post-header {
+ margin-bottom: 24px;
+ padding-bottom: 16px;
+ border-bottom: 1px solid #eee;
+}
+
+.post-header .post-title {
+ font-size: 28px;
+ font-weight: 600;
+ margin-bottom: 8px;
+}
+
+.post-meta {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+}
+
+.post-header .post-date {
+ font-size: 14px;
+ color: #888;
+}
+
+.json-btn {
+ display: inline-flex;
+ align-items: center;
+ justify-content: center;
+ padding: 4px 8px;
+ background: #f0f0f0;
+ color: #666;
+ border-radius: 4px;
+ text-decoration: none;
+ font-family: monospace;
+ font-size: 12px;
+}
+
+.json-btn:hover {
+ background: #e0e0e0;
+ color: #333;
+}
+
+.post-content {
+ font-size: 16px;
+ line-height: 1.8;
+ white-space: pre-wrap;
+}
+
+.post-footer {
+ margin-top: 32px;
+ padding-top: 16px;
+ border-top: 1px solid #eee;
+}
+
+.back-link {
+ color: #0066cc;
+ text-decoration: none;
+}
+
+.back-link:hover {
+ text-decoration: underline;
+}
+
+/* Utility */
+.no-posts,
+.no-data,
+.error {
+ padding: 40px;
+ text-align: center;
+ color: #888;
+}
+
+.loading {
+ padding: 40px;
+ text-align: center;
+ color: #666;
+}
+
+/* Footer */
+.site-footer {
+ margin-top: 60px;
+ padding: 20px 0;
+ text-align: center;
+ font-size: 13px;
+ color: #888;
+}
+
+.site-footer p {
+ margin: 4px 0;
+}
+
+/* Mode Tabs */
+.mode-tabs {
+ display: flex;
+ gap: 4px;
+ margin-bottom: 16px;
+}
+
+.tab {
+ padding: 8px 16px;
+ text-decoration: none;
+ color: #666;
+ border-radius: 6px;
+ font-size: 14px;
+}
+
+.tab:hover {
+ background: #f0f0f0;
+}
+
+.tab.active {
+ background: #0066cc;
+ color: #fff;
+}
+
+/* AT Browser */
+.collections,
+.records,
+.record-detail {
+ padding: 16px 0;
+}
+
+.collections h3,
+.records h3,
+.record-detail h3 {
+ font-size: 18px;
+ margin-bottom: 12px;
+}
+
+.collection-list,
+.record-list {
+ list-style: none;
+}
+
+.collection-item,
+.record-item {
+ border-bottom: 1px solid #eee;
+}
+
+.collection-link,
+.record-link {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+ padding: 12px 8px;
+ text-decoration: none;
+ color: inherit;
+ font-family: monospace;
+ font-size: 14px;
+}
+
+.collection-link:hover,
+.record-link:hover {
+ background: #f9f9f9;
+}
+
+.collection-favicon {
+ width: 20px;
+ height: 20px;
+ flex-shrink: 0;
+}
+
+.collection-nsid {
+ flex: 1;
+}
+
+.collection-service {
+ font-size: 12px;
+ color: #888;
+ font-family: -apple-system, BlinkMacSystemFont, sans-serif;
+}
+
+.record-link {
+ display: flex;
+ gap: 16px;
+}
+
+.record-rkey {
+ color: #0066cc;
+ min-width: 120px;
+}
+
+.record-preview {
+ color: #666;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+}
+
+.record-count {
+ font-size: 13px;
+ color: #888;
+ margin-bottom: 12px;
+}
+
+/* Record Detail */
+.record-header {
+ margin-bottom: 16px;
+ padding-bottom: 16px;
+ border-bottom: 1px solid #eee;
+}
+
+.record-uri,
+.record-cid {
+ font-family: monospace;
+ font-size: 12px;
+ color: #666;
+ margin: 4px 0;
+ word-break: break-all;
+}
+
+.schema-status {
+ display: inline-block;
+ padding: 4px 8px;
+ border-radius: 4px;
+ font-size: 12px;
+ margin-top: 8px;
+}
+
+.schema-verified {
+ background: #d4edda;
+ color: #155724;
+}
+
+.schema-none {
+ background: #f0f0f0;
+ color: #666;
+}
+
+/* JSON View */
+.json-view {
+ background: #f5f5f5;
+ border-radius: 8px;
+ padding: 16px;
+ overflow-x: auto;
+}
+
+.json-view pre {
+ margin: 0;
+}
+
+.json-view code {
+ font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
+ font-size: 13px;
+ line-height: 1.5;
+}
+
+/* Dark mode additions */
+@media (prefers-color-scheme: dark) {
+ .header-input {
+ background: #1a1a1a;
+ border-color: #333;
+ color: #e0e0e0;
+ }
+ .header-btn {
+ background: #2a2a2a;
+ border-color: #333;
+ color: #e0e0e0;
+ }
+ .header-btn:hover {
+ background: #333;
+ }
+ .header-btn.at-btn,
+ .header-btn.user-btn {
+ background: #0066cc;
+ border-color: #0066cc;
+ color: #fff;
+ }
+ .post-form-title,
+ .post-form-body {
+ background: #1a1a1a;
+ border-color: #333;
+ color: #e0e0e0;
+ }
+ .json-btn {
+ background: #2a2a2a;
+ color: #888;
+ }
+ .json-btn:hover {
+ background: #333;
+ color: #e0e0e0;
+ }
+ .tab:hover {
+ background: #333;
+ }
+ .tab.active {
+ background: #0066cc;
+ }
+ .collection-link:hover,
+ .record-link:hover {
+ background: #1a1a1a;
+ }
+ .collection-item,
+ .record-item,
+ .record-header {
+ border-color: #333;
+ }
+ .json-view {
+ background: #1a1a1a;
+ }
+ .schema-verified {
+ background: #1e3a29;
+ color: #75b798;
+ }
+ .schema-none {
+ background: #2a2a2a;
+ color: #888;
+ }
+}
diff --git a/src/types.ts b/src/types.ts
new file mode 100644
index 0000000..75a8975
--- /dev/null
+++ b/src/types.ts
@@ -0,0 +1,30 @@
+export interface Profile {
+ did: string
+ handle: string
+ displayName?: string
+ description?: string
+ avatar?: string
+ banner?: string
+}
+
+export interface BlogPost {
+ uri: string
+ cid: string
+ title: string
+ content: string
+ createdAt: string
+}
+
+export interface NetworkConfig {
+ plc: string
+ bsky: string
+}
+
+export interface AppConfig {
+ title: string
+ handle: string
+ collection: string
+ network: string
+}
+
+export type Networks = Record<string, NetworkConfig>
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..5b581f2
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,18 @@
+{
+ "compilerOptions": {
+ "target": "ES2022",
+ "module": "ESNext",
+ "moduleResolution": "bundler",
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "forceConsistentCasingInFileNames": true,
+ "outDir": "./dist",
+ "rootDir": "./src",
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true
+ },
+ "include": ["src/**/*"],
+ "exclude": ["node_modules", "dist"]
+}
diff --git a/vite.config.ts b/vite.config.ts
new file mode 100644
index 0000000..046a0df
--- /dev/null
+++ b/vite.config.ts
@@ -0,0 +1,14 @@
+import { defineConfig } from 'vite'
+
+export default defineConfig({
+ root: '.',
+ publicDir: 'public',
+ build: {
+ outDir: 'dist',
+    emptyOutDir: true,
+ },
+ server: {
+ port: 3000,
+ host: '0.0.0.0',
+ },
+})