#!/bin/zsh

# Sed compatibility wrapper
function sediment() {
    if [[ "$OSTYPE" == "darwin"* ]]; then
        sed -i '' "$@"
    else
        sed -i "$@"
    fi
}
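# Usage example (illustrative pattern/file):
#   sediment 's/foo/bar/g' some/file
# behaves like GNU `sed -i` on Linux and `sed -i ''` on macOS/BSD.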

# Patch compatibility wrapper
function patchment() {
    # -f : Force. Do not ask questions. (Standard in GNU and BSD patch)
    # -N : Ignore patches that seem to be reversed or already applied (Forward)
    # These flags are supplied by the caller, not here.
    patch "$@"
}

function at-repos-env() {
    APP_PASSWORD=xxx
    host=syu.is
    handle=ai.syui.ai
    did=did:plc:6qyecktefllvenje24fcxnie
    repos=(
        "https://github.com/did-method-plc/did-method-plc"
        "https://github.com/bluesky-social/indigo"
        "https://github.com/bluesky-social/atproto"
        "https://github.com/bluesky-social/social-app"
        "https://github.com/bluesky-social/feed-generator"
        "https://github.com/bluesky-social/ozone"
        "https://github.com/bluesky-social/jetstream"
    )
    services=(
        "bsky"
        "plc"
        "pds"
        "jetstream"
        "bgs"
        "ozone"
        "social-app"
        "feed"
    )
    handles=(
        "syui.syui.ai"
        "ai.syui.ai"
        "apple.syu.is"
    )
    d=${0:a:h}         # directory containing this script
    dh=${0:a:h:h}      # its parent directory
    name=${host%%.*}   # e.g. host=syu.is -> name=syu
    domain=${host##*.} # e.g. host=syu.is -> domain=is
    dport=5000         # local docker registry port

    # Commits checked out by at-repos-checkout-pinned
    typeset -A PINNED_COMMITS
    PINNED_COMMITS=(
        [indigo]="d49b454196351c988ceb5ce1f5e21b689487b5ab"
        [atproto]="104e6ed37b0589cc000109dc76316be35b2257e1"
    )
}

# Arrays for patch management
typeset -a FAILED_PATCHES

# Patch file lists
typeset -a PATCH_FILES_CURL
PATCH_FILES_CURL=(
    "4367-atproto-services-bsky-api.diff:https://raw.githubusercontent.com/bluesky-social/atproto/refs/heads/main/services/bsky/api.js:services/bsky/api.js"
    "4367-atproto-services-pds-index.diff:https://raw.githubusercontent.com/bluesky-social/atproto/refs/heads/main/services/pds/index.js:services/pds/index.js"
)
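# Each entry above is "<patch file>:<download URL>:<repo-relative target>",
# split apart in at-repos-patch-apply-all below.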

typeset -a PATCH_FILES
PATCH_FILES=(
    "170-pds-oauth-same-site-fix.patch"
    "8980-social-app-disable-proxy.diff"
    "disable-statsig-sdk.diff"
    "140-social-app-yarn-network-timeout.patch"
    "130-atproto-ozone-enable-daemon-v2.patch"
    "190-bgs-disable-ratelimit.patch"
    "200-feed-generator-custom.patch"
)
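# Patches are routed to a repo by filename keyword in at-repos-patch-apply-all:
# social-app/statsig -> social-app, atproto/pds -> atproto,
# indigo/bgs -> indigo, feed -> feed-generator.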

function at-repos-clone() {
    if [ ! -d $d/repos ];then
        mkdir -p $d/repos
    fi
    cd $d/repos
    for ((i=1; i<=${#repos}; i++)); do
        repo=${repos[$i]}
        echo $repo
        if [ ! -d $d/repos/${repo##*/} ];then
            git clone $repo
        fi
    done
    if [ ! -f $d/repos/feed-generator/Dockerfile ] && [ -f $d/docker/feed/Dockerfile ];then
        cp -rf $d/docker/feed/Dockerfile $d/repos/feed-generator/
    fi
}

function at-repos-pull() {
    cd $d/repos
    for ((i=1; i<=${#repos}; i++)); do
        repo=${repos[$i]}
        echo $repo
        if [ -d $d/repos/${repo##*/} ];then
            cd $d/repos/${repo##*/}
            # Clean up before pull: reset changes, remove .orig files and untracked patch-created files
            git checkout -- .
            find . -name "*.orig" -type f -delete 2>/dev/null
            git clean -fd 2>/dev/null
            git stash -u
            if ! git pull;then
                rm -rf $d/repos/${repo##*/}
                at-repos-clone
            fi
        fi
        # If repos_v (a JSON map of repo name -> ref, expected to be set elsewhere) pins this repo,
        # hard-reset to that ref
        rv=$(echo "$repos_v" | jq -r ".[\"${repo##*/}\"]")
        if [ "$rv" != "null" ];then
            cd $d/repos/${repo##*/}
            git reset --hard $rv
            cd ..
        fi
    done
    cd $d
}

function at-repos-checkout-pinned() {
    echo "🔒 Checking out pinned commits..."
    cd $d/repos
    for repo_name pinned_commit in ${(kv)PINNED_COMMITS}; do
        if [ -n "$pinned_commit" ] && [ -d "$d/repos/$repo_name" ]; then
            echo " 📌 $repo_name -> $pinned_commit"
            cd $d/repos/$repo_name
            git fetch origin
            git checkout $pinned_commit
            cd $d/repos
        fi
    done
    cd $d
}

function at-repos-social-app-ios-patch() {
    $d/ios/setup.zsh
}

# Common patch function with status detection
function apply-patch() {
    local patch_name=$1
    local target_dir=$2
    local patch_file=$3

    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo "📝 Patch: ${patch_name}"
    echo " Target: ${target_dir}"
    echo " File: ${patch_file}"

    pushd ${target_dir} > /dev/null

    # Check if patch is already applied (reverse dry-run succeeds)
    # Use -f to force dry-run to fail instead of asking questions if unapplied
    if patch -f --dry-run -p1 -R < ${patch_file} > /dev/null 2>&1; then
        echo "✅ Already applied - skipping"
        popd > /dev/null
        echo ""
        return 0
    fi

    # Check if patch can be applied (forward dry-run succeeds)
    if patch -f --dry-run -p1 < ${patch_file} > /dev/null 2>&1; then
        echo "🔧 Applying patch..."
        if patch -f -p1 < ${patch_file}; then
            echo "✅ Applied successfully"
            popd > /dev/null
            echo ""
            return 0
        else
            echo "❌ Failed to apply"
            FAILED_PATCHES+=("${patch_name} (${patch_file})")
            popd > /dev/null
            echo ""
            return 1
        fi
    else
        echo "⚠️ Cannot apply - file may have been modified"
        echo " Please check manually"
        FAILED_PATCHES+=("${patch_name} (${patch_file}) - file modified")
        popd > /dev/null
        echo ""
        return 1
    fi
}
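# Example (as used by at-repos-ozone-patch below):
#   apply-patch "Ozone enable daemon" "$d/repos/ozone" "$d/patching/122-ozone-enable-daemon.diff"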

# Function to display failed patches summary
function show-failed-patches() {
    if [ ${#FAILED_PATCHES[@]} -eq 0 ]; then
        echo ""
        echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
        echo "✅ All patches applied successfully!"
        echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
        echo ""
        return 0
    fi

    echo ""
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo "⚠️ FAILED PATCHES SUMMARY"
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo ""
    echo "The following patches could not be applied:"
    echo ""
    for failed_patch in "${FAILED_PATCHES[@]}"; do
        echo " ❌ ${failed_patch}"
    done
    echo ""
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo ""
}

# Helper function for applying patches
function patch-apply() {
    local name=$1
    local target=$2
    local patch_file=$3
    apply-patch "${name}" "$d/repos/${target}" "$d/patching/${patch_file}"
}

# Helper function for patches with curl download
function patch-apply-with-curl() {
    local name=$1
    local target=$2
    local patch_file=$3
    local download_url=$4
    local download_target=$5

    curl -sL "${download_url}" -o "$d/repos/${target}/${download_target}"
    apply-patch "${name}" "$d/repos/${target}" "$d/patching/${patch_file}"
}
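# Example (first PATCH_FILES_CURL entry, expanded):
#   patch-apply-with-curl "4367-atproto-services-bsky-api" "atproto" \
#     "4367-atproto-services-bsky-api.diff" \
#     "https://raw.githubusercontent.com/bluesky-social/atproto/refs/heads/main/services/bsky/api.js" \
#     "services/bsky/api.js"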

# Auto-apply patches from list
function at-repos-patch-apply-all() {
    # Apply curl patches
    for patch_info in "${PATCH_FILES_CURL[@]}"; do
        # The URL contains colons, so take the patch file before the first colon
        # and the target path after the last colon.
        local filename="${patch_info%%:*}"
        local rest="${patch_info#*:}"
        local download_url="${rest%:*}"
        local download_target="${rest##*:}"

        local title="${filename%.*}"
        local repo=""

        # Determine repo from filename
        if [[ $filename == *"atproto"* ]]; then
            repo="atproto"
        elif [[ $filename == *"pds"* ]]; then
            repo="atproto"
        fi

        patch-apply-with-curl "$title" "$repo" "$filename" "$download_url" "$download_target"
    done

    # Apply regular patches
    for filename in "${PATCH_FILES[@]}"; do
        local title="${filename%.*}"
        local repo=""

        # Determine repo from filename
        if [[ $filename == *"social-app"* || $filename == *"statsig"* ]]; then
            repo="social-app"
        elif [[ $filename == *"atproto"* ]]; then
            repo="atproto"
        elif [[ $filename == *"pds"* ]]; then
            repo="atproto"
        elif [[ $filename == *"indigo"* || $filename == *"bgs"* ]]; then
            repo="indigo"
        elif [[ $filename == *"feed"* ]]; then
            repo="feed-generator"
        fi

        patch-apply "$title" "$repo" "$filename"
    done
}

function at-repos-ozone-patch() {
    cd $d/repos
    d_=$d/repos/ozone
    rm -rf ${d_}
    git clone https://github.com/bluesky-social/ozone

    apply-patch "Ozone enable daemon" "${d_}" "$d/patching/122-ozone-enable-daemon.diff"

    if [ -f "$d/patching/150-ozone-plc-fix.patch" ]; then
        apply-patch "Ozone plc fix" "${d_}" "$d/patching/150-ozone-plc-fix.patch"
    fi

    if [ -f "$d/patching/160-ozone-oauth-redirect-fix.patch" ]; then
        apply-patch "Ozone oauth redirect fix" "${d_}" "$d/patching/160-ozone-oauth-redirect-fix.patch"
    fi

    # Apply constants fix and do additional sed replacements
    pushd ${d_} > /dev/null
    if [ -f "$d/patching/121-ozone-constants-fix.patch" ]; then
        patch -p1 < "$d/patching/121-ozone-constants-fix.patch" 2>/dev/null || true
    fi

    # Replace process.env with env(), e.g. process.env.NEXT_PUBLIC_FOO -> env('NEXT_PUBLIC_FOO')
    sediment 's/process\.env\.\(NEXT_PUBLIC_[A-Z_]*\)/env('\''\1'\'')/g' lib/constants.ts 2>/dev/null || true
    sediment 's/process\.env\.NODE_ENV/env('\''NODE_ENV'\'')/g' lib/constants.ts 2>/dev/null || true
    # Add missing SOCIAL_APP_DOMAIN constant after SOCIAL_APP_URL
    sediment '/^export const SOCIAL_APP_URL =/,/^$/{ /^$/a\
export const SOCIAL_APP_DOMAIN =\
env('\''NEXT_PUBLIC_SOCIAL_APP_DOMAIN'\'') || '\''bsky.app'\''\

}' lib/constants.ts 2>/dev/null || true
    # Fix multiline process.env patterns
    sediment '/^export const NEW_ACCOUNT_MARKER_THRESHOLD_IN_DAYS = process\.env$/,/^ : 7$/ {
s/^export const NEW_ACCOUNT_MARKER_THRESHOLD_IN_DAYS = process\.env$/export const NEW_ACCOUNT_MARKER_THRESHOLD_IN_DAYS = env('\''NEXT_PUBLIC_NEW_ACCOUNT_MARKER_THRESHOLD_IN_DAYS'\'')/
/^ \.NEXT_PUBLIC_NEW_ACCOUNT_MARKER_THRESHOLD_IN_DAYS$/d
}' lib/constants.ts 2>/dev/null || true
    sediment '/^export const YOUNG_ACCOUNT_MARKER_THRESHOLD_IN_DAYS = process\.env$/,/^ : 30$/ {
s/^export const YOUNG_ACCOUNT_MARKER_THRESHOLD_IN_DAYS = process\.env$/export const YOUNG_ACCOUNT_MARKER_THRESHOLD_IN_DAYS = env('\''NEXT_PUBLIC_YOUNG_ACCOUNT_MARKER_THRESHOLD_IN_DAYS'\'')/
/^ \.NEXT_PUBLIC_YOUNG_ACCOUNT_MARKER_THRESHOLD_IN_DAYS$/d
}' lib/constants.ts 2>/dev/null || true
    sediment '/^export const HIGH_PROFILE_FOLLOWER_THRESHOLD = process\.env$/,/^ : Infinity$/ {
s/^export const HIGH_PROFILE_FOLLOWER_THRESHOLD = process\.env$/export const HIGH_PROFILE_FOLLOWER_THRESHOLD = env('\''NEXT_PUBLIC_HIGH_PROFILE_FOLLOWER_THRESHOLD'\'')/
/^ \.NEXT_PUBLIC_HIGH_PROFILE_FOLLOWER_THRESHOLD$/d
}' lib/constants.ts 2>/dev/null || true
    # Fix parseInt() to handle undefined by adding || '0'
    sediment "s/parseInt(env('\([^']*\)'))/parseInt(env('\1') || '0')/g" lib/constants.ts 2>/dev/null || true
    popd > /dev/null
}

function at-repos-build-docker-atproto() {
    cd $d
    docker image prune -a
    if [ -z "$1" ];then
        for ((i=1; i<=${#services}; i++)); do
            service=${services[$i]}
            docker compose build --no-cache $service
            if [ "$service" = "ozone" ]; then
                docker compose build --no-cache ${service}-web
            fi
        done
    else
        docker compose build --no-cache $1
    fi
}

function at-repos-push-reset() {
    if [ -n "$(docker ps -q -f name=registry)" ]; then
        echo "Registry is already running."
        docker restart registry
        docker stop registry
        docker rm registry
        docker volume rm registry-data 2>/dev/null || true
    fi
    docker run -d -p ${dport}:${dport} --name registry \
        --restart=always \
        -v registry-data:/var/lib/registry \
        registry:2
}
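# The registry above is the push target for at-repos-push-docker, which tags
# images as localhost:${dport}/<service>:latest (dport=5000 by default).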

function at-repos-push-docker() {
    if [ -z "$1" ] || [ "$1" = "push" ]; then
        for service in "${services[@]}"; do
            docker tag at-${service}:latest localhost:${dport}/${service}:latest
            docker push localhost:${dport}/${service}:latest
            if [ "$service" = "ozone" ]; then
                docker tag at-${service}-web:latest localhost:${dport}/${service}-web:latest
                docker push localhost:${dport}/${service}-web:latest
            fi
        done
    else
        docker tag at-${1}:latest localhost:${dport}/${1}:latest
        docker push localhost:${dport}/${1}:latest
    fi
}

function at-repos-pull-docker() {
    cd $d
    docker image prune -a
    docker compose up -d --pull always
}

function at-repos-reset-bgs-db() {
    dp=at-database-1
    BGS_ADMIN_KEY=`cat $d/envs/bgs | grep BGS_ADMIN_KEY | cut -d '=' -f 2`

    echo "🛑 Stopping BGS..."
    docker compose stop bgs

    echo "🗑️ Cleaning data..."
    sudo rm -rf $d/data/bgs/*

    echo "♻️ Resetting Databases..."
    docker exec -i $dp psql -U postgres -c "DROP DATABASE IF EXISTS bgs;"
    docker exec -i $dp psql -U postgres -c "CREATE DATABASE bgs;"
    docker exec -i $dp psql -U postgres -c "GRANT ALL PRIVILEGES ON DATABASE bgs TO postgres;"

    docker exec -i $dp psql -U postgres -c "DROP DATABASE IF EXISTS carstore;"
    docker exec -i $dp psql -U postgres -c "CREATE DATABASE carstore;"
    docker exec -i $dp psql -U postgres -c "GRANT ALL PRIVILEGES ON DATABASE carstore TO postgres;"

    echo "🚀 Starting BGS to initialize tables..."
    docker compose up -d bgs

    echo "⏳ Waiting 10s for BGS migration..."
    sleep 10

    echo "⚙️ Updating Slurp Config..."
    docker exec -i $dp psql -U postgres -d bgs -c "UPDATE slurp_configs SET new_subs_disabled = false, new_pds_per_day_limit = 1000 WHERE id = 1;"

    # host=pds:3000
    echo "🔗 Registering Trusted Domain..."
    # Retry loop for addTrustedDomain as BGS might still be warming up
    for i in {1..5}; do
        if curl -f -X POST "https://bgs.${host}/admin/pds/addTrustedDomain?domain=${host}" -H "Authorization: Bearer ${BGS_ADMIN_KEY}"; then
            echo ""
            echo "✅ Trusted domain registered"
            break
        fi
        echo "Failed to contact BGS (attempt $i/5)... waiting 5s"
        sleep 5
    done

    echo "🔗 Requesting PDS Crawl..."
    # Request BGS to crawl the PDS - this registers the PDS and starts subscription
    for i in {1..5}; do
        result=$(curl -s -X POST "https://bgs.${host}/admin/pds/requestCrawl" \
            -H "Authorization: Bearer ${BGS_ADMIN_KEY}" \
            -H "Content-Type: application/json" \
            -d "{\"hostname\":\"${host}\"}" \
            -w "%{http_code}" -o /dev/null)
        if [ "$result" = "200" ]; then
            echo "✅ PDS crawl requested successfully"
            break
        fi
        echo "Failed to request crawl (attempt $i/5, status: $result)... waiting 5s"
        sleep 5
    done

    echo "⏳ Waiting 5s for BGS to connect to PDS..."
    sleep 5

    echo "🔄 Triggering repo sync for existing users..."
    for ((i=1; i<=${#handles}; i++)); do
        handle=${handles[$i]}
        did=$(curl -sL "https://${host}/xrpc/com.atproto.repo.describeRepo?repo=${handle}" | jq -r .did)
        if [ -n "$did" ] && [ "$did" != "null" ]; then
            echo " Syncing repo: $handle ($did)"
            # Use takedown=false to trigger a resync without actually taking down
            curl -s -X POST "https://bgs.${host}/admin/repo/takedown?did=${did}&takedown=false" \
                -H "Authorization: Bearer ${BGS_ADMIN_KEY}" || true
        else
            echo " Skipping $handle (DID not found)"
        fi
    done

    echo ""
    echo "✅ BGS reset complete!"
    echo " PDS should now be subscribed and syncing repos."
}

function at-repos-feed-generator-start-push() {
    cd $d/repos/feed-generator
    yarn install
    # Exported so the publish script can read them from its environment
    export FEEDGEN_HANDLE=${handle}
    export FEEDGEN_PASSWORD=${APP_PASSWORD}
    export FEEDGEN_RECORD_NAME=app
    export FEEDGEN_AVATAR=$d/repos/atproto/packages/dev-env/assets/at.png
    npx tsx scripts/publish.ts
}

function at-repos-feed-generator-update() {

    resp=$(curl -sL -X POST -H "Content-Type: application/json" -d "{\"identifier\":\"$handle\",\"password\":\"${APP_PASSWORD}\"}" https://${host}/xrpc/com.atproto.server.createSession)
    token=$(echo $resp | jq -r .accessJwt)
    if [ -z "$token" ] || [ "$token" == "null" ]; then
        echo "Login failed: $resp"
        exit 1
    fi

    # Avatar blob reference; img_id (the CID of a previously uploaded blob) is expected to be set beforehand
    avatar_json="{\"\$type\":\"blob\",\"ref\":{\"\$link\":\"${img_id}\"},\"mimeType\":\"image/jpeg\",\"size\":375259}"

    # 3. Delete cmd record
    #curl -sL -X POST -H "Content-Type: application/json" -H "Authorization: Bearer $token" \
    # -d "{\"repo\":\"$handle\",\"collection\":\"app.bsky.feed.generator\",\"rkey\":\"cmd\"}" \
    # https://${host}/xrpc/com.atproto.repo.deleteRecord

    # 4. Put app record
    echo "Creating app record..."
    now=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

    # Create JSON payload
    # Note: feeding avatar_json directly into jq
    payload=$(jq -n \
        --arg repo "$handle" \
        --arg collection "app.bsky.feed.generator" \
        --arg rkey "app" \
        --arg did "did:web:feed.${host}" \
        --arg type "app.bsky.feed.generator" \
        --arg created "$now" \
        --arg display "App Feed" \
        --arg desc "Automated App Feed" \
        --argjson avatar "$avatar_json" \
        '{
            repo: $repo,
            collection: $collection,
            rkey: $rkey,
            record: {
                did: $did,
                "$type": $type,
                createdAt: $created,
                displayName: $display,
                description: $desc,
                avatar: $avatar
            }
        }')

    # Pipe the payload into curl (-d @- reads the request body from stdin)
    echo "$payload" | curl -sL -X POST -H "Content-Type: application/json" -H "Authorization: Bearer $token" \
        -d @- \
        https://${host}/xrpc/com.atproto.repo.putRecord
}

at-repos-env
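
# Subcommands (first argument):
#   pull       clone missing repos and update existing ones
#   patch      run iOS setup, apply all patch lists, and re-patch ozone
#   build      docker compose build --no-cache [service]
#   push       tag and push built images to the local registry (localhost:$dport)
#   reset      recreate the local docker registry container
#   down       docker compose down
#   feed-push  publish the feed generator record
#   bgs-reset  reset BGS databases and re-crawl the PDS (hostname "at" only)
# With no argument: on hostname "at" pull and start images; otherwise run the
# full clone/patch/build/push flow.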

case "$1" in
    pull)
        at-repos-clone
        at-repos-pull
        exit
        ;;
    patch)
        at-repos-social-app-ios-patch
        at-repos-patch-apply-all
        at-repos-ozone-patch
        show-failed-patches
        exit
        ;;
    build)
        at-repos-build-docker-atproto $2
        exit
        ;;
    push)
        at-repos-push-docker $2
        exit
        ;;
    reset)
        at-repos-push-reset
        exit
        ;;
    down)
        cd $d;docker compose down
        exit
        ;;
    feed-push)
        at-repos-feed-generator-start-push
        exit
        ;;
esac

case "`cat /etc/hostname`" in
    at)
        if [ "$1" = "bgs-reset" ];then
            at-repos-reset-bgs-db
            exit
        fi
        at-repos-pull-docker
        exit
        ;;
    *)
        at-repos-clone
        at-repos-pull
        at-repos-checkout-pinned
        at-repos-social-app-ios-patch
        at-repos-patch-apply-all
        at-repos-ozone-patch
        show-failed-patches
        at-repos-build-docker-atproto
        at-repos-push-docker
        cd $d; docker compose down
        ;;
esac