run.config:
  engine: ruby
  engine.config:
    runtime: ruby-2.5

  extra_packages:
    # basic servers:
    - nginx
    - nodejs

    # for images:
    - ImageMagick
    - jemalloc

    # for videos:
    - ffmpeg3

    # to prep the .env file:
    - gettext-tools

    # for node-gyp, used in the asset compilation process:
    - python-2

    # i18n:
    - libidn

  cache_dirs:
    - node_modules

  extra_path_dirs:
    - node_modules/.bin

  build_triggers:
    - .ruby-version
    - Gemfile
    - Gemfile.lock
    - package.json
    - yarn.lock

  extra_steps:
    - cp .env.nanobox .env
    - yarn

  fs_watch: true

deploy.config:
  extra_steps:
    - NODE_ENV=production bundle exec rake assets:precompile
  transform:
    - "envsubst < /app/.env.nanobox > /app/.env.production"
    - |-
      if [ -z "$LOCAL_DOMAIN" ]
      then
        . /app/.env.production
        export LOCAL_DOMAIN
      fi
      erb /app/nanobox/nginx-web.conf.erb > /app/nanobox/nginx-web.conf
      erb /app/nanobox/nginx-stream.conf.erb > /app/nanobox/nginx-stream.conf
    - touch /app/log/production.log
  before_live:
    web.web:
      - bin/tootctl cache clear
      - bundle exec rake db:migrate:setup
  after_live:
    worker.sidekiq:
      - |-
        if [[ "${ES_ENABLED}" != "false" ]]
        then
          bin/tootctl search deploy
        fi

web.web:
  start:
    nginx: nginx -c /app/nanobox/nginx-web.conf
    rails: bundle exec puma -C /app/config/puma.rb

  routes:
    - '/'

  writable_dirs:
    - tmp

  log_watch:
    rails: 'log/production.log'

  network_dirs:
    data.storage:
      - public/system

web.stream:
  start:
    nginx: nginx -c /app/nanobox/nginx-stream.conf
    node: yarn run start

  routes:
    - '/api/v1/streaming*'
    # Somehow we're getting requests for scheme://domain//api/v1/streaming* - match those, too
    - '//api/v1/streaming*'

  writable_dirs:
    - tmp

worker.sidekiq:
  start:
    default: bundle exec sidekiq -c 5 -q default -L /app/log/sidekiq.log
    mailers: bundle exec sidekiq -c 5 -q mailers -L /app/log/sidekiq.log
    pull: bundle exec sidekiq -c 5 -q pull -L /app/log/sidekiq.log
    push: bundle exec sidekiq -c 5 -q push -L /app/log/sidekiq.log

  writable_dirs:
    - tmp

  log_watch:
    rails: 'log/production.log'
    sidekiq: 'log/sidekiq.log'

  network_dirs:
    data.storage:
      - public/system
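# Data components. Each one runs the same nightly backup cron job
# ('0 3 * * *'): dump or archive the component's data, stream it to the
# warehouse blob store over HTTPS, then prune this host's older backups,
# keeping only the newest ${BACKUP_COUNT:-1} (head -n-N passes through all
# but the last N sorted entries, and everything passed through is deleted).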
data.db:
  image: nanobox/postgresql:9.6

  cron:
    - id: backup
      schedule: '0 3 * * *'
      command: |
        PGPASSWORD=${DATA_DB_PASS} pg_dump -U ${DATA_DB_USER} -w -Fc -O gonano |
        gzip |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz -X POST -T - >&2
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done

data.elastic:
  image: nanobox/elasticsearch:5

  cron:
    - id: backup
      schedule: '0 3 * * *'
      command: |
        id=$(cat /proc/sys/kernel/random/uuid)
        curl -X PUT -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}" -d "{\"type\": \"fs\",\"settings\": {\"location\": \"/var/tmp/${id}\",\"compress\": true}}"
        curl -X PUT -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}/backup?wait_for_completion=true&pretty"
        tar -cz -C "/var/tmp/${id}" . |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz -X POST -T - >&2
        curl -X DELETE -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}"
        rm -rf "/var/tmp/${id}"
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done

data.redis:
  image: nanobox/redis:4.0

  cron:
    - id: backup
      schedule: '0 3 * * *'
      command: |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb -X POST -T /data/var/db/redis/dump.rdb >&2
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done

data.storage:
  image: nanobox/unfs:0.9

  cron:
    - id: backup
      schedule: '0 3 * * *'
      command: |
        tar cz -C /data/var/db/unfs/ . |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz -X POST -T - >&2
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done
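# Restores are not automated here. As a rough sketch only (the blob name is a
# placeholder, and this assumes the warehouse also serves stored blobs via a
# GET on the same /blobs/ endpoint), a data.db dump made by the cron job
# above could be restored with something like:
#
#   curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" \
#     https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-<hostname>-<timestamp>.sql.gz |
#   gunzip |
#   PGPASSWORD=${DATA_DB_PASS} pg_restore -U ${DATA_DB_USER} -d gonano --clean --if-exists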