fix: crawler doesn't have config (#691) #100

Workflow file for this run

name: Release
on:
  push:
    branches:
      - main
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Node.js 18
        uses: actions/setup-node@v4
        with:
          node-version: 18
      - name: Install dependencies
        run: |
          npm install -g yarn
          yarn install --frozen-lockfile
      - name: Ignore OpenAPI
        run: |
          ./scripts/ignore-openapi
      - name: Build
        env:
          NODE_OPTIONS: "--max_old_space_size=7168"
        run: |
          yarn gen-api-docs
          yarn build
      - name: Upload static files as artifact
        id: deployment
        uses: actions/upload-pages-artifact@v3
        with:
          path: ./build
  release:
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      pages: write
    needs: build
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Crawler
        run: |
          docker run \
            -e APPLICATION_ID=${{ secrets.ALGOLIA_APP_ID }} \
            -e API_KEY=${{ secrets.ALGOLIA_CRAWLER_API_KEY }} \
            -e "CONFIG=$(cat ./docsearch.json | jq -r tostring)" \
            algolia/docsearch-scraper
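
For reference, the CONFIG variable in the Crawler step is the single-line JSON string that jq -r tostring produces from ./docsearch.json, which is why the checkout step must run before it so the file exists in the workspace. A minimal sketch of what that file might look like for the legacy algolia/docsearch-scraper image (the index name, start URL, and selectors below are placeholder assumptions, not this project's actual config):

{
  "index_name": "docs",
  "start_urls": ["https://example.com/docs/"],
  "selectors": {
    "lvl0": "h1",
    "lvl1": "h2",
    "lvl2": "h3",
    "text": "p, li"
  }
}

The scraper reads APPLICATION_ID, API_KEY, and CONFIG from the container environment, so no config file needs to be mounted into the container itself.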