diff --git a/.github/workflows/deploy-api-dev.yml b/.github/workflows/deploy-api-dev.yml
index c57e27b..4cd2a37 100644
--- a/.github/workflows/deploy-api-dev.yml
+++ b/.github/workflows/deploy-api-dev.yml
@@ -24,18 +24,10 @@
# and best practices on handling the access key credentials.
on:
- push:
- branches:
- - master
- paths:
- - deployment/config/api-dev.bedbase.org.yaml
workflow_dispatch:
inputs: null
-
- workflow_run:
- workflows: [ "Build and push dev image to Dockerhub" ]
- types:
- - completed
+    # NOTE: `paths` filters apply only to push/pull_request triggers;
+    # workflow_dispatch ignores them, so this workflow is manual-dispatch only.
name: Deploy to Amazon ECS - dev1
@@ -46,8 +38,6 @@ jobs:
name: Deploy
runs-on: ubuntu-latest
- needs: [dockerhub-dev]
-
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -79,7 +69,7 @@ jobs:
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
- task-definition: task_defs/port82.json
+ task-definition: deployment/task_defs/port82.json
container-name: bedhost
image: ${{ steps.build-image.outputs.image }}
diff --git a/.github/workflows/deploy-api.yml b/.github/workflows/deploy-api.yml
index d2a10ea..22baf78 100644
--- a/.github/workflows/deploy-api.yml
+++ b/.github/workflows/deploy-api.yml
@@ -24,13 +24,8 @@
 # and best practices on handling the access key credentials.
 on:
-  push:
-    branches:
-      - master
-    paths:
-      - deployment/config/api.bedbase.org.yaml
   workflow_dispatch:
     inputs: null
name: Deploy to Amazon ECS - primary
@@ -70,7 +67,7 @@ jobs:
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
- task-definition: task_defs/port81.json
+ task-definition: deployment/task_defs/port81.json
container-name: bedhost
image: ${{ steps.build-image.outputs.image }}
diff --git a/README.md b/README.md
index 8ef5d57..5ea73bc 100644
--- a/README.md
+++ b/README.md
@@ -21,53 +21,3 @@ It needs a path to the *bedbase configuration file*, which can be provided eithe
**DEV UI**: https://dev.bedhost.pages.dev/
**Source Code**: https://github.com/databio/bedhost/
-
----
-
-## Running for development
-
-Running with `uvicorn` provides auto-reload. To configure, this assumes you have previously set up `databio/secrets`.
-
-1. Source `.env` file to populate the environment variables referenced in the configuration file.
-2. Start `bedhost` using `uvicorn` and pass the configuration file via the `BEDBASE_CONFIG` env var.
-
-
-```console
-source ../bedbase.org/environment/production.env
-BEDBASE_CONFIG=../bedbase.org/config/api.bedbase.org.yaml uvicorn bedhost.main:app --reload
-```
-
-You can change the database you're connecting to by using a different config file:
-- Using a local config: `BEDBASE_CONFIG=../bbconf/tests/data/config.yaml uvicorn bedhost.main:app --reload`
-- With new database: `BEDBASE_CONFIG=../bedbase.org/config/bedbase2.yaml uvicorn bedhost.main:app --reload`
-
-Now, you can access the service at [http://127.0.0.1:8000](http://127.0.0.1:8000). Example endpoints:
-- 127.0.0.1:8000/bed/78c0e4753d04b238fc07e4ebe5a02984/img/open_chromatin
-- 127.0.0.1:8000/bed/78c0e4753d04b238fc07e4ebe5a02984/img_path/open_chromatin
-- 127.0.0.1:8000/bed/78c0e4753d04b238fc07e4ebe5a02984/file/bedfile
-- 127.0.0.1:8000/bed/78c0e4753d04b238fc07e4ebe5a02984/file_path/bedfile
-- 127.0.0.1:8000/bed/78c0e4753d04b238fc07e4ebe5a02984/metadata
-- 127.0.0.1:8000/bed/78c0e4753d04b238fc07e4ebe5a02984/metadata?attr_ids=md5sum&attr_ids=genome
-
-----
-## Running the server in Docker
-
-### Building image
-
-- Primary image: `docker build -t databio/bedhost -f .Dockerfile .`
-- Dev image `docker build -t databio/bedhost:dev -f dev.Dockerfile .`
-- Test image: `docker build -t databio/bedhost:dev -f test.Dockerfile .`
-
-Existing images can be found [at dockerhub](https://hub.docker.com/r/databio/bedhost).
-
-### Running container for development
-
-Configuration settings and deployment instructions are in the `bedbase.org` repository.
-
----
-
-## Deploying updates automatically
-
-The `bedhost/databio` image is built by a github action. It will build and push the `latest` image whenever a release is made. It will also tag that release with a tag for the release name.
-
-For the dev tag, you must deploy this through manual dispatch
diff --git a/deployment/Dockerfiles/dev1.Dockerfile b/deployment/Dockerfiles/dev1.Dockerfile
index 79c656b..30bc080 100644
--- a/deployment/Dockerfiles/dev1.Dockerfile
+++ b/deployment/Dockerfiles/dev1.Dockerfile
@@ -1,6 +1,6 @@
FROM databio/bedhost:dev
-COPY config/api-dev.bedbase.org.yaml /bedbase.yaml
+COPY deployment/config/api-dev.bedbase.org.yaml /bedbase.yaml
ENV BEDBASE_CONFIG=/bedbase.yaml
RUN mkdir -p /data/outputs/bedstat_output
diff --git a/deployment/Dockerfiles/primary.Dockerfile b/deployment/Dockerfiles/primary.Dockerfile
index 826575d..f472de9 100644
--- a/deployment/Dockerfiles/primary.Dockerfile
+++ b/deployment/Dockerfiles/primary.Dockerfile
@@ -1,6 +1,6 @@
FROM databio/bedhost:latest
-COPY config/api.bedbase.org.yaml /bedbase.yaml
+COPY deployment/config/api.bedbase.org.yaml /bedbase.yaml
ENV BEDBASE_CONFIG=/bedbase.yaml
RUN mkdir -p /data/outputs/bedstat_output
diff --git a/deployment/README.md b/deployment/README.md
deleted file mode 100644
index 31408ea..0000000
--- a/deployment/README.md
+++ /dev/null
@@ -1,118 +0,0 @@
-# Deploying bedbase.org
-
-This repository deploys the API for bedbase. It will run these services:
-
-1. production API: https://api.bedbase.org/
-2. dev API: https://api-dev.bedbase.org/
-
-This repo will deploy a new service by following these steps:
-
-1. Build an image by packaging the bedhost image (from dockerhub) with the bbconf file in this repository.
-2. Push that image to AWS.
-3. Deploy it to yeti cluster with aws task def.
-
-## Build the container
-
-Here we use the `databio/bedhost` container on dockerhub, and just add the configuration file in this repo to it, so build is super fast.
-
-```
-docker build -t databio/bedhost-configured -f Dockerfiles/primary.Dockerfile .
-```
-
-Or for dev:
-
-```
-docker build -t databio/bedhost-configured-dev -f Dockerfiles/dev1.Dockerfile .
-```
-
-## Run it locally to test
-
-First, source the .env file to set env vars in the calling environment.
-Then, use `--env-file` to pass those env vars through to the container
-
-```
-source environment/production.env
-docker run --rm --network="host" \
- --env-file environment/docker.env \
- databio/bedhost-configured-dev
-```
-
-Here's another example for running the container:
-
-```
-docker run --rm --init -p 8000:8000 --name bedstat-rest-server \
- --network="host" \
- --volume ~/code/bedbase.org/config/api.bedbase.org.yaml:/bedbase.yaml \
- --env-file ../bedbase.org/environment/docker.env \
- --env BEDBASE_CONFIG=/bedbase.yaml \
- databio/bedhost uvicorn bedhost.main:app --reload
-```
-
-## Building the Amazon-tagged version
-
-You could build and push to ECR like this if you need it... but the github action will do this for you.
-
-Authenticate with AWS ECR:
-```
-aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin 235728444054.dkr.ecr.us-east-1.amazonaws.com
-```
-
-Build/tag/push image:
-```
-docker build -t 235728444054.dkr.ecr.us-east-1.amazonaws.com/bedhost -f Dockerfiles/primary.Dockerfile .
-docker push 235728444054.dkr.ecr.us-east-1.amazonaws.com/bedhost
-```
-
-## Upload data to the bedbase AWS S3 bucket
-
-You'll also have to upload the actual artifacts produced by the pipelines into our s3 buckets. These are then hosted via the basic s3 file server, which is configured using the `path: remote_url_base:` in the bedbase configuration yaml.
-
-For this server we're using the `data.bedbase.org` bucket.
-
-Upload all BED files
-```
-cd $BEDBASE_DATA_PATH/bed_files
-aws s3 sync . s3://data.bedbase.org/big_files/ --include '*.bed.gz'
-```
-Upload all .bigBed files
-```
-cd $BEDBASE_DATA_PATH/bigbed_files
-aws s3 sync . s3://data.bedbase.org/bigbed_files/ --exclude 'tmp*' --include '*.bigBed'
-```
-Upload `bedstat` and `bedbuncher` outputs
-```
-cd $BEDBASE_DATA_PATH/outputs
-aws s3 sync . s3://data.bedbase.org/outputs/
-```
-
-
-## Uploading files to S3 (new back-end)
-
-To upload files to S3, we need some credentials. Set the credentials for the aws CLI int to the proper environment variables like this:
-
-```
-source code/bedbase.org/environment/production.env
-```
-
-Then you can list files with `aws s3 ls bedbase`.
-
-```
-aws s3 ls s3://bedbase
-```
-
-And you can add new ones like this:
-
-```
-aws s3 cp s3://bedbase
-```
-
-aws s3 cp $HOME/garage/bedbase_tutorial/out2023/* s3://bedbase
-
-aws s3 sync $HOME/garage/bedbase_tutorial/out2023/output s3://bedbase/output
-
-
-
-```
-bedboss all --sample-name tutorial_f1 --input-file bed_files/GSE105587_ENCFF018NNF_conservative_idr_thresholded_peaks_GRCh38.bed.gz --input-type bed --outfolder out2023 --genome GRCh38 --bedbase-config ~/code/bedbase.org/config/bedbase2.yaml
-```
-
diff --git a/ui/src/motions/landing-animations.tsx b/ui/src/motions/landing-animations.tsx
index 4a0e5be..e172871 100644
--- a/ui/src/motions/landing-animations.tsx
+++ b/ui/src/motions/landing-animations.tsx
@@ -2,7 +2,7 @@ import { motion } from 'framer-motion';
import { PRIMARY_COLOR } from '../const';
const STROKE_WIDTH = 2;
-const STROKE_SPEAD = 8;
+const STROKE_SPEAD = 0;
export const InPaths = () => {
return (