From f526bb7bf8cc4118b4b8b64f5b66cc96ebccb37c Mon Sep 17 00:00:00 2001
From: James Edington
Date: Mon, 14 Mar 2022 09:24:48 -0500
Subject: [PATCH] Initial commit: feature/docker-compose

---
 Dockerfile         | 12 ++++++++----
 config/docker.exs  |  1 +
 docker-compose.yml | 79 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 88 insertions(+), 4 deletions(-)
 create mode 100644 docker-compose.yml

diff --git a/Dockerfile b/Dockerfile
index db1a6b457..42feef173 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,6 +17,9 @@ FROM alpine:3.11
 ARG BUILD_DATE
 ARG VCS_REF
 
+ARG UID=911
+ARG GID=911
+
 LABEL maintainer="ops@pleroma.social" \
     org.opencontainers.image.title="pleroma" \
     org.opencontainers.image.description="Pleroma for Docker" \
@@ -34,7 +37,8 @@ ARG DATA=/var/lib/pleroma
 RUN echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories &&\
 	apk update &&\
-	apk add exiftool ffmpeg imagemagick libmagic ncurses postgresql-client &&\
-	adduser --system --shell /bin/false --home ${HOME} pleroma &&\
+	apk add curl exiftool ffmpeg imagemagick libmagic ncurses postgresql-client &&\
+	addgroup --gid ${GID} pleroma &&\
+	adduser --system --shell /bin/false --home ${HOME} --uid ${UID} --ingroup pleroma pleroma &&\
 	mkdir -p ${DATA}/uploads &&\
 	mkdir -p ${DATA}/static &&\
 	chown -R pleroma ${DATA} &&\
@@ -44,10 +48,10 @@ RUN echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/
 USER pleroma
 
 COPY --from=build --chown=pleroma:0 /release ${HOME}
+ENV PATH="${HOME}/bin:${PATH}"
 
 COPY ./config/docker.exs /etc/pleroma/config.exs
-COPY ./docker-entrypoint.sh ${HOME}
 
 EXPOSE 4000
 
-ENTRYPOINT ["/opt/pleroma/docker-entrypoint.sh"]
+CMD ["pleroma", "start"]
diff --git a/config/docker.exs b/config/docker.exs
index f9f27d141..80ec69935 100644
--- a/config/docker.exs
+++ b/config/docker.exs
@@ -26,6 +26,7 @@ config :web_push_encryption, :vapid_details, subject: "mailto:#{System.get_env("
 config :pleroma, :database, rum_enabled: false
 config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
 config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
+config :pleroma, configurable_from_database: true
 
 # We can't store the secrets in this file, since this is baked into the docker image
 if not File.exists?("/var/lib/pleroma/secret.exs") do
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 000000000..2481d901b
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,79 @@
+version: '3'
+services:
+
+  web:
+    build: .
+    image: pleroma/pleroma
+    restart: always
+#    env_file: .env.production
+    networks:
+      - external_network
+      - internal_network
+    healthcheck:
+      test: ["CMD", "curl", "-s", "-f", "--noproxy", "localhost:4000", "localhost:4000/health"]
+    ports:
+      - "127.0.0.1:4000:4000"
+    depends_on:
+      - db
+#      - es
+    volumes:
+      - ./uploads:/var/lib/pleroma/uploads
+      - ./config:/var/lib/pleroma/config
+    environment:
+      - "DB_PASS="
+
+  db:
+    restart: always
+    image: postgres:14-alpine
+#    shm_size: 256mb
+    networks:
+      - internal_network
+    healthcheck:
+      test: ["CMD", "pg_isready", "-U", "postgres"]
+    volumes:
+      - ./postgres:/var/lib/postgresql/data
+    environment:
+      - "POSTGRES_HOST_AUTH_METHOD=trust"
+
+## https://coffee-and-dreams.uk/tutorials/2021/12/15/integrating-elasticsearch-with-pleroma.html
+## https://git.pleroma.social/pleroma/pleroma/-/issues/1331
+#  es:
+#    restart: always
+#    image: docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.2
+#    environment:
+#      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+#      - "cluster.name=es-pleroma"
+#      - "discovery.type=single-node"
+#      - "bootstrap.memory_lock=true"
+#    networks:
+#      - internal_network
+#    healthcheck:
+#      test: ["CMD", "curl", "-s", "-f", "localhost:9200/_cluster/health"]
+#    volumes:
+#      - ./elasticsearch:/usr/share/elasticsearch/data
+#    ulimits:
+#      memlock:
+#        soft: -1
+#        hard: -1
+
+## Uncomment to enable federation with tor instances along with adding the following ENV variables
+## http_proxy=http://privoxy:8118
+## ALLOW_ACCESS_TO_HIDDEN_SERVICE=true
+#  tor:
+#    image: sirboops/tor
+#    networks:
+#      - external_network
+#      - internal_network
+#
+#  privoxy:
+#    image: sirboops/privoxy
+#    volumes:
+#      - ./priv-config:/opt/config
+#    networks:
+#      - external_network
+#      - internal_network
+
+networks:
+  external_network:
+  internal_network:
+    internal: true