Error when configuring email in OpenRemote deployment

Hi everyone,

I’m trying to test the email functionality in my OpenRemote deployment using Docker Compose, but I’m running into some issues. I followed the documentation and set up my docker-compose.yml file with the email settings as below:

# OpenRemote v3
#
# Profile for deploying the custom stack; uses deployment-data named volume
# to expose customisations to the manager and keycloak images. To run this profile you need to specify the following
# environment variables:
#
#    OR_ADMIN_PASSWORD - Initial admin user password
#    OR_HOSTNAME - FQDN hostname of where this instance will be exposed (localhost, IP address or public domain)
#    DEPLOYMENT_VERSION - Tag to use for deployment image (must match the tag used when building the deployment image)
#
# Please see openremote/profile/deploy.yml for configuration details for each service.
#
# To perform updates, build code and prepare Docker images:
#
#   ./gradlew clean installDist
#
# Then recreate deployment image:
#
#  DEPLOYMENT_VERSION=$(git rev-parse --short HEAD)
#  MANAGER_VERSION=$(cd openremote; git rev-parse --short HEAD; cd ..)
#  docker build -t openremote/manager:$MANAGER_VERSION ./openremote/manager/build/install/manager/
#  docker build -t openremote/custom-deployment:$DEPLOYMENT_VERSION ./deployment/build/
#  docker-compose -p custom down
#  docker volume rm custom_deployment-data
#  Do the following volume rm command if you want a clean install (wipe all existing data)
#  docker volume rm custom_postgresql-data
#  OR_ADMIN_PASSWORD=secret OR_HOSTNAME=my.domain.com docker-compose -p custom up -d
#
# All data is kept in volumes. Create a backup of the volumes to preserve data.
#
version: "3.9"

volumes:
  proxy-data:
  deployment-data:
  postgresql-data:
  manager-data:

# Add an NFS volume to the stack
#  efs-data:
#    driver: local
#    driver_opts:
#      type: nfs
#      o: "addr=${EFS_DNS?DNS must be set to mount NFS volume},rw,nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport"
#      device: ":/"

x-logging: &awslogs
  logging:
    driver: awslogs
    options:
      awslogs-region: ${AWS_REGION:-eu-west-1}
      awslogs-group: ${OR_HOSTNAME}
      awslogs-create-group: "true"
      tag: "{{.Name}}/{{.ID}}"

services:
  # This service will only populate an empty volume on startup and then exit.
  # If the volume already contains data, it exits immediately.
  deployment:
    image: openremote/deployment:${DEPLOYMENT_VERSION:-latest}
    volumes:
      - deployment-data:/deployment

  proxy:
    image: openremote/proxy:${PROXY_VERSION:-latest}
    restart: always
    depends_on:
      manager:
        condition: service_healthy
    ports:
      - "80:80"
      - "443:443"
      - "8883:8883"
      - "1883:1883"
    volumes:
      - proxy-data:/deployment
      - deployment-data:/data
    environment:
      LE_EMAIL: ${OR_EMAIL_ADMIN}
      DOMAINNAME: ${OR_HOSTNAME:-0.0.0.0}
      DOMAINNAMES: ${OR_ADDITIONAL_HOSTNAMES:-}
      # USE A CUSTOM PROXY CONFIG - COPY FROM https://github.com/openremote/proxy/blob/main/haproxy.cfg
      #HAPROXY_CONFIG: '/data/proxy/haproxy.cfg'
    <<: *awslogs

  postgresql:
    image: openremote/postgresql:${POSTGRESQL_VERSION:-latest}
    restart: always
    volumes:
      - postgresql-data:/var/lib/postgresql/data
      - manager-data:/storage
    <<: *awslogs

  keycloak:
    image: openremote/keycloak:${KEYCLOAK_VERSION:-latest}
    restart: always
    depends_on:
      postgresql:
        condition: service_healthy
    volumes:
      - deployment-data:/deployment
    environment:
      KEYCLOAK_ADMIN_PASSWORD: ${OR_ADMIN_PASSWORD:-admin}
      KC_HOSTNAME: ${OR_HOSTNAME:-localhost}
      KC_HOSTNAME_PORT: ${OR_SSL_PORT:--1}
    <<: *awslogs

  manager:
    #    image: openremote/manager:${MANAGER_VERSION:-latest}
    image: openremote/manager:latest
    #    image: openremote/manager:manhv3
    restart: always
    depends_on:
      keycloak:
        condition: service_healthy
    volumes:
      - manager-data:/storage
      - deployment-data:/deployment
      # Map data should be accessed from a volume mount
      # 1). Host filesystem - /deployment.local:/deployment.local
      # 2) NFS/EFS network mount - efs-data:/efs
    environment:
      # Here are some typical environment variables you want to set
      # see openremote/profile/deploy.yml for details
      OR_ADMIN_PASSWORD: ${OR_ADMIN_PASSWORD:-admin}
      OR_SETUP_TYPE: # Typical values to support are staging and production
      OR_SETUP_RUN_ON_RESTART:
      OR_EMAIL_HOST: smtp.gmail.com
      OR_EMAIL_USER: ngmdang1306@gmail.com
      OR_EMAIL_PASSWORD: ****************
      OR_EMAIL_X_HEADERS:
      OR_EMAIL_FROM: ngmdang1306@gmail.com
      OR_EMAIL_ADMIN:
      OR_HOSTNAME: ${OR_HOSTNAME:-0.0.0.0}
      OR_ADDITIONAL_HOSTNAMES: ${OR_ADDITIONAL_HOSTNAMES:-}
      OR_SSL_PORT: ${OR_SSL_PORT:--1}
      OR_DEV_MODE: ${OR_DEV_MODE:-false}
      OR_MAP_TILES_PATH: "/efs/europe.mbtiles"
      #OR_MAP_TILES_PATH: '/efs/europe.mbtiles'
    <<: *awslogs

Then I run:

docker-compose down
docker-compose up -d

but it doesn’t seem to work.

I get this error in the terminal:

PS C:\Users\dangd\iotplatform\iotplatform>    docker-compose down
time="2025-05-12T10:51:27+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:27+07:00" level=warning msg="The \"OR_EMAIL_ADMIN\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:27+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:27+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:27+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:27+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:27+07:00" level=warning msg="C:\\Users\\dangd\\iotplatform\\iotplatform\\docker-compose.yml: the attribute `version` is obsolete, it will be ignored, please remove it to avoid potential confusion"
PS C:\Users\dangd\iotplatform\iotplatform>    docker-compose up -d
time="2025-05-12T10:51:28+07:00" level=warning msg="The \"OR_EMAIL_ADMIN\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:28+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:28+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:28+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:28+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:28+07:00" level=warning msg="The \"OR_HOSTNAME\" variable is not set. Defaulting to a blank string."
time="2025-05-12T10:51:28+07:00" level=warning msg="C:\\Users\\dangd\\iotplatform\\iotplatform\\docker-compose.yml: the attribute `version` is obsolete, it will be ignored, please remove it to avoid potential confusion"
[+] Running 1/1
 ✘ deployment Error pull access denied for openremote/deployment, repository does not exist or may require 'docker login'                                                 1.8s 
Error response from daemon: pull access denied for openremote/deployment, repository does not exist or may require 'docker login'
PS C:\Users\dangd\iotplatform\iotplatform>

Hi!

The openremote/deployment image is not on Docker Hub.
It’s a local image you build with the Dockerfile in the /deployment/build folder.
This imports all your custom configurations and code.

  1. First, run ./gradlew clean installDist to build dependencies and compile the code.
    Afterwards, you should see a new deployment/build directory.
    This includes all your custom themes, configs, and any custom code you’ve written.

  2. Then, use docker build to build the openremote/deployment image;

docker build --platform <linux/amd64 or linux/aarch64> -t openremote/deployment:<tag> deployment/build
  3. Now, you can deploy your stack like normal; docker-compose up -d (see the sketch after this list).
    On startup, it will inject all custom themes, configs, and custom code into the manager.

  4. (Optionally, you can remove the deployment container and/or image.)
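
Putting those steps together, a typical sequence looks roughly like this (the my-tag tag and the linux/amd64 platform are example values I’m assuming here; keep the tag consistent with DEPLOYMENT_VERSION):

# build the code (on Windows use gradlew.bat)
./gradlew clean installDist

# build the deployment image with a tag of your choice
docker build --platform linux/amd64 -t openremote/deployment:my-tag deployment/build

# start the stack, pointing DEPLOYMENT_VERSION at that same tag
DEPLOYMENT_VERSION=my-tag docker-compose up -d
# (PowerShell: $env:DEPLOYMENT_VERSION="my-tag"; docker-compose up -d)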

For your information; you cannot use 0.0.0.0 as a hostname.
Normally, you’d use localhost, an IP address, or a domain like mywebsite.com.
Multiple hostnames are supported using the OR_ADDITIONAL_HOSTNAMES variable.
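
For example, when starting the stack (placeholder values, assuming a Linux/macOS shell; in PowerShell set the variables with $env: instead):

# single hostname (an IP address works too)
OR_HOSTNAME=my.domain.com OR_ADMIN_PASSWORD=secret docker-compose -p custom up -d

# extra hostnames go in OR_ADDITIONAL_HOSTNAMES
OR_HOSTNAME=my.domain.com OR_ADDITIONAL_HOSTNAMES=other.domain.com docker-compose -p custom up -d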


Hi Martin,

Thank you so much for your clear and helpful explanation!

I’ve successfully built the image and applied my configurations. Now I’d like to test the email functionality in the project. Could you please guide me on how to configure and test the email feature properly?

Thanks again for your support!

By the way, I have one more question. For the notification feature in the custom project, is there an existing UI that is just hidden or disabled, or do I need to implement the UI myself from scratch?

Hi!

Configuring the mail server is done using environment variables;

# Configure Email, SMTP server to send password reset emails etc.
OR_EMAIL_HOST:
OR_EMAIL_USER:
OR_EMAIL_PASSWORD:
OR_EMAIL_PORT:
OR_EMAIL_TLS:

# Where to send admin emails to
OR_EMAIL_ADMIN:

# From address for emails sent by the system
OR_EMAIL_FROM:

# Optional oAuth2 configuration to use instead of username/password
OR_EMAIL_OAUTH2_URL: # url to request an access token
OR_EMAIL_OAUTH2_CLIENT_ID:
OR_EMAIL_OAUTH2_CLIENT_SECRET:

It should work once those are configured.
If not, any errors should appear in the container logs during startup: docker logs or-manager-1
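
For example, a quick way to pick out mail-related lines (just an illustrative filter; the exact log messages vary):

docker logs or-manager-1 2>&1 | grep -i -E "mail|smtp"
# PowerShell equivalent:
docker logs or-manager-1 2>&1 | Select-String -Pattern "mail", "smtp"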

In the case of smtp.gmail.com, i.e. Gmail, it works a bit differently.
I remember you need to use an app password instead of your regular one.
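
As a rough sketch, the manager’s environment section could then look something like this (the address, app password, and port are placeholder/assumed values; Gmail normally uses port 587 with STARTTLS, but double-check against your own account):

    environment:
      OR_EMAIL_HOST: smtp.gmail.com
      OR_EMAIL_PORT: 587
      OR_EMAIL_TLS: "true"
      OR_EMAIL_USER: you@gmail.com
      OR_EMAIL_PASSWORD: your-16-char-app-password # Gmail app password, not the account password
      OR_EMAIL_FROM: you@gmail.com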


Sending email notifications can be done through When-Then rules;

or by using Alarms;

We’re busy with a notification page, listing all notifications being sent/received,
but that is still a work in progress. You can track this PR on GitHub here.

Or, if it doesn’t fit your needs, you can always build something custom.
You can use org.openremote.manager.notification.NotificationService for that. :wink:


I used the app password and configured it in the docker-compose.yml file. Then I ran Docker again and tried testing with a rule, but nothing was sent to Gmail. Can you help me?

# OpenRemote v3
#
# Profile for deploying the custom stack; uses deployment-data named volume
# to expose customisations to the manager and keycloak images. To run this profile you need to specify the following
# environment variables:
#
#    OR_ADMIN_PASSWORD - Initial admin user password
#    OR_HOSTNAME - FQDN hostname of where this instance will be exposed (localhost, IP address or public domain)
#    DEPLOYMENT_VERSION - Tag to use for deployment image (must match the tag used when building the deployment image)
#
# Please see openremote/profile/deploy.yml for configuration details for each service.
#
# To perform updates, build code and prepare Docker images:
#
#   ./gradlew clean installDist
#
# Then recreate deployment image:
#
#  DEPLOYMENT_VERSION=$(git rev-parse --short HEAD)
#  MANAGER_VERSION=$(cd openremote; git rev-parse --short HEAD; cd ..)
#  docker build -t openremote/manager:$MANAGER_VERSION ./openremote/manager/build/install/manager/
#  docker build -t openremote/custom-deployment:$DEPLOYMENT_VERSION ./deployment/build/
#  docker-compose -p custom down
#  docker volume rm custom_deployment-data
#  Do the following volume rm command if you want a clean install (wipe all existing data)
#  docker volume rm custom_postgresql-data
#  OR_ADMIN_PASSWORD=secret OR_HOSTNAME=my.domain.com docker-compose -p custom up -d
#
# All data is kept in volumes. Create a backup of the volumes to preserve data.
#
version: '3.9'

volumes:
  proxy-data:
  deployment-data:
  postgresql-data:
  manager-data:

# Add an NFS volume to the stack
#  efs-data:
#    driver: local
#    driver_opts:
#      type: nfs
#      o: "addr=${EFS_DNS?DNS must be set to mount NFS volume},rw,nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport"
#      device: ":/"

#x-logging: &awslogs
#  logging:
#    driver: awslogs
#    options:
#      awslogs-region: ${AWS_REGION:-eu-west-1}
#      awslogs-group: ${OR_HOSTNAME}
#      awslogs-create-group: "true"
#      tag: "{{.Name}}/{{.ID}}"

services:
  # This service will only populate an empty volume on startup and then exit.
  # If the volume already contains data, it exits immediately.
#  deployment:
#    image: openremote/deployment:${DEPLOYMENT_VERSION:-latest}
#    volumes:
#      - deployment-data:/deployment

  proxy:
    image: openremote/proxy:${PROXY_VERSION:-latest}
    restart: always
    depends_on:
      manager:
        condition: service_healthy
    ports:
      - "80:80"
      - "443:443"
      - "8883:8883"
      - "1884:1883"
    volumes:
      - proxy-data:/deployment
      - deployment-data:/data
    environment:
      LE_EMAIL: ${OR_EMAIL_ADMIN}
      DOMAINNAME: ${OR_HOSTNAME:-0.0.0.0}
      DOMAINNAMES: ${OR_ADDITIONAL_HOSTNAMES:-}
      # USE A CUSTOM PROXY CONFIG - COPY FROM https://github.com/openremote/proxy/blob/main/haproxy.cfg
      #HAPROXY_CONFIG: '/data/proxy/haproxy.cfg'
#    <<: *awslogs

  postgresql:
    image: openremote/postgresql:${POSTGRESQL_VERSION:-latest}
    restart: always
    volumes:
      - postgresql-data:/var/lib/postgresql/data
      - manager-data:/storage
#    <<: *awslogs

  keycloak:
    image: openremote/keycloak:${KEYCLOAK_VERSION:-latest}
    restart: always
    depends_on:
      postgresql:
        condition: service_healthy
    volumes:
      - deployment-data:/deployment
    environment:
      KEYCLOAK_ADMIN_PASSWORD: ${OR_ADMIN_PASSWORD:-admin}
      KC_HOSTNAME: ${OR_HOSTNAME:-localhost}
      KC_HOSTNAME_PORT: ${OR_SSL_PORT:--1}
#    <<: *awslogs

  manager:
    #    image: openremote/manager:${MANAGER_VERSION:-latest}
    image: openremote/manager:${MANAGER_VERSION:-latest}
    #    image: openremote/manager:manhv3
    restart: always
    depends_on:
      keycloak:
        condition: service_healthy
    volumes:
      - manager-data:/storage
      - deployment-data:/deployment
      # Map data should be accessed from a volume mount
      # 1). Host filesystem - /deployment.local:/deployment.local
      # 2) NFS/EFS network mount - efs-data:/efs
    environment:
      # Here are some typical environment variables you want to set
      # see openremote/profile/deploy.yml for details
      OR_ADMIN_PASSWORD: ${OR_ADMIN_PASSWORD:-admin}
      OR_SETUP_TYPE: # Typical values to support are staging and production
      OR_SETUP_RUN_ON_RESTART:
      OR_EMAIL_HOST: smtp.gmail.com
      OR_EMAIL_USER: ngmdang1306@gmail.com
      OR_EMAIL_PASSWORD: wdglgcjjavgvawbi
      OR_EMAIL_X_HEADERS:
      OR_EMAIL_FROM: ngmdang1306@gmail.com
      OR_EMAIL_ADMIN:
      OR_EMAIL_PORT: ${OR_EMAIL_PORT:-587}
      OR_EMAIL_TLS: true
      OR_HOSTNAME: localhost
      OR_ADDITIONAL_HOSTNAMES: ${OR_ADDITIONAL_HOSTNAMES:-}
      OR_SSL_PORT: ${OR_SSL_PORT:--1}
      OR_DEV_MODE: ${OR_DEV_MODE:-false}
      OR_MAP_TILES_PATH: "/efs/europe.mbtiles"
      #OR_MAP_TILES_PATH: '/efs/europe.mbtiles'
#    <<: *awslogs