Docker-compose local or production.yml? + Unmarshal errors + field version not found in type model.devType

I am using cookiecutter-django and it creates two docker-compose files:

local.yml and production.yml — so in this context, which one do we use for okteto, now that the lines between local and production are blurred from my perspective?

Or is this better managed with a single okteto.yml? Also, why is docker-compose.yml not even recommended by a number of people?

@ramiro

Also why:

okteto deploy --build -f local.yml
x Invalid manifest:

x Syntax error volumes should be 'local_path:remote_path' or 'remote_path'

Can you share the contents of the docker-compose files? BTW, have you seen How to Develop Django and Postgres Applications with Docker and Okteto CLI 2.0 ?

Hi @ramiro

The main difference in the scaffolded production.yml is the presence of traefik and the absence of the django bind-mount volume:

local.yml

volumes:
  # Named volumes. Use an explicit empty mapping `{}` for "default settings":
  # a bare `key:` parses as YAML null, which Compose tolerates but yamllint
  # flags (`empty-values`) and which reads as ambiguous intent.
  local_mongodb_data: {}
  elasticsearch-data: {}
  logs_volume: {}

services:
  # Django application container. The `&django` anchor allows other services
  # in this file to reuse this definition via `*django` / `<<: *django`.
  django: &django
    build:
      context: .
      dockerfile: ./compose/local/django/Dockerfile
    image: local_django:v1.0.0
    container_name: local_django
    # Controls startup order only — it does NOT wait for these services to
    # actually be ready to accept connections.
    depends_on:
      - mongo
      - elasticsearch
      - kibana

    volumes:
      - .:/app:z  # bind-mount source tree for live reload; :z relabels for SELinux
      - logs_volume:/app/config/logs  # shared with filebeat (mounted there as /app/logs)
      - ../samples:/uploaddata

    env_file:
      - ./.envs/.local/.django
      - ./.envs/.local/.mongodb
    ports:
      - "8000:8000"
      - "3000:3000"
    environment:
      - LOGSTASH_HOST=logstash
    command: /start
    # Keep stdin/tty open so interactive debuggers (e.g. pdb) can attach.
    stdin_open: true
    tty: true

  # MongoDB backing store for the django service.
  mongo:
    image: mongo:5.0.6
    container_name: "mongo"
    restart: always
    env_file:
      - ./.envs/.local/.mongodb
    environment:
      - MONGO_INITDB_ROOT_USERNAME=XXXX
      - MONGO_INITDB_ROOT_PASSWORD=XXXX
      - MONGO_INITDB_DATABASE=XXXX
      - MONGO_INITDB_USERNAME=XXXX
      - MONGO_INITDB_PASSWORD=XXXX
    volumes:
      - local_mongodb_data:/data/db
    ports:
      # Quoted: a bare digits-and-colons scalar (27017:27017) is parsed as a
      # base-60 (sexagesimal) integer by YAML 1.1 parsers, silently breaking
      # the port mapping.
      - "27017:27017"

  # Filebeat log shipper: reads the django logs from the shared logs_volume
  # and forwards them (configured via /etc/filebeat/filebeat.yml in the image).
  filebeat:
    build: ./filebeat
    container_name: "filebeat"
    #    restart: always
    env_file:
      - ./.envs/.local/.filebeat
    #    environment:
    volumes:
      - logs_volume:/app/logs
    # -e logs to stderr, -d "*" enables all debug selectors,
    # -strict.perms=false skips config-file ownership/permission checks.
    command: filebeat -c /etc/filebeat/filebeat.yml -e -d "*" -strict.perms=false
    depends_on:
      - django

  # Single-node Elasticsearch with security disabled — suitable for local
  # development only.
  elasticsearch:
    image: elasticsearch:7.17.6
    container_name: elasticsearch
    environment:
      - discovery.type=single-node
      - xpack.security.enabled=false
    volumes:
      - elasticsearch-data:/usr/share/elasticsearch/data
    ports:
      - "9200:9200"

  # Logstash pipeline; the django service points at it via LOGSTASH_HOST.
  logstash:
    restart: always
    build: ./logstash
    image: arthur_logstash:7.17.6
    volumes:
      - ./:/logstash_dir  # project root mounted so the pipeline config is available
    # command: logstash -f /logstash_dir/logstash.conf
    depends_on:
      - elasticsearch
    ports:
      - "5959:5959"
      - "5044:5044"

  # Kibana UI on top of Elasticsearch.
  kibana:
    image: kibana:7.17.6
    container_name: django_elk_kibana
    ports:
      - "5601:5601"
    environment:
      - ELASTICSEARCH_URL=http://elasticsearch:9200
    depends_on:
      - elasticsearch
      - logstash

production.yml

volumes:
  # Named volumes. `logs_volume` normalized to an explicit `{}` to match the
  # other entries — a bare `key:` parses as YAML null (yamllint `empty-values`).
  production_mongodb_data: {}
  production_traefik: {}
  elasticsearch-data: {}
  logs_volume: {}

services:
  # Django application container (production build). The `&django` anchor
  # allows reuse of this definition via `*django` / `<<: *django`.
  django: &django
    build:
      context: .
      dockerfile: ./compose/production/django/Dockerfile
    image: myacr.azurecr.io/my_production_django
    container_name: my_production_django
    # Startup ordering only — does NOT wait for dependencies to be ready.
    depends_on:
      - mongo
      - elasticsearch
      - kibana

    env_file:
      - ./.envs/.production/.django
      - ./.envs/.production/.mongodb

    ports:
      - "8000:8000"
      - "3000:3000"

    command: /start
    stdin_open: true
    tty: true

    # kompose hints for conversion to Kubernetes manifests. Label values must
    # be strings: unquoted 8080 and 3 parse as YAML integers and fail
    # Compose/kompose schema validation, so numeric values are quoted.
    labels:
      kompose.service.group: sidecar

      kompose.service.healthcheck.liveness.http_get_path: /health/ping
      kompose.service.healthcheck.liveness.http_get_port: "8080"

      kompose.service.healthcheck.readiness.test: CMD curl -f "http://localhost:8080/health/ping"
      kompose.service.healthcheck.readiness.interval: 10s
      kompose.service.healthcheck.readiness.timeout: 10s
      kompose.service.healthcheck.readiness.retries: "3"
      kompose.service.healthcheck.readiness.start_period: 30s
      kompose.image-pull-secret: "acr-secret"
      kompose.image-pull-policy: "IfNotPresent"

  mongo:
    image: myacr.azurecr.io/mongo
    container_name: "mongo"
    restart: always
    env_file:
      - ./.envs/.production/.mongodb
    environment:
      - MONGO_INITDB_ROOT_USERNAME=XXXX
      - MONGO_INITDB_ROOT_PASSWORD=XXXX
      - MONGO_INITDB_DATABASE=XXXX       
      - MONGO_INITDB_USERNAME=XXXX
      - MONGO_INITDB_PASSWORD=XXXX
    volumes:
      - production_mongodb_data:/data/db
      # NOTE(review): the two entries below are port mappings mis-nested under
      # `volumes:` — they belong under a separate `ports:` key. This is what
      # triggers the "Syntax error volumes should be ..." manifest error.
      - 27017:27017
      - 9216:9216 # mongodb-exporter

    labels:
      kompose.image-pull-secret: "my-secret"
      kompose.image-pull-policy: "IfNotPresent"


  # Filebeat log shipper, reading django logs from the shared logs_volume.
  filebeat:
    build: ./filebeat
    container_name: "filebeat"
    #    restart: always
    env_file:
      # NOTE(review): this production manifest references the .local env file —
      # presumably it should be ./.envs/.production/.filebeat; verify whether a
      # production filebeat env file exists before changing.
      - ./.envs/.local/.filebeat
    #    environment:
    volumes:
      - logs_volume:/app/logs
    # -e logs to stderr, -d "*" enables all debug selectors,
    # -strict.perms=false skips config-file ownership/permission checks.
    command: filebeat -c /etc/filebeat/filebeat.yml -e -d "*" -strict.perms=false
    depends_on:
      - django

  # Single-node Elasticsearch with security disabled.
  # NOTE(review): xpack.security.enabled=false in a production manifest is
  # worth a second look.
  elasticsearch:
    image: elasticsearch:7.17.6
    container_name: elasticsearch
    environment:
      - discovery.type=single-node
      - xpack.security.enabled=false
    volumes:
      - elasticsearch-data:/usr/share/elasticsearch/data
    ports:
      - "9200:9200"

  # Logstash pipeline (same definition as in local.yml).
  logstash:
    restart: always
    build: ./logstash
    image: arthur_logstash:7.17.6
    volumes:
      - ./:/logstash_dir  # project root mounted so the pipeline config is available
    # command: logstash -f /logstash_dir/logstash.conf
    depends_on:
      - elasticsearch
    ports:
      - "5959:5959"
      - "5044:5044"

  # Kibana UI on top of Elasticsearch (same definition as in local.yml).
  kibana:
    image: kibana:7.17.6
    container_name: django_elk_kibana
    ports:
      - "5601:5601"
    environment:
      - ELASTICSEARCH_URL=http://elasticsearch:9200
    depends_on:
      - elasticsearch
      - logstash

  # Traefik reverse proxy / TLS terminator in front of django.
  traefik:
    build:
      context: .
      dockerfile: ./compose/production/traefik/Dockerfile
    # Fixed image-name typo: "roduction_traefik" -> "production_traefik"
    # (matches container_name and the production_traefik named volume).
    image: myacr.azurecr.io/production_traefik
    container_name: production_traefik

    depends_on:
      - django
    volumes:
      - production_traefik:/etc/traefik/acme:z  # persisted ACME/Let's Encrypt state
      # Required for Traefik to listen to the Docker events.
      # Fixed path typo: the Docker daemon socket is docker.sock, not docker.dock.
      - /var/run/docker.sock:/var/run/docker.sock:ro
    ports:
      - "0.0.0.0:80:80"
      - "0.0.0.0:443:443"
    env_file:
      - ./.envs/.production/.traefik
    command: /start

(post deleted by author)

Yes, I followed it, but:

ok deploy --build --wait
i Using XXX @ cloud.okteto.com as context
i Building ‘Dockerfile’ in tcp://buildkit.cloud.okteto.net:1234…
[+] Building 0.5s (2/2) FINISHED
=> [internal] load build definition from buildkit-2780937346 0.4s
=> => transferring dockerfile: 422B 0.4s
=> [internal] load .dockerignore 0.3s
=> => transferring context: 2B 0.3s
x Error building service ‘app’: error building image ‘registry.cloud.okteto.net/XXX/django-docker-okteto-app:okteto’: build failed: failed to solve: dockerfile parse error on line 9: unknown instruction: gcc

The manifests that you share have some issues. Maybe it’s a copy/paste issue?

For example, in mongo service, inside the volumes key, it seems like you are mixing volumes and ports

mongo:
    image: myacr.azurecr.io/mongo
    container_name: "mongo"
    restart: always
    env_file:
      - ./.envs/.production/.mongodb
    environment:
      - MONGO_INITDB_ROOT_USERNAME=XXXX
      - MONGO_INITDB_ROOT_PASSWORD=XXXX
      - MONGO_INITDB_DATABASE=XXXX       
      - MONGO_INITDB_USERNAME=XXXX
      - MONGO_INITDB_PASSWORD=XXXX
    volumes:
      - production_mongodb_data:/data/db
      - 27017:27017
      - 9216:9216 # mongodb-exporter

I think that should be:

mongo:
    image: myacr.azurecr.io/mongo
    container_name: "mongo"
    restart: always
    env_file:
      - ./.envs/.production/.mongodb
    environment:
      - MONGO_INITDB_ROOT_USERNAME=XXXX
      - MONGO_INITDB_ROOT_PASSWORD=XXXX
      - MONGO_INITDB_DATABASE=XXXX       
      - MONGO_INITDB_USERNAME=XXXX
      - MONGO_INITDB_PASSWORD=XXXX
    volumes:
      - production_mongodb_data:/data/db
    ports:
      - 27017:27017
      - 9216:9216 # mongodb-exporter

This means that your Dockerfile has an issue. You might be missing the RUN keyword at the beginning of the command.