Datadog monitoring

Hello,

I am trying to set up Datadog monitoring in my Airbyte deployment using the information I found in this thread.

I am currently getting the Docker events, showing that the client setup was successful; however, I am not getting any logs.

This is what my current YAML file looks like:

version: "3.7"
# https://github.com/compose-spec/compose-spec/blob/master/spec.md#using-extensions-as-fragments
# Shared logging fragment: json-file driver with rotation. The Datadog agent
# tails these json-file logs from /var/lib/docker/containers, so every service
# that should ship logs must use this (or another file-based) logging config.
x-logging: &default-logging
  options:
    max-size: "100m"
    max-file: "5"
  driver: json-file
services:
  # hook in case we need to add init behavior
  # every root service (no depends_on) should depend on init
  init:
    image: airbyte/init:${VERSION}
    logging: *default-logging
    container_name: init
    command: /bin/sh -c "./scripts/create_mount_directories.sh /local_parent ${HACK_LOCAL_ROOT_PARENT} ${LOCAL_ROOT}"
    environment:
      - LOCAL_ROOT=${LOCAL_ROOT}
      - HACK_LOCAL_ROOT_PARENT=${HACK_LOCAL_ROOT_PARENT}
    volumes:
      - ${HACK_LOCAL_ROOT_PARENT}:/local_parent
  bootloader:
    image: airbyte/bootloader:${VERSION}
    logging: *default-logging
    container_name: airbyte-bootloader
    environment:
      - AIRBYTE_VERSION=${VERSION}
      - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-}
      - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-}
      - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_URL=${DATABASE_URL}
      - DATABASE_USER=${DATABASE_USER}
      - LOG_LEVEL=${LOG_LEVEL}
      - RUN_DATABASE_MIGRATION_ON_STARTUP=${RUN_DATABASE_MIGRATION_ON_STARTUP}
  db:
    image: airbyte/db:${VERSION}
    logging: *default-logging
    container_name: airbyte-db
    restart: unless-stopped
    environment:
      - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-}
      - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-}
      - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_URL=${DATABASE_URL}
      - DATABASE_USER=${DATABASE_USER}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
      - POSTGRES_USER=${DATABASE_USER}
    volumes:
      - db:/var/lib/postgresql/data
  worker:
    image: airbyte/worker:${VERSION}
    logging: *default-logging
    container_name: airbyte-worker
    restart: unless-stopped
    environment:
      - AIRBYTE_VERSION=${VERSION}
      - PUBLISH_METRICS=true
      # Points the Airbyte metric client at the dd-agent container's DogStatsD port.
      - DD_AGENT_HOST=dd-agent
      - DD_DOGSTATSD_PORT=8125
      - AUTO_DISABLE_FAILING_CONNECTIONS=${AUTO_DISABLE_FAILING_CONNECTIONS}
      - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-}
      - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-}
      - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-}
      - CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=${CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION:-}
      - CONFIG_ROOT=${CONFIG_ROOT}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_URL=${DATABASE_URL}
      - DATABASE_USER=${DATABASE_USER}
      - DEPLOYMENT_MODE=${DEPLOYMENT_MODE}
      - JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=${JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION:-}
      - JOB_MAIN_CONTAINER_CPU_LIMIT=${JOB_MAIN_CONTAINER_CPU_LIMIT}
      - JOB_MAIN_CONTAINER_CPU_REQUEST=${JOB_MAIN_CONTAINER_CPU_REQUEST}
      - JOB_MAIN_CONTAINER_MEMORY_LIMIT=${JOB_MAIN_CONTAINER_MEMORY_LIMIT}
      - JOB_MAIN_CONTAINER_MEMORY_REQUEST=${JOB_MAIN_CONTAINER_MEMORY_REQUEST}
      - LOCAL_DOCKER_MOUNT=${LOCAL_DOCKER_MOUNT}
      - LOCAL_ROOT=${LOCAL_ROOT}
      - LOG_LEVEL=${LOG_LEVEL}
      - MAX_CHECK_WORKERS=${MAX_CHECK_WORKERS}
      - MAX_DISCOVER_WORKERS=${MAX_DISCOVER_WORKERS}
      - MAX_SPEC_WORKERS=${MAX_SPEC_WORKERS}
      - MAX_SYNC_WORKERS=${MAX_SYNC_WORKERS}
      - SECRET_PERSISTENCE=${SECRET_PERSISTENCE}
      - SYNC_JOB_MAX_ATTEMPTS=${SYNC_JOB_MAX_ATTEMPTS}
      - SYNC_JOB_MAX_TIMEOUT_DAYS=${SYNC_JOB_MAX_TIMEOUT_DAYS}
      - TEMPORAL_HOST=${TEMPORAL_HOST}
      - TRACKING_STRATEGY=${TRACKING_STRATEGY}
      - WEBAPP_URL=${WEBAPP_URL}
      - WORKER_ENVIRONMENT=${WORKER_ENVIRONMENT}
      - WORKSPACE_DOCKER_MOUNT=${WORKSPACE_DOCKER_MOUNT}
      - WORKSPACE_ROOT=${WORKSPACE_ROOT}
      - METRIC_CLIENT=datadog
      - OTEL_COLLECTOR_ENDPOINT=${OTEL_COLLECTOR_ENDPOINT}
      - JOB_ERROR_REPORTING_STRATEGY=${JOB_ERROR_REPORTING_STRATEGY}
      - JOB_ERROR_REPORTING_SENTRY_DSN=${JOB_ERROR_REPORTING_SENTRY_DSN}
      - ACTIVITY_MAX_ATTEMPT=${ACTIVITY_MAX_ATTEMPT}
      - ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS=${ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS}
      - ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS=${ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS}
      - WORKFLOW_FAILURE_RESTART_DELAY_SECONDS=${WORKFLOW_FAILURE_RESTART_DELAY_SECONDS}
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - workspace:${WORKSPACE_ROOT}
      - ${LOCAL_ROOT}:${LOCAL_ROOT}
  server:
    image: airbyte/server:${VERSION}
    logging: *default-logging
    container_name: airbyte-server
    restart: unless-stopped
    environment:
      - AIRBYTE_ROLE=${AIRBYTE_ROLE:-}
      - AIRBYTE_VERSION=${VERSION}
      - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-}
      - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-}
      - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-}
      - CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=${CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION:-}
      - CONFIG_ROOT=${CONFIG_ROOT}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_URL=${DATABASE_URL}
      - DATABASE_USER=${DATABASE_USER}
      - JOB_MAIN_CONTAINER_CPU_LIMIT=${JOB_MAIN_CONTAINER_CPU_LIMIT}
      - JOB_MAIN_CONTAINER_CPU_REQUEST=${JOB_MAIN_CONTAINER_CPU_REQUEST}
      - JOB_MAIN_CONTAINER_MEMORY_LIMIT=${JOB_MAIN_CONTAINER_MEMORY_LIMIT}
      - JOB_MAIN_CONTAINER_MEMORY_REQUEST=${JOB_MAIN_CONTAINER_MEMORY_REQUEST}
      - JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=${JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION:-}
      - LOG_LEVEL=${LOG_LEVEL}
      - NEW_SCHEDULER=${NEW_SCHEDULER}
      - SECRET_PERSISTENCE=${SECRET_PERSISTENCE}
      - TEMPORAL_HOST=${TEMPORAL_HOST}
      - TRACKING_STRATEGY=${TRACKING_STRATEGY}
      - WEBAPP_URL=${WEBAPP_URL}
      - WORKER_ENVIRONMENT=${WORKER_ENVIRONMENT}
      - WORKSPACE_ROOT=${WORKSPACE_ROOT}
    ports:
      # Quoted to avoid YAML 1.1 base-60 ("sexagesimal") parsing of HOST:CONTAINER.
      - "8001:8001"
    volumes:
      - workspace:${WORKSPACE_ROOT}
      - data:${CONFIG_ROOT}
      - ${LOCAL_ROOT}:${LOCAL_ROOT}
  webapp:
    image: airbyte/webapp:${VERSION}
    logging: *default-logging
    container_name: airbyte-webapp
    restart: unless-stopped
    ports:
      - "8000:80"
    environment:
      - AIRBYTE_ROLE=${AIRBYTE_ROLE:-}
      - AIRBYTE_VERSION=${VERSION}
      - API_URL=${API_URL:-}
      - FULLSTORY=${FULLSTORY:-}
      - INTERNAL_API_HOST=${INTERNAL_API_HOST}
      - IS_DEMO=${IS_DEMO:-}
      - OPENREPLAY=${OPENREPLAY:-}
      - PAPERCUPS_STORYTIME=${PAPERCUPS_STORYTIME:-}
      - TRACKING_STRATEGY=${TRACKING_STRATEGY}
  airbyte-temporal:
    image: airbyte/temporal:${VERSION}
    logging: *default-logging
    container_name: airbyte-temporal
    restart: unless-stopped
    ports:
      - "7233:7233"
    environment:
      - DB=postgresql
      - DB_PORT=${DATABASE_PORT}
      - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development.yaml
      - LOG_LEVEL=${LOG_LEVEL}
      - POSTGRES_PWD=${DATABASE_PASSWORD}
      - POSTGRES_SEEDS=${DATABASE_HOST}
      - POSTGRES_USER=${DATABASE_USER}
    volumes:
      - ./temporal/dynamicconfig:/etc/temporal/config/dynamicconfig

  datadog:
    image: gcr.io/datadoghq/agent:7
    container_name: dd-agent
    environment:
      - DD_API_KEY=${DD_API_KEY}
      - DD_DOGSTATSD_NON_LOCAL_TRAFFIC=true
      - DD_LOGS_ENABLED=true
      - DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL=true
      - DD_SITE=datadoghq.com
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - /proc/:/host/proc/:ro
      - /sys/fs/cgroup:/host/sys/fs/cgroup:ro
      # Required so the agent can tail the json-file container logs from the host.
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
  airbyte-metrics:
    image: airbyte/metrics-reporter:${VERSION}
    container_name: airbyte-metrics
    environment:
      - PUBLISH_METRICS=true
      - DD_AGENT_HOST=dd-agent
      - DD_DOGSTATSD_PORT=8125
      - DATABASE_USER=${DATABASE_USER}
      - DATABASE_URL=${DATABASE_URL}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
volumes:
  workspace:
    name: ${WORKSPACE_DOCKER_MOUNT}
  # the data volume is only needed for backward compatibility; when users upgrade
  # from an old Airbyte version that relies on file-based configs, the server needs
  # to read this volume to copy their configs to the database
  data:
    name: ${DATA_DOCKER_MOUNT}
  db:
    name: ${DB_DOCKER_MOUNT}

What might I be missing?

Hello, @gguerra. Have you tried debugging the logging agent via docker exec -it <CONTAINER_NAME> agent status ?

The debugger shows no error.

I have tried sending a test request using `openssl s_client -connect intake.logs.datadoghq.com:10516` and it worked.

Here are the logs from the docker command

===============
Agent (v7.37.1)
===============

  Status date: 2022-07-21 20:47:55.348 UTC (1658436475348)
  Agent start: 2022-07-21 20:42:52.557 UTC (1658436172557)
  Pid: 374
  Go Version: go1.17.11
  Python Version: 3.8.11
  Build arch: amd64
  Agent flavor: agent
  Check Runners: 4
  Log Level: info

  Paths
  =====
    Config File: /etc/datadog-agent/datadog.yaml
    conf.d: /etc/datadog-agent/conf.d
    checks.d: /etc/datadog-agent/checks.d

  Clocks
  ======
    NTP offset: -30.722ms
    System time: 2022-07-21 20:47:55.348 UTC (1658436475348)

  Host Info
  =========
    bootTime: 2022-07-21 10:40:03 UTC (1658400003000)
    hostId: 3d7f5344-99fd-43a3-942f-9cf676d7f4f4
    kernelArch: x86_64
    kernelVersion: 5.10.16.3-microsoft-standard-WSL2
    os: linux
    platform: ubuntu
    platformFamily: debian
    platformVersion: 21.10
    procs: 12
    uptime: 10h2m54s

  Hostnames
  =========
    hostname: docker-desktop
    socket-fqdn: 61ed235b1356
    socket-hostname: 61ed235b1356
    hostname provider: container
    unused hostname providers:
      aws: not retrieving hostname from AWS: the host is not an ECS instance and other providers already retrieve non-default hostnames
      azure: azure_hostname_style is set to 'os'
      configuration/environment: hostname is empty
      gce: unable to retrieve hostname from GCE: GCE metadata API error: Get "http://169.254.169.254/computeMetadata/v1/instance/hostname": dial tcp 169.254.169.254:80: connect: connection refused

  Metadata
  ========
    agent_version: 7.37.1
    config_apm_dd_url:
    config_dd_url:
    config_logs_dd_url:
    config_logs_socks5_proxy_address:
    config_no_proxy: []
    config_process_dd_url:
    config_proxy_http:
    config_proxy_https:
    config_site:
    feature_apm_enabled: true
    feature_cspm_enabled: false
    feature_cws_enabled: false
    feature_logs_enabled: true
    feature_networks_enabled: false
    feature_networks_http_enabled: false
    feature_networks_https_enabled: false
    feature_otlp_enabled: false
    feature_process_enabled: false
    feature_processes_container_enabled: true
    flavor: agent
    hostname_source: container
    install_method_installer_version: docker
    install_method_tool: docker
    install_method_tool_version: docker
    logs_transport: HTTP

=========
Collector
=========

  Running Checks
  ==============

    container
    ---------
      Instance ID: container [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/container.d/conf.yaml.default
      Total Runs: 20
      Metric Samples: Last Run: 132, Total: 2,660
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 11.288s
      Last Execution Date : 2022-07-21 20:47:53 UTC (1658436473000)
      Last Successful Execution Date : 2022-07-21 20:47:53 UTC (1658436473000)


    cpu
    ---
      Instance ID: cpu [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/cpu.d/conf.yaml.default
      Total Runs: 20
      Metric Samples: Last Run: 9, Total: 173
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 0s
      Last Execution Date : 2022-07-21 20:47:49 UTC (1658436469000)
      Last Successful Execution Date : 2022-07-21 20:47:49 UTC (1658436469000)


    disk (4.7.0)
    ------------
      Instance ID: disk:e5dffb8bef24336f [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/disk.d/conf.yaml.default
      Total Runs: 19
      Metric Samples: Last Run: 252, Total: 4,788
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 31ms
      Last Execution Date : 2022-07-21 20:47:41 UTC (1658436461000)
      Last Successful Execution Date : 2022-07-21 20:47:41 UTC (1658436461000)


    docker
    ------
      Instance ID: docker [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/docker.d/conf.yaml.default
      Total Runs: 13
      Metric Samples: Last Run: 25, Total: 325
      Events: Last Run: 1, Total: 10
      Service Checks: Last Run: 1, Total: 13
      Average Execution Time : 15.016s
      Last Execution Date : 2022-07-21 20:47:35 UTC (1658436455000)
      Last Successful Execution Date : 2022-07-21 20:47:35 UTC (1658436455000)


    file_handle
    -----------
      Instance ID: file_handle [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/file_handle.d/conf.yaml.default
      Total Runs: 19
      Metric Samples: Last Run: 5, Total: 95
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 0s
      Last Execution Date : 2022-07-21 20:47:40 UTC (1658436460000)
      Last Successful Execution Date : 2022-07-21 20:47:40 UTC (1658436460000)


    io
    --
      Instance ID: io [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/io.d/conf.yaml.default
      Total Runs: 20
      Metric Samples: Last Run: 93, Total: 1,797
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 0s
      Last Execution Date : 2022-07-21 20:47:47 UTC (1658436467000)
      Last Successful Execution Date : 2022-07-21 20:47:47 UTC (1658436467000)


    load
    ----
      Instance ID: load [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/load.d/conf.yaml.default
      Total Runs: 20
      Metric Samples: Last Run: 6, Total: 120
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 0s
      Last Execution Date : 2022-07-21 20:47:54 UTC (1658436474000)
      Last Successful Execution Date : 2022-07-21 20:47:54 UTC (1658436474000)


    memory
    ------
      Instance ID: memory [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/memory.d/conf.yaml.default
      Total Runs: 20
      Metric Samples: Last Run: 20, Total: 400
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 0s
      Last Execution Date : 2022-07-21 20:47:46 UTC (1658436466000)
      Last Successful Execution Date : 2022-07-21 20:47:46 UTC (1658436466000)


    network (2.7.0)
    ---------------
      Instance ID: network:d884b5186b651429 [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/network.d/conf.yaml.default
      Total Runs: 20
      Metric Samples: Last Run: 94, Total: 1,880
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 4ms
      Last Execution Date : 2022-07-21 20:47:53 UTC (1658436473000)
      Last Successful Execution Date : 2022-07-21 20:47:53 UTC (1658436473000)


    ntp
    ---
      Instance ID: ntp:d884b5186b651429 [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/ntp.d/conf.yaml.default
      Total Runs: 1
      Metric Samples: Last Run: 1, Total: 1
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 1, Total: 1
      Average Execution Time : 1.55s
      Last Execution Date : 2022-07-21 20:42:59 UTC (1658436179000)
      Last Successful Execution Date : 2022-07-21 20:42:59 UTC (1658436179000)


    uptime
    ------
      Instance ID: uptime [OK]
      Configuration Source: file:/etc/datadog-agent/conf.d/uptime.d/conf.yaml.default
      Total Runs: 20
      Metric Samples: Last Run: 1, Total: 20
      Events: Last Run: 0, Total: 0
      Service Checks: Last Run: 0, Total: 0
      Average Execution Time : 0s
      Last Execution Date : 2022-07-21 20:47:45 UTC (1658436465000)
      Last Successful Execution Date : 2022-07-21 20:47:45 UTC (1658436465000)

========
JMXFetch
========

  Information
  ==================
  Initialized checks
  ==================
    no checks

  Failed checks
  =============
    no checks

=========
Forwarder
=========

  Transactions
  ============
    Cluster: 0
    ClusterRole: 0
    ClusterRoleBinding: 0
    CronJob: 0
    DaemonSet: 0
    Deployment: 0
    Dropped: 0
    HighPriorityQueueFull: 0
    Ingress: 0
    Job: 0
    Node: 0
    PersistentVolume: 0
    PersistentVolumeClaim: 0
    Pod: 0
    ReplicaSet: 0
    Requeued: 0
    Retried: 0
    RetryQueueSize: 0
    Role: 0
    RoleBinding: 0
    Service: 0
    ServiceAccount: 0
    StatefulSet: 0

  Transaction Successes
  =====================
    Total number: 45
    Successes By Endpoint:
      check_run_v1: 20
      intake: 4
      metadata_v1: 1
      series_v1: 20

  On-disk storage
  ===============
    On-disk storage is disabled. Configure `forwarder_storage_max_size_in_bytes` to enable it.

  API Keys status
  ===============
    API key ending with a6564: API Key valid

==========
Endpoints
==========
  https://app.datadoghq.com - API Key ending with:
      - a6564

==========
Logs Agent
==========

    Reliable: Sending compressed logs in HTTPS to agent-http-intake.logs.datadoghq.com on port 443
    BytesSent: 0
    EncodedBytesSent: 0
    LogsProcessed: 0
    LogsSent: 0

  container_collect_all
  ---------------------
    - Type: file
      Identifier: 0ae6b0cf858a3d785d4ae27eae878191b338d4ced124444a95df421accafc98d
      Path: /var/lib/docker/containers/0ae6b0cf858a3d785d4ae27eae878191b338d4ced124444a95df421accafc98d/0ae6b0cf858a3d785d4ae27eae878191b338d4ced124444a95df421accafc98d-json.log
      Status: Error: cannot read file /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: stat /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: no such file or directory
      BytesRead: 0
      Average Latency (ms): 0
      24h Average Latency (ms): 0
      Peak Latency (ms): 0
      24h Peak Latency (ms): 0
    - Type: file
      Identifier: 17acd3af5e0d341d86f4aecb7468eb6c8e838deed11e8cfb78ab9e7e693473d0
      Path: /var/lib/docker/containers/17acd3af5e0d341d86f4aecb7468eb6c8e838deed11e8cfb78ab9e7e693473d0/17acd3af5e0d341d86f4aecb7468eb6c8e838deed11e8cfb78ab9e7e693473d0-json.log
      Status: Error: cannot read file /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: stat /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: no such file or directory
      BytesRead: 0
      Average Latency (ms): 0
      24h Average Latency (ms): 0
      Peak Latency (ms): 0
      24h Peak Latency (ms): 0
    - Type: file
      Identifier: 2a4c20a24e88cf1f1df95a1b0d2405e990f868577ba0e884df77de8fba3c9fd9
      Path: /var/lib/docker/containers/2a4c20a24e88cf1f1df95a1b0d2405e990f868577ba0e884df77de8fba3c9fd9/2a4c20a24e88cf1f1df95a1b0d2405e990f868577ba0e884df77de8fba3c9fd9-json.log
      Status: Error: cannot read file /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: stat /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: no such file or directory
      BytesRead: 0
      Average Latency (ms): 0
      24h Average Latency (ms): 0
      Peak Latency (ms): 0
      24h Peak Latency (ms): 0
    - Type: file
      Identifier: 61ed235b13566e2e53e93b1b85c2f6a1f076fb06c2832406b572e977b3f0dac2
      Path: /var/lib/docker/containers/61ed235b13566e2e53e93b1b85c2f6a1f076fb06c2832406b572e977b3f0dac2/61ed235b13566e2e53e93b1b85c2f6a1f076fb06c2832406b572e977b3f0dac2-json.log
      Status: Error: cannot read file /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: stat /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: no such file or directory
      BytesRead: 0
      Average Latency (ms): 0
      24h Average Latency (ms): 0
      Peak Latency (ms): 0
      24h Peak Latency (ms): 0
    - Type: file
      Identifier: c1a9af957ed8664fa4f755409a35a5af228cc900026a8d52920673d1299a3481
      Path: /var/lib/docker/containers/c1a9af957ed8664fa4f755409a35a5af228cc900026a8d52920673d1299a3481/c1a9af957ed8664fa4f755409a35a5af228cc900026a8d52920673d1299a3481-json.log
      Status: Error: cannot read file /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: stat /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: no such file or directory
      BytesRead: 0
      Average Latency (ms): 0
      24h Average Latency (ms): 0
      Peak Latency (ms): 0
      24h Peak Latency (ms): 0
    - Type: file
      Identifier: 177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def
      Path: /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log
      Status: Error: cannot read file /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: stat /var/lib/docker/containers/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def/177316b520118d69f8bf143e855509aa1cf6693a76bfb4fafe5303fab5ce6def-json.log: no such file or directory
      BytesRead: 0
      Average Latency (ms): 0
      24h Average Latency (ms): 0
      Peak Latency (ms): 0
      24h Peak Latency (ms): 0

=============
Process Agent
=============

  Version: 7.37.1
  Status date: 2022-07-21 20:47:55.35 UTC (1658436475350)
  Process Agent Start: 2022-07-21 20:42:52.735 UTC (1658436172735)
  Pid: 379
  Go Version: go1.17.11
  Build arch: amd64
  Log Level: info
  Enabled Checks: [container rtcontainer process_discovery]
  Allocated Memory: 13,754,048 bytes
  Hostname: docker-desktop

  =================
  Process Endpoints
  =================
    https://process.datadoghq.com - API Key ending with:
        - a6564

  =========
  Collector
  =========
    Last collection time: 2022-07-21 20:47:45
    Docker socket: /var/run/docker.sock
    Number of processes: 0
    Number of containers: 6
    Process Queue length: 0
    RTProcess Queue length: 0
    Pod Queue length: 0
    Process Bytes enqueued: 0
    RTProcess Bytes enqueued: 0
    Pod Bytes enqueued: 0
    Drop Check Payloads: []
=========
APM Agent
=========
  Status: Running
  Pid: 381
  Uptime: 302 seconds
  Mem alloc: 9,483,640 bytes
  Hostname: docker-desktop
  Receiver: 0.0.0.0:8126
  Endpoints:
    https://trace.agent.datadoghq.com

  Receiver (previous minute)
  ==========================
    No traces received in the previous minute.


  Writer (previous minute)
  ========================
    Traces: 0 payloads, 0 traces, 0 events, 0 bytes
    Stats: 0 payloads, 0 stats buckets, 0 bytes

=========
Aggregator
=========
  Checks Metric Sample: 14,114
  Dogstatsd Metric Sample: 3,756
  Event: 11
  Events Flushed: 11
  Number Of Flushes: 20
  Series Flushed: 13,509
  Service Check: 206
  Service Checks Flushed: 223
=========
DogStatsD
=========
  Event Packets: 0
  Event Parse Errors: 0
  Metric Packets: 3,755
  Metric Parse Errors: 0
  Service Check Packets: 0
  Service Check Parse Errors: 0
  Udp Bytes: 562,208
  Udp Packet Reading Errors: 0
  Udp Packets: 1,936
  Uds Bytes: 0
  Uds Origin Detection Errors: 0
  Uds Packet Reading Errors: 0
  Uds Packets: 0
  Unterminated Metric Errors: 0

=============
Autodiscovery
=============
  Enabled Features
  ================
    docker

====
OTLP
====

  Status: Not enabled
  Collector status: Not running

Thanks for giving me the logs from that command, I think I see the issue:

As per Datadog’s docs, the errors the Logs Agent is giving you are because the Agent is unable to find a log file for a given container. To resolve this issue, check that the folder containing Docker container logs is correctly exposed to the Datadog Agent container.

Here’s the link to the docs, look under “Docker log collection from file issues”:
https://docs.datadoghq.com/logs/guide/docker-logs-collection-troubleshooting-guide/

Let me know if this helps!