
Laravel Docker Kafka setup

Running a Laravel app in a Docker environment and publishing messages to a Kafka topic

Step 1: Creating the Laravel project

Create a new Laravel project by running the following command:

composer create-project laravel/laravel <project_name>

Step 2: Setting up docker compose

Once the project is created, copy the contents of .env to a new file named app.env (the php container will read this via env_file). Since the app will run inside Docker, point the database settings in app.env at the mysql service defined below, for example DB_HOST=mysql, DB_PORT=3306, DB_DATABASE=leads, DB_USERNAME=root and DB_PASSWORD=password. Then create a docker-compose.yml file and add the following contents.

version: "3"  
  
services:  
	zookeeper:  
		image: 'bitnami/zookeeper:latest'  
		restart: always  
		volumes:  
			- zookeeper_data:/bitnami  
		networks:  
			- overlay  
		environment:  
			- ALLOW_ANONYMOUS_LOGIN=yes  
  
	kafka:  
		image: 'bitnami/kafka:latest'  
		networks:  
			- overlay  
		volumes:  
			- kafka_data:/bitnami  
		ports:  
			- '9092:9092'  
		environment:  
			- ALLOW_PLAINTEXT_LISTENER=yes  
			# Only for test environment  
			- KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true  
			- KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181  
		depends_on:  
			- zookeeper  
	nginx:  
		image: nginx:stable-alpine  
		container_name: nginx  
		ports:  
			- "8000:80"  
		volumes:  
			- .:/var/www/html  
			- ./nginx/default.conf:/etc/nginx/conf.d/default.conf  
		  
		depends_on:  
			- php  
			- mysql  
		  
		networks:  
			- overlay  
	  
	mysql:  
		image: mysql:5.7.41  
		container_name: mysql  
		restart: unless-stopped  
		tty: true  
		ports:  
			- "4306:3306"  
		volumes:  
			- ./mysql:/var/lib/mysql  
		environment:  
			MYSQL_DATABASE: leads  
			MYSQL_ROOT_PASSWORD: password  
			SERVICE_TAGS: dev  
			SERVICE_NAME: mysql  
		  
		networks:  
			- overlay  
	  
	php:  
		build:  
			context: .  
			dockerfile: Dockerfile  
		container_name: php  
		env_file:  
			- app.env  
		volumes:  
			- .:/var/www/html  
		ports:  
			- "9000:9000"  
		networks:  
			- overlay  
		depends_on:  
			- mysql  
	  
	composer:  
		image: composer:latest  
		container_name: composer  
		volumes:  
			- .:/var/www/html  
		working_dir: /var/www/html  
  
  
networks:  
	overlay:  
  
volumes:  
	zookeeper_data:  
	kafka_data:

The docker-compose.yml file lists all the services required for the stack.

  • zookeeper - used by kafka for coordination
  • kafka - the broker we publish messages to
  • nginx - to serve the app over HTTP (a sample config follows this list)
  • mysql - for the database
  • php - to run the application code
  • composer - to give access to composer commands
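The nginx service mounts ./nginx/default.conf from the project root, which is not included in this post. A minimal sketch that would fit this compose file, assuming Laravel's public/ directory as the web root and php-fpm reachable at php:9000, could look like this:

server {
    listen 80;
    index index.php;
    root /var/www/html/public;

    location / {
        try_files $uri $uri/ /index.php?$query_string;
    }

    location ~ \.php$ {
        include fastcgi_params;
        # "php" is the service name from docker-compose.yml, 9000 is php-fpm's port
        fastcgi_pass php:9000;
        fastcgi_param SCRIPT_FILENAME $realpath_root$fastcgi_script_name;
    }
}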

The php service uses a custom image that we have to build so that we can install extra deps. This is done in the Dockerfile. The contents of the Dockerfile are as follows:

FROM php:8.1-fpm-alpine AS kafka-app  
  
# Get push tag  
ARG COMMIT_TAG=prerelease  
ENV APP_COMMIT_TAG=${COMMIT_TAG}  
  
  
WORKDIR /var/www/html  
  
COPY . .  
  
#librdkafka for kafka-php  
RUN apk add --no-cache zip libzip-dev librdkafka-dev git unzip wget  
   
RUN apk add --no-cache ${PHPIZE_DEPS}  
  
RUN apk add --no-cache mysql-client msmtp perl wget procps shadow libzip libpng libjpeg-turbo libwebp freetype icu icu-data-full  
  
RUN pecl install rdkafka \  
&& docker-php-ext-enable rdkafka  
  
RUN apk add --no-cache --virtual build-essentials \  
icu-dev icu-libs zlib-dev g++ make automake autoconf libzip-dev \  
libpng-dev libwebp-dev libjpeg-turbo-dev freetype-dev && \  
docker-php-ext-configure gd --enable-gd --with-freetype --with-jpeg --with-webp && \  
docker-php-ext-install gd && \  
docker-php-ext-install mysqli && \  
docker-php-ext-install pdo_mysql && \  
docker-php-ext-install intl && \  
docker-php-ext-install opcache && \  
docker-php-ext-install exif && \  
docker-php-ext-install zip && \  
apk del build-essentials && rm -rf /usr/src/php*  
  
RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer  
  
ENV COMPOSER_ALLOW_SUPERUSER=1  
  
RUN composer install --no-dev --no-interaction --optimize-autoloader  
  
RUN composer dump-autoload --no-scripts --no-dev --optimize  
  
HEALTHCHECK --interval=5s --timeout=10s --start-period=10s \
CMD ["sh", "-c", "curl http://127.0.0.1:8000/ || exit 1"]

The Dockerfile shows the steps for building a Docker image with the required deps. The line

RUN apk add --no-cache zip libzip-dev librdkafka-dev git unzip wget 

adds librdkafka-dev, which is required to build the rdkafka PHP extension.

The line

RUN apk add --no-cache ${PHPIZE_DEPS}

adds the build tools listed in ${PHPIZE_DEPS} (autoconf, gcc, make and friends), which phpize and pecl need to compile extensions.

The line

RUN pecl install rdkafka \  
&& docker-php-ext-enable rdkafka 

installs the rdkafka PHP extension via PECL and enables it.

The rest of the commands install other required deps to build the php image.
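Once the rdkafka extension is in place it can also be used directly, without any framework wrapper. The snippet below is an illustrative sketch of a bare php-rdkafka producer; kafka:9092 is the broker address inside the compose network (the service name), and test-topic is the topic created later in this post:

<?php
// Bare php-rdkafka producer (illustrative sketch, no Laravel involved)
$conf = new RdKafka\Conf();
$conf->set('metadata.broker.list', 'kafka:9092'); // "kafka" = compose service name

$producer = new RdKafka\Producer($conf);
$topic = $producer->newTopic('test-topic');

// RD_KAFKA_PARTITION_UA lets librdkafka choose the partition
$topic->produce(RD_KAFKA_PARTITION_UA, 0, json_encode(['key' => 'value']));

$producer->poll(0);      // serve delivery callbacks
$producer->flush(10000); // wait up to 10s for in-flight messages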

Spin up all of the containers by running the following command:

docker compose up --build -d
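Because the project directory is bind-mounted into the php container, the vendor directory created during the image build is hidden by the mount, so you may need to install dependencies again through the composer service, for example:

docker compose run --rm composer install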

Once the containers are up and running you can create a Kafka topic from inside the kafka container (for example via docker compose exec kafka bash):

/opt/bitnami/kafka/bin/kafka-topics.sh --create --topic test-topic --bootstrap-server localhost:9092 --replication-factor 1 --partitions 4

To list topics you can run the command:

/opt/bitnami/kafka/bin/kafka-topics.sh --list --bootstrap-server localhost:9092

Create a consumer using:

/opt/bitnami/kafka/bin/kafka-console-consumer.sh --topic test-topic --from-beginning --bootstrap-server localhost:9092

and a producer with:

/opt/bitnami/kafka/bin/kafka-console-producer.sh --topic test-topic --bootstrap-server localhost:9092

Step 3: Writing to a topic with PHP

In the PHP code you can produce a message and write it to Kafka using the code below.

use Junges\Kafka\Facades\Kafka;
use Junges\Kafka\Message\Message;

$message = new Message(
    headers: ['header-key' => 'header-value'],
    body: ['key' => 'value'],
    key: 'kafka key here'
);

// send() is what actually dispatches the message to the broker
Kafka::publishOn('topic')->withMessage($message)->send();

This assumes you have installed the mateusjunges/laravel-kafka package (the Junges\Kafka namespace above comes from it) and pointed it at the broker, typically by setting KAFKA_BROKERS=kafka:9092 in app.env, since containers address each other by compose service name.
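To check the whole pipeline end to end, one option is to wire that snippet to a throwaway route and watch the message show up in the console consumer started earlier. The /kafka-test route below is purely illustrative and not part of the original setup:

// routes/web.php (illustrative only)
use Illuminate\Support\Facades\Route;
use Junges\Kafka\Facades\Kafka;
use Junges\Kafka\Message\Message;

Route::get('/kafka-test', function () {
    $message = new Message(
        headers: ['source' => 'laravel'],
        body: ['event' => 'ping', 'sent_at' => now()->toIso8601String()],
        key: 'demo-key'
    );

    // Publish to the topic created earlier with kafka-topics.sh
    Kafka::publishOn('test-topic')->withMessage($message)->send();

    return response()->json(['published' => true]);
});

Visiting http://localhost:8000/kafka-test should then print the payload in the kafka-console-consumer.sh terminal.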
