<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:content="http://purl.org/rss/1.0/modules/content/">
  <channel>
    <title>Topic on Micha Kops&#39; Tech Notes</title>
    <link>https://www.hascode.com/tags/topic/</link>
    <description>Recent content in Topic on Micha Kops&#39; Tech Notes</description>
    <generator>Hugo -- 0.147.8</generator>
    <language>en</language>
    <copyright>Copyright © 2010 - 2025 Micha Kops. #213243b1d6e8932079e09227d3f3ed0c806cd0c9</copyright>
    <lastBuildDate>Thu, 09 Jun 2022 00:00:00 +0200</lastBuildDate>
    <atom:link href="https://www.hascode.com/tags/topic/index.xml" rel="self" type="application/rss+xml" />
    <item>
      <title>Spring Boot Kafka Increase Message Size Limit</title>
      <link>https://www.hascode.com/spring-boot-kafka-increase-message-size-limit/</link>
      <pubDate>Thu, 09 Jun 2022 00:00:00 +0200</pubDate>
      <guid>https://www.hascode.com/spring-boot-kafka-increase-message-size-limit/</guid>
      <description>&lt;div id=&#34;preamble&#34;&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;Let’s say we would like to increase the limit to 10MB …​&lt;/p&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;sect1&#34;&gt;
&lt;h2 id=&#34;_broker_configuration&#34;&gt;Broker Configuration&lt;/h2&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;Apply the new limit either by modifying the &lt;code&gt;server.properties&lt;/code&gt; like this…​&lt;/p&gt;
&lt;/div&gt;
&lt;div class=&#34;listingblock&#34;&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;pre class=&#34;highlight&#34;&gt;&lt;code class=&#34;language-properties&#34; data-lang=&#34;properties&#34;&gt;max.message.bytes=10485760&lt;/code&gt;&lt;/pre&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;olist lowerroman&#34;&gt;
&lt;ol class=&#34;lowerroman&#34; type=&#34;i&#34;&gt;
&lt;li&gt;
&lt;p&gt;or apply it to a specific topic using&lt;/p&gt;
&lt;/li&gt;
&lt;/ol&gt;
&lt;/div&gt;
&lt;div class=&#34;listingblock&#34;&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;pre class=&#34;highlight&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;kafka-configs.sh --bootstrap-server localhost:9092 \
    --entity-type topics  \
    --entity-name thetopic \
    --alter \
    --add-config max.message.bytes=10485760&lt;/code&gt;&lt;/pre&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;sect1&#34;&gt;
&lt;h2 id=&#34;_producer_configuration_for_spring_boot&#34;&gt;Producer Configuration for Spring Boot&lt;/h2&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;We simply need to add the following line to our &lt;code&gt;application.properties&lt;/code&gt;:&lt;/p&gt;
&lt;/div&gt;
&lt;div class=&#34;listingblock&#34;&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;pre class=&#34;highlight&#34;&gt;&lt;code class=&#34;language-properties&#34; data-lang=&#34;properties&#34;&gt;spring.kafka.producer.properties.max.request.size=10485760&lt;/code&gt;&lt;/pre&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;The following proof-of-concept demonstrates that without the property, sending a large message fails, with the property it succeeds:&lt;/p&gt;
&lt;/div&gt;</description>
    </item>
    <item>
      <title>Kafka Java Quickstart with Docker</title>
      <link>https://www.hascode.com/kafka-java-quickstart-with-docker/</link>
      <pubDate>Sat, 29 Jan 2022 00:00:00 +0100</pubDate>
      <guid>https://www.hascode.com/kafka-java-quickstart-with-docker/</guid>
      <description>&lt;div id=&#34;preamble&#34;&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;sidebarblock&#34;&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;div class=&#34;title&#34;&gt;Goals&lt;/div&gt;
&lt;div class=&#34;olist arabic&#34;&gt;
&lt;ol class=&#34;arabic&#34;&gt;
&lt;li&gt;
&lt;p&gt;Setup Kafka and Zookeeper with Docker and docker-compose&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;Create a message consumer and producer in Java&lt;/p&gt;
&lt;/li&gt;
&lt;/ol&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;sect1&#34;&gt;
&lt;h2 id=&#34;_kafka_setup&#34;&gt;Kafka Setup&lt;/h2&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;We’re using &lt;code&gt;docker-compose&lt;/code&gt; to set up our message broker, zookeeper and other stuff using &lt;code&gt;confluent-platform&lt;/code&gt;.&lt;/p&gt;
&lt;/div&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;This is our &lt;code&gt;docker-compose.yaml&lt;/code&gt; config file, taken from the following Confluent
&lt;a href=&#34;https://github.com/confluentinc/cp-all-in-one/blob/7.0.1-post/cp-all-in-one-community/docker-compose.yml&#34;&gt;GitHub repository&lt;/a&gt;.&lt;/p&gt;
&lt;/div&gt;
&lt;div class=&#34;listingblock&#34;&gt;
&lt;div class=&#34;title&#34;&gt;docker-compose.yaml&lt;/div&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;pre class=&#34;highlight&#34;&gt;&lt;code class=&#34;language-yaml&#34; data-lang=&#34;yaml&#34;&gt;---
version: &amp;#39;2&amp;#39;
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:7.0.1
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - &amp;#34;2181:2181&amp;#34;
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000

  broker:
    image: confluentinc/cp-kafka:7.0.1
    hostname: broker
    container_name: broker
    depends_on:
      - zookeeper
    ports:
      - &amp;#34;29092:29092&amp;#34;
      - &amp;#34;9092:9092&amp;#34;
      - &amp;#34;9101:9101&amp;#34;
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: &amp;#39;zookeeper:2181&amp;#39;
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_JMX_PORT: 9101
      KAFKA_JMX_HOSTNAME: localhost

  schema-registry:
    image: confluentinc/cp-schema-registry:7.0.1
    hostname: schema-registry
    container_name: schema-registry
    depends_on:
      - broker
    ports:
      - &amp;#34;8081:8081&amp;#34;
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: &amp;#39;broker:29092&amp;#39;
      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081

  connect:
    image: cnfldemos/kafka-connect-datagen:0.5.0-6.2.0
    hostname: connect
    container_name: connect
    depends_on:
      - broker
      - schema-registry
    ports:
      - &amp;#34;8083:8083&amp;#34;
    environment:
      CONNECT_BOOTSTRAP_SERVERS: &amp;#39;broker:29092&amp;#39;
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_PLUGIN_PATH: &amp;#34;/usr/share/java,/usr/share/confluent-hub-components&amp;#34;
      CONNECT_LOG4J_LOGGERS: org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR

  ksqldb-server:
    image: confluentinc/cp-ksqldb-server:7.0.1
    hostname: ksqldb-server
    container_name: ksqldb-server
    depends_on:
      - broker
      - connect
    ports:
      - &amp;#34;8088:8088&amp;#34;
    environment:
      KSQL_CONFIG_DIR: &amp;#34;/etc/ksql&amp;#34;
      KSQL_BOOTSTRAP_SERVERS: &amp;#34;broker:29092&amp;#34;
      KSQL_HOST_NAME: ksqldb-server
      KSQL_LISTENERS: &amp;#34;http://0.0.0.0:8088&amp;#34;
      KSQL_CACHE_MAX_BYTES_BUFFERING: 0
      KSQL_KSQL_SCHEMA_REGISTRY_URL: &amp;#34;http://schema-registry:8081&amp;#34;
      KSQL_PRODUCER_INTERCEPTOR_CLASSES: &amp;#34;io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor&amp;#34;
      KSQL_CONSUMER_INTERCEPTOR_CLASSES: &amp;#34;io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor&amp;#34;
      KSQL_KSQL_CONNECT_URL: &amp;#34;http://connect:8083&amp;#34;
      KSQL_KSQL_LOGGING_PROCESSING_TOPIC_REPLICATION_FACTOR: 1
      KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: &amp;#39;true&amp;#39;
      KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: &amp;#39;true&amp;#39;

  ksqldb-cli:
    image: confluentinc/cp-ksqldb-cli:7.0.1
    container_name: ksqldb-cli
    depends_on:
      - broker
      - connect
      - ksqldb-server
    entrypoint: /bin/sh
    tty: true

  ksql-datagen:
    image: confluentinc/ksqldb-examples:7.0.1
    hostname: ksql-datagen
    container_name: ksql-datagen
    depends_on:
      - ksqldb-server
      - broker
      - schema-registry
      - connect
    command: &amp;#34;bash -c &amp;#39;echo Waiting for Kafka to be ready... &amp;amp;&amp;amp; \
                       cub kafka-ready -b broker:29092 1 40 &amp;amp;&amp;amp; \
                       echo Waiting for Confluent Schema Registry to be ready... &amp;amp;&amp;amp; \
                       cub sr-ready schema-registry 8081 40 &amp;amp;&amp;amp; \
                       echo Waiting a few seconds for topic creation to finish... &amp;amp;&amp;amp; \
                       sleep 11 &amp;amp;&amp;amp; \
                       tail -f /dev/null&amp;#39;&amp;#34;
    environment:
      KSQL_CONFIG_DIR: &amp;#34;/etc/ksql&amp;#34;
      STREAMS_BOOTSTRAP_SERVERS: broker:29092
      STREAMS_SCHEMA_REGISTRY_HOST: schema-registry
      STREAMS_SCHEMA_REGISTRY_PORT: 8081

  rest-proxy:
    image: confluentinc/cp-kafka-rest:7.0.1
    depends_on:
      - broker
      - schema-registry
    ports:
      - 8082:8082
    hostname: rest-proxy
    container_name: rest-proxy
    environment:
      KAFKA_REST_HOST_NAME: rest-proxy
      KAFKA_REST_BOOTSTRAP_SERVERS: &amp;#39;broker:29092&amp;#39;
      KAFKA_REST_LISTENERS: &amp;#34;http://0.0.0.0:8082&amp;#34;
      KAFKA_REST_SCHEMA_REGISTRY_URL: &amp;#39;http://schema-registry:8081&amp;#39;&lt;/code&gt;&lt;/pre&gt;
&lt;/div&gt;</description>
    </item>
    <item>
      <title>Playing around with MQTT and Java with Moquette and Eclipse Paho</title>
      <link>https://www.hascode.com/playing-around-with-mqtt-and-java-with-moquette-and-eclipse-paho/</link>
      <pubDate>Wed, 01 Jun 2016 00:00:00 +0200</pubDate>
      <guid>https://www.hascode.com/playing-around-with-mqtt-and-java-with-moquette-and-eclipse-paho/</guid>
      <description>&lt;div id=&#34;preamble&#34;&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;The MQ Telemetry Transport Protocol (MQTT) is a lightweight publish/subscribe messaging protocol developed in 1999 that experiences a growing popularity due to trends like the Internet-of-Things and the need to exchange information between low powered devices with aspects as CPU and bandwidth usage in mind.&lt;/p&gt;
&lt;/div&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;In the following tutorial I’d like to demonstrate how to set-up a broker for this protocol with the help of the Moquette library and how to create a client and publish messages for a specific topic using this broker and Eclipse Paho as client library.&lt;/p&gt;
&lt;/div&gt;</description>
    </item>
    <item>
      <title>AMQP and RabbitMQ Snippets</title>
      <link>https://www.hascode.com/amqp-and-rabbitmq-snippets/</link>
      <pubDate>Mon, 01 Mar 2010 00:00:00 +0100</pubDate>
      <guid>https://www.hascode.com/amqp-and-rabbitmq-snippets/</guid>
      <description>&lt;div class=&#34;sect1&#34;&gt;
&lt;h2 id=&#34;_rabbitmqctl&#34;&gt;rabbitmqctl&lt;/h2&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;sect2&#34;&gt;
&lt;h3 id=&#34;_create_admin_user_with_full_host_permissions&#34;&gt;Create admin user with full host permissions&lt;/h3&gt;
&lt;div class=&#34;listingblock&#34;&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;pre class=&#34;highlight&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;# create new user named &amp;#39;theadmin&amp;#39;
rabbitmqctl add_user theadmin thepassword

# make &amp;#39;theadmin&amp;#39; admin
rabbitmqctl set_user_tags theadmin administrator

# give &amp;#39;theadmin&amp;#39; permissions for all hosts
rabbitmqctl set_permissions -p / theadmin &amp;#34;.*&amp;#34; &amp;#34;.*&amp;#34; &amp;#34;.*&amp;#34;&lt;/code&gt;&lt;/pre&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;sect1&#34;&gt;
&lt;h2 id=&#34;_amqp_exchange_types&#34;&gt;AMQP Exchange Types&lt;/h2&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;sect2&#34;&gt;
&lt;h3 id=&#34;_1_fanout_exchange&#34;&gt;1. Fanout Exchange&lt;/h3&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;&lt;strong&gt;Description:&lt;/strong&gt; A fanout exchange routes messages to all of the queues that are bound to it. It doesn’t take the routing key into consideration. Instead, it simply broadcasts the message to all bound queues.&lt;/p&gt;
&lt;/div&gt;</description>
    </item>
    <item>
      <title>Kafka Snippets</title>
      <link>https://www.hascode.com/kafka-snippets/</link>
      <pubDate>Mon, 01 Mar 2010 00:00:00 +0100</pubDate>
      <guid>https://www.hascode.com/kafka-snippets/</guid>
      <description>&lt;div class=&#34;sect1&#34;&gt;
&lt;h2 id=&#34;_start_an_image_with_kcat_kafka_cat_for_debugging&#34;&gt;Start an Image with kcat / kafka-cat for Debugging&lt;/h2&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;listingblock&#34;&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;pre class=&#34;highlight&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;kubectl -n NAMESPACE run &amp;#34;$(whoami)-debug&amp;#34; -it --rm \
    --image=confluentinc/cp-kafkacat:6.1.9 \
    --restart=Never \
    -- bash&lt;/code&gt;&lt;/pre&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;sect1&#34;&gt;
&lt;h2 id=&#34;_dockerfile_for_kafka_analysis_container_with_different_tools&#34;&gt;Dockerfile for Kafka Analysis Container with different Tools&lt;/h2&gt;
&lt;div class=&#34;sectionbody&#34;&gt;
&lt;div class=&#34;paragraph&#34;&gt;
&lt;p&gt;With jq, kafka console tools, schema registry tools and kafkacat installed …​.&lt;/p&gt;
&lt;/div&gt;
&lt;div class=&#34;listingblock&#34;&gt;
&lt;div class=&#34;title&#34;&gt;Dockerfile&lt;/div&gt;
&lt;div class=&#34;content&#34;&gt;
&lt;pre class=&#34;highlight&#34;&gt;&lt;code&gt;FROM confluentinc/cp-kafka:6.2.1 as cp-kafka
FROM confluentinc/cp-schema-registry:6.2.1 as cp-schema-registry

FROM debian:10-slim
ARG DEBIAN_FRONTEND=noninteractive

# Install necessary tools
RUN apt-get update &amp;amp;&amp;amp; apt-get install -y \
    curl \
    jq \
    yq \
    &amp;amp;&amp;amp; rm -rf /var/lib/apt/lists/*

# Install kafkacat binary
RUN apt-get update &amp;amp;&amp;amp; apt-get install -y kafkacat &amp;amp;&amp;amp; rm -rf /var/lib/apt/lists/*

# Copy Kafka binaries
COPY --from=cp-kafka /usr/bin/kafka-* /usr/bin/
COPY --from=cp-schema-registry /usr/bin/schema-registry* /usr/bin/

# Copy entrypoint script
COPY entrypoint.sh /usr/bin/entrypoint.sh
RUN chmod +x /usr/bin/entrypoint.sh

ENTRYPOINT [&amp;#34;/usr/bin/entrypoint.sh&amp;#34;]&lt;/code&gt;&lt;/pre&gt;
&lt;/div&gt;</description>
    </item>
  </channel>
</rss>
