
Commit 8614e15

MINOR: typo in javadoc (#20113)
This PR fixes a typo in the Javadoc.

---------

Signed-off-by: see-quick <maros.orsak159@gmail.com>
Reviewers: Luke Chen <showuon@gmail.com>
1 parent fba01c4 commit 8614e15

6 files changed (+7, -7 lines)

clients/src/main/java/org/apache/kafka/clients/consumer/internals/events/CompletableEvent.java

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ public interface CompletableEvent<T> {
  * (if applicable) is passed to {@link CompletableFuture#complete(Object)}. In the case where the generic
  * bound type is specified as {@link Void}, {@code null} is provided.</li>
  * <li>
- * Error: when the the event logic generates an error, the error is passed to
+ * Error: when the event logic generates an error, the error is passed to
  * {@link CompletableFuture#completeExceptionally(Throwable)}.
  * </li>
  * <li>
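The Javadoc above describes two completion paths for an event: a result (or null for Void-bound events) is passed to CompletableFuture#complete, while a failure in the event logic is passed to CompletableFuture#completeExceptionally. A minimal standalone sketch of that contract, using only java.util.concurrent; SampleEvent and finish are hypothetical names for illustration, not Kafka classes:

import java.util.concurrent.CompletableFuture;

// Minimal sketch (not the Kafka implementation) of the completion contract the
// Javadoc describes: success goes to complete(), failure to completeExceptionally().
public class CompletionContractSketch {

    // Hypothetical event type used only for illustration.
    static final class SampleEvent<T> {
        private final CompletableFuture<T> future = new CompletableFuture<>();

        CompletableFuture<T> future() {
            return future;
        }
    }

    static <T> void finish(SampleEvent<T> event, T result, Throwable error) {
        if (error != null) {
            // Error path: the error is surfaced via completeExceptionally().
            event.future().completeExceptionally(error);
        } else {
            // Success path: the result (or null for Void-bound events) is passed to complete().
            event.future().complete(result);
        }
    }

    public static void main(String[] args) {
        SampleEvent<Void> event = new SampleEvent<>();
        finish(event, null, null);                      // completes normally with null
        System.out.println(event.future().isDone());    // true

        SampleEvent<String> failed = new SampleEvent<>();
        finish(failed, null, new IllegalStateException("event logic failed"));
        System.out.println(failed.future().isCompletedExceptionally()); // true
    }
}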

release/templates.py

Lines changed: 1 addition & 1 deletion
@@ -70,7 +70,7 @@ def requirements_instructions(prefs_file, prefs):
 Some of these may be used from these previous settings loaded from {prefs_file}:
 {prefs}

-Do you have all of of these setup?"""
+Do you have all of these setup?"""


 def release_announcement_email(release_version, contributors):

streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java

Lines changed: 1 addition & 1 deletion
@@ -236,7 +236,7 @@ public class KafkaStreams implements AutoCloseable {
  * </li>
  * <li>
  * REBALANCING state will transit to RUNNING if all of its threads are in RUNNING state
- * (Note: a thread transits to RUNNING state, if all active tasks got restored are are ready for processing.
+ * (Note: a thread transits to RUNNING state, if all active tasks got restored are ready for processing.
  * Standby tasks are not considered.)
  * </li>
  * <li>
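The Javadoc here documents the REBALANCING to RUNNING transition, which happens once all stream threads are RUNNING (i.e. their active tasks are restored). A short sketch of how an application can observe that transition with KafkaStreams#setStateListener; the application id, bootstrap server, and topic names are placeholders:

import java.util.Properties;

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;

// Sketch: observe the REBALANCING -> RUNNING transition described in the Javadoc
// by registering a state listener. Connection settings and topics are placeholders.
public class StateTransitionSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "state-listener-demo"); // placeholder id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");   // placeholder broker

        StreamsBuilder builder = new StreamsBuilder();
        builder.stream("input-topic").to("output-topic"); // trivial topology for illustration

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.setStateListener((newState, oldState) ->
            // Reports transitions such as REBALANCING -> RUNNING once all threads are RUNNING.
            System.out.println("State changed from " + oldState + " to " + newState));

        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}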

streams/src/main/java/org/apache/kafka/streams/StreamsConfig.java

Lines changed: 1 addition & 1 deletion
@@ -1859,7 +1859,7 @@ public Map<String, Object> getMainConsumerConfigs(final String groupId, final St

         if (segmentSize < batchSize) {
             throw new IllegalArgumentException(String.format(
-                "Specified topic segment size %d is is smaller than the configured producer batch size %d, this will cause produced batch not able to be appended to the topic",
+                "Specified topic segment size %d is smaller than the configured producer batch size %d, this will cause produced batch not able to be appended to the topic",
                 segmentSize,
                 batchSize
             ));
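The check in this hunk rejects a topic segment size smaller than the producer batch size, since such a batch could never be appended to the topic. A standalone sketch of the same validation rule, not Kafka's internal code; the class and method names are made up for illustration:

// Standalone sketch of the validation shown in the diff: a topic segment must be at
// least as large as the producer batch size, otherwise a produced batch cannot fit.
public class SegmentSizeCheckSketch {

    static void validateSegmentSize(long segmentSize, long batchSize) {
        if (segmentSize < batchSize) {
            throw new IllegalArgumentException(String.format(
                "Specified topic segment size %d is smaller than the configured producer batch size %d, " +
                    "this will cause produced batch not able to be appended to the topic",
                segmentSize, batchSize));
        }
    }

    public static void main(String[] args) {
        validateSegmentSize(52_428_800L, 16_384L); // 50 MiB segment vs 16 KiB batch: passes
        validateSegmentSize(8_192L, 16_384L);      // throws IllegalArgumentException
    }
}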

tests/kafkatest/services/security/kafka_acls.py

Lines changed: 2 additions & 2 deletions
@@ -27,7 +27,7 @@ def add_cluster_acl(self, kafka, principal, additional_cluster_operations_to_gra
         :param additional_cluster_operations_to_grant may be set to ['Alter', 'Create'] if the cluster is secured since these are required
             to create SCRAM credentials and topics, respectively
         :param security_protocol set it to explicitly determine whether we use client or broker credentials, otherwise
-            we use the the client security protocol unless inter-broker security protocol is PLAINTEXT, in which case we use PLAINTEXT.
+            we use the client security protocol unless inter-broker security protocol is PLAINTEXT, in which case we use PLAINTEXT.
             Then we use the broker's credentials if the selected security protocol matches the inter-broker security protocol,
             otherwise we use the client's credentials.
         """
@@ -49,7 +49,7 @@ def remove_cluster_acl(self, kafka, principal, additional_cluster_operations_to_
         :param additional_cluster_operations_to_remove may be set to ['Alter', 'Create'] if the cluster is secured since these are required
             to create SCRAM credentials and topics, respectively
         :param security_protocol set it to explicitly determine whether we use client or broker credentials, otherwise
-            we use the the client security protocol unless inter-broker security protocol is PLAINTEXT, in which case we use PLAINTEXT.
+            we use the client security protocol unless inter-broker security protocol is PLAINTEXT, in which case we use PLAINTEXT.
             Then we use the broker's credentials if the selected security protocol matches the inter-broker security protocol,
             otherwise we use the client's credentials.
         """

tools/src/test/java/org/apache/kafka/tools/JmxToolTest.java

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ public class JmxToolTest {
     public static void beforeAll() throws Exception {
         int port = findRandomOpenPortOnAllLocalInterfaces();
         jmxUrl = format("service:jmx:rmi:///jndi/rmi://:%d/jmxrmi", port);
-        // explicitly set the hostname returned to the the clients in the remote stub object
+        // explicitly set the hostname returned to the clients in the remote stub object
         // when connecting to a multi-homed machine using RMI, the wrong address may be returned
         // by the RMI registry to the client, causing the connection to the RMI server to timeout
         System.setProperty("java.rmi.server.hostname", "localhost");
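The comment explains why the test pins java.rmi.server.hostname: on a multi-homed machine the RMI registry may hand clients a stub containing an unreachable address. A minimal sketch, using the standard javax.management.remote API and a fixed placeholder port rather than the test's random one, of standing up and connecting to a JMX RMI connector with that property set:

import java.lang.management.ManagementFactory;
import java.rmi.registry.LocateRegistry;

import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXConnectorServer;
import javax.management.remote.JMXConnectorServerFactory;
import javax.management.remote.JMXServiceURL;

// Sketch of the RMI/JMX setup the comment refers to (port is a placeholder).
public class JmxRmiHostnameSketch {
    public static void main(String[] args) throws Exception {
        int port = 9999; // placeholder; the test picks a random free port instead

        // Must be set before the connector server is created, otherwise the remote stub
        // may embed an address that clients cannot reach on a multi-homed machine.
        System.setProperty("java.rmi.server.hostname", "localhost");

        LocateRegistry.createRegistry(port);
        JMXServiceURL url = new JMXServiceURL(
            String.format("service:jmx:rmi:///jndi/rmi://:%d/jmxrmi", port));
        JMXConnectorServer server = JMXConnectorServerFactory.newJMXConnectorServer(
            url, null, ManagementFactory.getPlatformMBeanServer());
        server.start();

        // Client side: connect through the same service URL.
        JMXConnector connector = JMXConnectorFactory.connect(url);
        System.out.println(connector.getMBeanServerConnection().getMBeanCount());

        connector.close();
        server.stop();
    }
}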
