Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Send SSE delete event for sse-kafka binding … #30

Merged
merged 1 commit into the base branch on
May 5, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,14 @@

public class SseKafkaConfiguration extends Configuration
{
// Maximum Kafka message key length, in bytes; configurable via the
// "zilla.binding.sse.kafka.maximum.key.length" property (default 1024).
// NOTE(review): appears to be used as the minimum reply window granted to
// the kafka stream (see SseKafkaProxyFactory.kafkaReplyMin) — confirm.
public static final IntPropertyDef SSE_KAFKA_MAXIMUM_KEY_LENGTH;

// Completed property-set definition passed to the Configuration superclass.
private static final ConfigurationDef SSE_KAFKA_CONFIG;

// Registers every sse-kafka binding property under the shared prefix,
// then publishes the finished definition for the constructor to use.
static
{
final ConfigurationDef config = new ConfigurationDef("zilla.binding.sse.kafka");
SSE_KAFKA_MAXIMUM_KEY_LENGTH = config.property("maximum.key.length", 1024);
SSE_KAFKA_CONFIG = config;
}

Expand All @@ -31,4 +34,9 @@ public SseKafkaConfiguration(
{
super(SSE_KAFKA_CONFIG, config);
}

/**
 * Resolves the configured maximum Kafka message key length.
 *
 * @return the value of {@code zilla.binding.sse.kafka.maximum.key.length}, in bytes
 */
public int maximumKeyLength()
{
    final int keyLengthMax = SSE_KAFKA_MAXIMUM_KEY_LENGTH.getAsInt(this);
    return keyLengthMax;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,10 @@
import org.agrona.concurrent.UnsafeBuffer;

import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.Array32FW;
import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.Flyweight;
import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.KafkaOffsetFW;
import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.KafkaOffsetType;
import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.OctetsFW;
import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.String8FW;
import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.codec.SseKafkaEventIdFW;
import io.aklivity.zilla.runtime.binding.sse.kafka.internal.types.codec.SseKafkaEventIdPartitionV1FW;
Expand All @@ -40,6 +42,7 @@ public final class SseKafkaIdHelper
new SseKafkaEventIdFW.Builder().wrap(new UnsafeBuffer(new byte[256]), 0, 256);

private final String8FW.Builder stringRW = new String8FW.Builder().wrap(new UnsafeBuffer(new byte[256]), 0, 256);
private final OctetsFW octetsRO = new OctetsFW();

private final SseKafkaEventIdFW eventIdRO = new SseKafkaEventIdFW();
private final SseKafkaEventIdPartitionV1FW partitionV1RO = new SseKafkaEventIdPartitionV1FW();
Expand Down Expand Up @@ -80,15 +83,27 @@ public String8FW encode(
.limit();
});

final SseKafkaEventIdFW encodable = eventId;
return encode8(eventId);
}

/**
 * Encodes the given flyweight's bytes as base64, returning the result as an
 * {@code OctetsFW} view over a reused internal buffer (valid until the next call).
 *
 * @param encodable the flyweight to encode; may be {@code null}
 * @return the base64-encoded octets, or {@code null} when {@code encodable} is
 *         {@code null} or encoding produced no result
 */
public OctetsFW encode(
    final Flyweight encodable)
{
    if (encodable == null)
    {
        // encode8 already tolerates null; guard here too so we don't
        // NPE on encodable.limit() before ever reaching that check
        return null;
    }

    offset.value = encodable.limit();
    final String8FW encoded = encode8(encodable);
    return encoded != null ? octetsRO.wrap(encoded.value(), 0, encoded.length()) : null;
}

private String8FW encode8(
final Flyweight encodable)
{
String8FW encodedBuf = null;

if (encodable != null)
{
final int encodableBytes = offset.value - encodable.offset();
final byte[] encodableRaw = byteArrays.computeIfAbsent(encodableBytes, byte[]::new);
buffer.getBytes(encodable.offset(), encodableRaw);
encodable.buffer().getBytes(encodable.offset(), encodableRaw);

final byte[] encodedBase64 = base64RW;
final int encodedBytes = encoder64.encode(encodableRaw, encodedBase64);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,9 @@ public final class SseKafkaProxyFactory implements SseKafkaStreamFactory
private static final String SSE_TYPE_NAME = "sse";
private static final String KAFKA_TYPE_NAME = "kafka";

private static final String8FW EVENT_TYPE_MESSAGE = new String8FW(null);
private static final String8FW EVENT_TYPE_DELETE = new String8FW("delete");

private final OctetsFW emptyExRO = new OctetsFW().wrap(new UnsafeBuffer(0L, 0), 0, 0);

private final BeginFW beginRO = new BeginFW();
Expand Down Expand Up @@ -92,6 +95,7 @@ public final class SseKafkaProxyFactory implements SseKafkaStreamFactory
private final LongUnaryOperator supplyReplyId;
private final int sseTypeId;
private final int kafkaTypeId;
private final int kafkaReplyMin;

private final Long2ObjectHashMap<SseKafkaBindingConfig> bindings;

Expand All @@ -107,6 +111,7 @@ public SseKafkaProxyFactory(
this.bindings = new Long2ObjectHashMap<>();
this.sseTypeId = context.supplyTypeId(SSE_TYPE_NAME);
this.kafkaTypeId = context.supplyTypeId(KAFKA_TYPE_NAME);
this.kafkaReplyMin = config.maximumKeyLength();
}

@Override
Expand Down Expand Up @@ -356,7 +361,7 @@ private void onSseWindow(

assert replyAck <= replySeq;

delegate.doKafkaWindow(traceId, authorization, budgetId, padding, capabilities);
delegate.doKafkaWindow(traceId, authorization, budgetId, padding, kafkaReplyMin, capabilities);
}

private void doSseBegin(
Expand Down Expand Up @@ -441,7 +446,7 @@ private void doSseWindow(
initialMax = delegate.initialMax;

doWindow(sse, routeId, initialId, initialSeq, initialAck, initialMax,
traceId, authorization, budgetId, padding, capabilities);
traceId, authorization, budgetId, padding, 0, capabilities);
}

private void doSseReset(
Expand Down Expand Up @@ -628,15 +633,21 @@ private void onKafkaData(
final KafkaMergedDataExFW kafkaMergedDataEx =
kafkaDataEx != null && kafkaDataEx.kind() == KafkaDataExFW.KIND_MERGED ? kafkaDataEx.merged() : null;
final Array32FW<KafkaOffsetFW> progress = kafkaMergedDataEx != null ? kafkaMergedDataEx.progress() : null;
final String8FW encodedBuf = sseEventId.encode(progress);
final Flyweight sseDataEx = encodedBuf == null
final OctetsFW key = kafkaMergedDataEx != null ? kafkaMergedDataEx.key().value() : null;

final String8FW encodedId = sseEventId.encode(progress);

final String8FW eventType = payload == null ? EVENT_TYPE_DELETE : EVENT_TYPE_MESSAGE;
final Flyweight sseDataEx = encodedId == null
? emptyExRO
: sseDataExRW.wrap(extBuffer, 0, extBuffer.capacity())
.typeId(sseTypeId)
.id(encodedBuf)
.id(encodedId)
.type(eventType)
.build();

delegate.doSseData(traceId, authorization, budgetId, reserved, flags, payload, sseDataEx);
final OctetsFW eventData = payload == null && key != null ? sseEventId.encode(key) : payload;
delegate.doSseData(traceId, authorization, budgetId, reserved, flags, eventData, sseDataEx);
}
}

Expand Down Expand Up @@ -757,13 +768,14 @@ private void doKafkaWindow(
long authorization,
long budgetId,
int padding,
int minimum,
int capabilities)
{
replyAck = delegate.replyAck;
replyMax = delegate.replyMax;

doWindow(kafka, routeId, replyId, replySeq, replyAck, replyMax,
traceId, authorization, budgetId, padding, capabilities);
traceId, authorization, budgetId, padding, minimum, capabilities);
}
}

Expand Down Expand Up @@ -950,6 +962,7 @@ private void doWindow(
long authorization,
long budgetId,
int padding,
int minimum,
int capabilities)
{
final WindowFW window = windowRW.wrap(writeBuffer, 0, writeBuffer.capacity())
Expand All @@ -962,6 +975,7 @@ private void doWindow(
.authorization(authorization)
.budgetId(budgetId)
.padding(padding)
.minimum(minimum)
.capabilities(capabilities)
.build();

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
/*
* Copyright 2021-2022 Aklivity Inc
*
* Licensed under the Aklivity Community License (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at
*
* https://www.aklivity.io/aklivity-community-license/
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package io.aklivity.zilla.runtime.binding.sse.kafka.internal;

import static io.aklivity.zilla.runtime.binding.sse.kafka.internal.SseKafkaConfiguration.SSE_KAFKA_MAXIMUM_KEY_LENGTH;
import static org.junit.Assert.assertEquals;

import org.junit.Test;

/**
 * Verifies that the sse-kafka configuration property constants resolve to
 * their expected fully-qualified system property names.
 */
public class SseKafkaConfigurationTest
{
    // Fully-qualified property name expected for the maximum.key.length option.
    public static final String SSE_KAFKA_MAXIMUM_KEY_LENGTH_NAME =
        "zilla.binding.sse.kafka.maximum.key.length";

    @Test
    public void shouldVerifyConstants() throws Exception
    {
        // JUnit convention is assertEquals(expected, actual); the original had
        // the arguments reversed, which produces misleading failure messages.
        assertEquals(SSE_KAFKA_MAXIMUM_KEY_LENGTH_NAME, SSE_KAFKA_MAXIMUM_KEY_LENGTH.name());
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -339,7 +339,6 @@ private final class SseServer
private BudgetDebitor replyDebitor;
private long replyDebitorIndex = NO_DEBITOR_INDEX;


private SseServer(
MessageConsumer network,
long routeId,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,5 +28,6 @@ connected
read zilla:data.ext ${sse:matchDataEx()
.typeId(zilla:id("sse"))
.id("AQQABAIC")
.type("delete")
.build()}
read zilla:data.null
read "a2V5"
Original file line number Diff line number Diff line change
Expand Up @@ -31,5 +31,7 @@ write flush
write zilla:data.ext ${sse:dataEx()
.typeId(zilla:id("sse"))
.id("AQQABAIC")
.type("delete")
.build()}
write "a2V5"
write flush