-
Notifications
You must be signed in to change notification settings - Fork 14.6k
KAFKA-19668: processValue() must be declared as value-changing operation #20470
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -1306,7 +1306,12 @@ public <KOut, VOut> KStream<KOut, VOut> process( | |
final ProcessorToStateConnectorNode<? super K, ? super V> processNode = new ProcessorToStateConnectorNode<>( | ||
name, | ||
new ProcessorParameters<>(processorSupplier, name), | ||
stateStoreNames); | ||
stateStoreNames | ||
); | ||
if (builder.processProcessValueFixEnabled()) { | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I also added this here -- it's currently not strictly necessary, but it just feels correct to add anyway, as we have plans to actually unify There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Cf. #18800 \cc @appchemist for visibility There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Thank you! |
||
processNode.setKeyChangingOperation(true); | ||
processNode.setValueChangingOperation(true); | ||
} | ||
|
||
builder.addGraphNode(graphNode, processNode); | ||
|
||
|
@@ -1350,7 +1355,11 @@ public <VOut> KStream<K, VOut> processValues( | |
final ProcessorToStateConnectorNode<? super K, ? super V> processNode = new ProcessorToStateConnectorNode<>( | ||
name, | ||
new ProcessorParameters<>(processorSupplier, name), | ||
stateStoreNames); | ||
stateStoreNames | ||
); | ||
if (builder.processProcessValueFixEnabled()) { | ||
processNode.setValueChangingOperation(true); | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The line! There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Yep. Basically a one-liner fix. |
||
} | ||
|
||
builder.addGraphNode(graphNode, processNode); | ||
// cannot inherit value serde | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -36,6 +36,8 @@ | |
import org.apache.kafka.streams.kstream.Suppressed; | ||
import org.apache.kafka.streams.kstream.TimeWindows; | ||
import org.apache.kafka.streams.kstream.ValueJoiner; | ||
import org.apache.kafka.streams.processor.api.ContextualFixedKeyProcessor; | ||
import org.apache.kafka.streams.processor.api.FixedKeyRecord; | ||
import org.apache.kafka.streams.processor.api.Processor; | ||
import org.apache.kafka.streams.processor.api.ProcessorContext; | ||
import org.apache.kafka.streams.processor.api.ProcessorSupplier; | ||
|
@@ -127,7 +129,7 @@ public void shouldNotThrowNPEWithMergeNodes() { | |
initializer = () -> ""; | ||
aggregator = (aggKey, value, aggregate) -> aggregate + value.length(); | ||
final ProcessorSupplier<String, String, String, String> processorSupplier = | ||
() -> new Processor<String, String, String, String>() { | ||
() -> new Processor<>() { | ||
private ProcessorContext<String, String> context; | ||
|
||
@Override | ||
|
@@ -186,13 +188,12 @@ public void process(final Record<String, String> record) { | |
|
||
@Test | ||
public void shouldNotOptimizeWithValueOrKeyChangingOperatorsAfterInitialKeyChange() { | ||
|
||
final Topology attemptedOptimize = getTopologyWithChangingValuesAfterChangingKey(StreamsConfig.OPTIMIZE); | ||
final Topology noOptimization = getTopologyWithChangingValuesAfterChangingKey(StreamsConfig.NO_OPTIMIZATION); | ||
|
||
assertEquals(attemptedOptimize.describe().toString(), noOptimization.describe().toString()); | ||
assertEquals(2, getCountOfRepartitionTopicsFound(attemptedOptimize.describe().toString())); | ||
assertEquals(2, getCountOfRepartitionTopicsFound(noOptimization.describe().toString())); | ||
assertEquals(3, getCountOfRepartitionTopicsFound(attemptedOptimize.describe().toString())); | ||
assertEquals(3, getCountOfRepartitionTopicsFound(noOptimization.describe().toString())); | ||
} | ||
|
||
@Test | ||
|
@@ -228,7 +229,6 @@ public void shouldNotOptimizeWhenRepartitionOperationIsDone() { | |
} | ||
|
||
private Topology getTopologyWithChangingValuesAfterChangingKey(final String optimizeConfig) { | ||
|
||
final StreamsBuilder builder = new StreamsBuilder(); | ||
final Properties properties = new Properties(); | ||
properties.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, optimizeConfig); | ||
|
@@ -238,9 +238,15 @@ private Topology getTopologyWithChangingValuesAfterChangingKey(final String opti | |
|
||
mappedKeyStream.mapValues(v -> v.toUpperCase(Locale.getDefault())).groupByKey().count().toStream().to("output"); | ||
mappedKeyStream.flatMapValues(v -> Arrays.asList(v.split("\\s"))).groupByKey().windowedBy(TimeWindows.ofSizeWithNoGrace(ofMillis(5000))).count().toStream().to("windowed-output"); | ||
mappedKeyStream.processValues( | ||
() -> new ContextualFixedKeyProcessor<>() { | ||
@Override | ||
public void process(final FixedKeyRecord<String, String> record) { | ||
context().forward(record.withValue(record.value().toUpperCase(Locale.getDefault()))); | ||
} | ||
}).groupByKey().count().toStream().to("output"); | ||
|
||
return builder.build(properties); | ||
|
||
} | ||
|
||
private Topology getTopologyWithRepartitionOperation(final String optimizeConfig) { | ||
|
@@ -386,20 +392,20 @@ private int getCountOfRepartitionTopicsFound(final String topologyString) { | |
" Source: KSTREAM-SOURCE-0000000000 (topics: [retryTopic])\n" + | ||
" --> KSTREAM-PROCESSOR-0000000001\n" + | ||
" Processor: KSTREAM-PROCESSOR-0000000001 (stores: [])\n" + | ||
" --> KSTREAM-FILTER-0000000005\n" + | ||
" --> KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition-filter\n" + | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I am not 100% sure what triggers this re-naming, but I believe it's only in-memory processor names, so I believe it's ok? -- The structure of the topology does not change, and all stateful things (stores, topics) keep their names. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Yes it should be, especially since this is a |
||
" <-- KSTREAM-SOURCE-0000000000\n" + | ||
" Processor: KSTREAM-FILTER-0000000005 (stores: [])\n" + | ||
" --> KSTREAM-SINK-0000000004\n" + | ||
" Processor: KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition-filter (stores: [])\n" + | ||
" --> KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition-sink\n" + | ||
" <-- KSTREAM-PROCESSOR-0000000001\n" + | ||
" Sink: KSTREAM-SINK-0000000004 (topic: KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition)\n" + | ||
" <-- KSTREAM-FILTER-0000000005\n" + | ||
" Sink: KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition-sink (topic: KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition)\n" + | ||
" <-- KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition-filter\n" + | ||
"\n" + | ||
" Sub-topology: 1\n" + | ||
" Source: KSTREAM-SOURCE-0000000006 (topics: [KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition])\n" + | ||
" Source: KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition-source (topics: [KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition])\n" + | ||
" --> KSTREAM-AGGREGATE-0000000003\n" + | ||
" Processor: KSTREAM-AGGREGATE-0000000003 (stores: [KSTREAM-AGGREGATE-STATE-STORE-0000000002])\n" + | ||
" --> KTABLE-SUPPRESS-0000000007\n" + | ||
" <-- KSTREAM-SOURCE-0000000006\n" + | ||
" <-- KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition-source\n" + | ||
" Source: KSTREAM-SOURCE-0000000019 (topics: [internal-topic-command])\n" + | ||
" --> KSTREAM-PEEK-0000000020\n" + | ||
" Processor: KTABLE-SUPPRESS-0000000007 (stores: [KTABLE-SUPPRESS-STATE-STORE-0000000008])\n" + | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Is the cast required? Wouldn't the JVM use autoboxing to convert between Boolean and boolean?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yes, because the type of value is Object. We could also cast to Boolean. So some auto-conversion/unboxing happens anyway.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I actually c&p this from
StreamsConfig.InternalConfigs
:)