Skip to content

Commit 3211ba6

Browse files
authored
Updated examples for bullet 1.0.0 (#38)
1 parent 10bbfcd commit 3211ba6

File tree

10 files changed

+59
-767
lines changed

10 files changed

+59
-767
lines changed

docs/ws/examples.md

Lines changed: 27 additions & 731 deletions
Large diffs are not rendered by default.

examples/spark/pom.xml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,14 @@
44
<modelVersion>4.0.0</modelVersion>
55
<groupId>com.yahoo.bullet</groupId>
66
<artifactId>bullet-spark-example</artifactId>
7-
<version>0.0.1-SNAPSHOT</version>
7+
<version>1.0.0-SNAPSHOT</version>
88
<packaging>jar</packaging>
99
<properties>
1010
<scala.version>2.11.7</scala.version>
1111
<scala.dep.version>2.11</scala.dep.version>
1212
<spark.version>2.3.0</spark.version>
13-
<bullet.spark.version>0.2.2</bullet.spark.version>
14-
<bullet.record.version>0.2.0</bullet.record.version>
13+
<bullet.spark.version>1.0.0</bullet.spark.version>
14+
<bullet.record.version>1.0.0</bullet.record.version>
1515
</properties>
1616

1717
<repositories>

examples/spark/src/main/resources/bullet_spark_kafka_settings.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,4 +110,4 @@ bullet.query.aggregation.top.k.sketch.entries: 1024
110110
bullet.query.aggregation.top.k.sketch.error.type: "NFN"
111111
bullet.result.metadata.enable: true
112112
# Factory class to get new BulletRecords.
113-
bullet.record.provider.class.name: "com.yahoo.bullet.record.SimpleBulletRecordProvider"
113+
bullet.record.provider.class.name: "com.yahoo.bullet.record.simple.SimpleBulletRecordProvider"

examples/spark/src/main/scala/com/yahoo/bullet/spark/examples/RandomProducer.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,10 @@ import org.apache.spark.streaming.StreamingContext
1313
import org.apache.spark.streaming.dstream.DStream
1414

1515
class RandomProducer extends DataProducer {
16-
override def getBulletRecordStream(ssc: StreamingContext, config: BulletSparkConfig): DStream[BulletRecord] = {
16+
override def getBulletRecordStream(ssc: StreamingContext, config: BulletSparkConfig): DStream[BulletRecord[_ <: java.io.Serializable]] = {
1717
// Bullet record input stream.
1818
val bulletReceiver = new RandomReceiver(config)
19-
ssc.receiverStream(bulletReceiver).asInstanceOf[DStream[BulletRecord]]
19+
ssc.receiverStream(bulletReceiver).asInstanceOf[DStream[BulletRecord[_ <: java.io.Serializable]]]
2020
}
2121
}
2222

examples/spark/src/main/scala/com/yahoo/bullet/spark/examples/receiver/RandomReceiver.scala

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,13 +11,12 @@ import java.util.Map
1111
import java.util.Arrays.asList
1212

1313
import scala.util.Random
14-
15-
import com.yahoo.bullet.record.{BulletRecord, SimpleBulletRecord}
14+
import com.yahoo.bullet.record.BulletRecord
15+
import com.yahoo.bullet.record.simple.TypedSimpleBulletRecord
1616
import com.yahoo.bullet.spark.utils.{BulletSparkConfig, BulletSparkLogger}
1717
import org.apache.spark.storage.StorageLevel
1818
import org.apache.spark.streaming.receiver.Receiver
1919

20-
2120
object RandomReceiver {
2221
// Fields in BulletRecord
2322
private val STRING = "uuid"
@@ -47,7 +46,7 @@ object RandomReceiver {
4746
* @param config The BulletSparkConfig to load settings from.
4847
*/
4948
class RandomReceiver(val config: BulletSparkConfig)
50-
extends Receiver[BulletRecord](StorageLevel.MEMORY_AND_DISK_SER) with BulletSparkLogger {
49+
extends Receiver[BulletRecord[_ <: java.io.Serializable]](StorageLevel.MEMORY_AND_DISK_SER) with BulletSparkLogger {
5150
// Number of tuples to emit
5251
private val maxPerPeriod = 100L
5352
// Period in milliseconds. Default 1000 ms
@@ -100,8 +99,8 @@ class RandomReceiver(val config: BulletSparkConfig)
10099
randomMap
101100
}
102101

103-
private def generateRecord(): BulletRecord = {
104-
val record = new SimpleBulletRecord()
102+
private def generateRecord(): BulletRecord[_ <: java.io.Serializable] = {
103+
val record = new TypedSimpleBulletRecord()
105104
val uuid = UUID.randomUUID().toString
106105
record.setString(RandomReceiver.STRING, uuid)
107106
record.setLong(RandomReceiver.LONG, generatedThisPeriod)

examples/storm/bin/launch.sh

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,10 @@
11
#! /bin/bash
22

33
# We pass 20 and 100 to the RandomSpout, which means it generates up to 20 random records every 100 ms.
4-
storm jar bullet-storm-example-1.0-SNAPSHOT-jar-with-dependencies.jar \
4+
storm jar bullet-storm-example-1.0.0-SNAPSHOT-jar-with-dependencies.jar \
55
com.yahoo.bullet.storm.Topology \
66
--bullet-conf ./bullet_settings.yaml \
77
--bullet-spout com.yahoo.bullet.storm.examples.RandomSpout \
8-
--bullet-spout-parallelism 1 \
9-
--bullet-spout-cpu-load 100.0 \
10-
--bullet-spout-on-heap-memory-load 128.0 \
11-
--bullet-spout-off-heap-memory-load 196.0 \
12-
--bullet-spout-arg 20 \
13-
--bullet-spout-arg 101 \
148
-c topology.acker.executors=1 \
159
-c topology.max.spout.pending=1000 \
1610
-c topology.backpressure.enable=false

examples/storm/pom.xml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
<modelVersion>4.0.0</modelVersion>
44
<groupId>com.yahoo.bullet</groupId>
55
<artifactId>bullet-storm-example</artifactId>
6-
<version>1.0-SNAPSHOT</version>
6+
<version>1.0.0-SNAPSHOT</version>
77
<packaging>jar</packaging>
88
<name>bullet-storm-example</name>
99
<scm>
@@ -26,10 +26,10 @@
2626
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
2727
<maven.compiler.source>1.8</maven.compiler.source>
2828
<maven.compiler.target>1.8</maven.compiler.target>
29-
<bullet.storm.version>0.8.5</bullet.storm.version>
30-
<bullet.core.version>0.6.4</bullet.core.version>
31-
<bullet.record.version>0.3.0</bullet.record.version>
32-
<storm.version>1.1.3</storm.version>
29+
<bullet.storm.version>1.0.0</bullet.storm.version>
30+
<bullet.core.version>1.0.0</bullet.core.version>
31+
<bullet.record.version>1.0.0</bullet.record.version>
32+
<storm.version>2.1.0</storm.version>
3333
</properties>
3434

3535
<dependencies>

examples/storm/src/main/java/com/yahoo/bullet/storm/examples/RandomSpout.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,8 @@
66
package com.yahoo.bullet.storm.examples;
77

88
import com.yahoo.bullet.common.BulletConfig;
9-
import com.yahoo.bullet.record.AvroBulletRecord;
109
import com.yahoo.bullet.record.BulletRecord;
10+
import com.yahoo.bullet.record.avro.TypedAvroBulletRecord;
1111
import lombok.extern.slf4j.Slf4j;
1212
import org.apache.storm.spout.SpoutOutputCollector;
1313
import org.apache.storm.task.TopologyContext;
@@ -144,7 +144,7 @@ private Map<String, String> makeRandomMap() {
144144
}
145145

146146
private BulletRecord generateRecord() {
147-
BulletRecord record = new AvroBulletRecord();
147+
BulletRecord record = new TypedAvroBulletRecord();
148148
String uuid = UUID.randomUUID().toString();
149149

150150
record.setString(STRING, uuid);

examples/storm/src/main/resources/bullet_settings.yaml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,13 @@ bullet.topology.join.bolt.query.post.finish.buffer.ticks: 3
3030
bullet.topology.join.bolt.query.pre.start.delay.ticks: 2
3131
bullet.topology.tick.spout.interval.ms: 100
3232

33+
bullet.topology.bullet.spout.class.name: "com.yahoo.bullet.storm.examples.RandomSpout"
34+
bullet.topology.bullet.spout.args: []
35+
bullet.topology.bullet.spout.parallelism: 1
36+
bullet.topology.bullet.spout.cpu.load: 25.0
37+
bullet.topology.bullet.spout.memory.on.heap.load: 128.0
38+
bullet.topology.bullet.spout.memory.off.heap.load: 160.0
39+
3340
# Bullet Core settings
3441
bullet.query.aggregation.raw.max.size: 500
3542
bullet.query.aggregation.max.size: 1024

examples/web-service/example_columns.json

Lines changed: 6 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -31,26 +31,23 @@
3131
},
3232
{
3333
"name": "subtypes",
34-
"type": "MAP",
35-
"subtype": "STRING",
34+
"type": "STRING_MAP",
3635
"description": "Contains two keys whose values are randomly chosen from: foo, bar, baz, qux, quux, norf",
37-
"enumerations": [
36+
"subFields": [
3837
{"name": "field_A", "description": "Value randomly chosen from: foo, bar, baz, qux, quux, norf"},
3938
{"name": "field_B", "description": "Value randomly chosen from: foo, bar, baz, qux, quux, norf"}
4039
]
4140
},
4241
{
4342
"name": "tags",
44-
"type": "MAP",
45-
"subtype": "BOOLEAN",
43+
"type": "BOOLEAN_MAP",
4644
"description": "Contains four keys which are four fragments of the uuid. The values are randomly generated boolean values from Random#nextBoolean"
4745
},
4846
{
4947
"name": "stats",
50-
"type": "MAP",
51-
"subtype": "LONG",
48+
"type": "LONG_MAP",
5249
"description": "This map contains some numeric information such as the current number of periods etc.",
53-
"enumerations": [
50+
"subFields": [
5451
{"name": "period_count", "description": "The period in which this record was generated"},
5552
{"name": "record_number", "description": "A monotonically increasing id for the record. There may be gaps in the id but if the data generation has kept up with your maximum tuples per period, this is the nth tuple generated"},
5653
{"name": "timestamp", "description": "The ms time when this record was generated"},
@@ -59,8 +56,7 @@
5956
},
6057
{
6158
"name": "classifiers",
62-
"type": "LIST",
63-
"subtype": "MAP",
59+
"type": "STRING_MAP_LIST",
6460
"description": "This contains two maps, each with: field_A and field_B whose values are randomly chosen from: foo, bar, baz, qux, quux, norf"
6561
}
6662
]

0 commit comments

Comments (0)