Skip to content

Commit 40b72da

Browse files
committed
Upgrade Testing for Kafka connector
1 parent b7793cf commit 40b72da

File tree

5 files changed

+142
-8
lines changed

5 files changed

+142
-8
lines changed

.circleci/config.yml

Lines changed: 18 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ jobs:
1919
CI_DATAGEN_IMAGE: rock1017/log-generator:latest
2020
CI_KAFKA_VERSION: 2.4.0
2121
CI_KAFKA_FILENAME: kafka_2.13-2.4.0.tgz
22+
CI_OLD_CONNECTOR_VERSION: v1.2.0
2223
working_directory: ~/repo
2324
steps:
2425
- attach_workspace:
@@ -120,22 +121,34 @@ jobs:
120121
sudo systemctl start zookeeper
121122
sudo systemctl start kafka
122123
- run:
123-
name: Start kafka connect
124+
name: Setup kafka connect
124125
command: |
125126
sudo mkdir -p /usr/local/share/kafka/plugins/
126-
sudo cp /home/circleci/repo/target/splunk-kafka-connect*.jar /usr/local/share/kafka/plugins/
127+
wget https://github.com/splunk/kafka-connect-splunk/releases/download/$CI_OLD_CONNECTOR_VERSION/splunk-kafka-connect-$CI_OLD_CONNECTOR_VERSION.jar
128+
sudo cp splunk-kafka-connect-$CI_OLD_CONNECTOR_VERSION.jar /usr/local/share/kafka/plugins/
127129
sed -i 's/plugin\.path\=connectors\//plugin\.path\=\/usr\/local\/share\/kafka\/plugins\//' /home/circleci/repo/config/connect-distributed-quickstart.properties
128130
sed -i 's/key\.converter\=org\.apache\.kafka\.connect\.storage\.StringConverter/key\.converter\=org\.apache\.kafka\.connect\.json\.JsonConverter/' /home/circleci/repo/config/connect-distributed-quickstart.properties
129131
sed -i 's/value\.converter\=org\.apache\.kafka\.connect\.storage\.StringConverter/value\.converter\=org\.apache\.kafka\.connect\.json\.JsonConverter/' /home/circleci/repo/config/connect-distributed-quickstart.properties
130-
sudo /usr/local/kafka/bin/connect-distributed.sh /home/circleci/repo/config/connect-distributed-quickstart.properties
131-
background: true
132132
- run:
133-
name: Run Functional tests
133+
name: Test kafka connect upgrade
134134
command: |
135135
pyenv global 3.6.5
136136
pip install --upgrade pip
137137
pip install -r test/requirements.txt
138138
export PYTHONWARNINGS="ignore:Unverified HTTPS request"
139+
echo "Test kafka connect upgrade ..."
140+
python test/lib/connector_upgrade.py
141+
- run:
142+
name: Start kafka connect
143+
command: |
144+
sudo /usr/local/kafka/bin/connect-distributed.sh /home/circleci/repo/config/connect-distributed-quickstart.properties
145+
background: true
146+
- run:
147+
name: Run Functional tests
148+
command: |
149+
sleep 5
150+
sudo apt-get install jq
151+
curl localhost:8083/connector-plugins | jq
139152
echo "Running functional tests....."
140153
python -m pytest -p no:warnings -s
141154

test/config.yaml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,4 +10,9 @@ kafka_connect_url: http://127.0.0.1:8083
1010
kafka_topic: test-datagen
1111
kafka_topic_2: kafka_topic_2
1212
kafka_header_topic: kafka_header_topic
13-
kafka_header_index: kafka
13+
kafka_header_index: kafka
14+
connector_path: /usr/local/share/kafka/plugins
15+
connector_build_target: /home/circleci/repo/target
16+
kafka_home: /usr/local/kafka
17+
kafka_connect_home: /home/circleci/repo
18+
old_connector_name: splunk-kafka-connect-v1.2.0.jar

test/lib/commonsplunk.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -227,7 +227,7 @@ def _get_events(job_id, url="", user="", password=""):
227227
@param: job_id
228228
returns events
229229
'''
230-
event_url = '{0}/services/search/jobs/{1}/events?output_mode=json'.format(
230+
event_url = '{0}/services/search/jobs/{1}/events?output_mode=json&count=3000'.format(
231231
url, str(job_id))
232232
logger.debug('requesting: %s', event_url)
233233

test/lib/connector_upgrade.py

Lines changed: 110 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,110 @@
from kafka.producer import KafkaProducer
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))

from lib.commonsplunk import check_events_from_splunk
from lib.commonkafka import *
from lib.helper import *
from datetime import datetime
import threading
import logging.config
import yaml
import subprocess
import logging
import time

# Upgrade scenario for the Splunk Kafka connector: starts the previously
# released jar, generates events, swaps in the freshly built jar, and
# verifies event delivery in Splunk.
logging.config.fileConfig(os.path.join(get_test_folder(), "logging.conf"))
logger = logging.getLogger('connector_upgrade')

_config_path = os.path.join(get_test_folder(), 'config.yaml')
with open(_config_path, 'r') as yaml_file:
    # safe_load avoids arbitrary-object construction and the
    # "yaml.load() without Loader is deprecated" warning on PyYAML >= 5.1.
    config = yaml.safe_load(yaml_file)
now = datetime.now()
# Unique marker stamped into every generated event so the final Splunk
# search only matches events from this run.
_time_stamp = str(datetime.timestamp(now))
25+
26+
27+
def start_old_connector():
    """Launch Kafka Connect with the previously released connector jar.

    Checks that the old jar is present in the plugin directory, then starts
    connect-distributed in the background from the Kafka home directory.
    Failures to spawn the shell are logged, not raised.
    """
    cmds = ["test -f {0}/{1} && echo {0}/{1}".format(config["connector_path"], config["old_connector_name"]),
            "cd {}".format(config["kafka_home"]),
            "sudo ./bin/connect-distributed.sh {}/config/connect-distributed-quickstart.properties &".
            format(config["kafka_connect_home"])]

    cmd = "\n".join(cmds)
    try:
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        # communicate() drains stdout so the child cannot block on a full
        # pipe buffer (proc.wait() with stdout=PIPE risks a deadlock), and
        # matches the pattern already used in upgrade_connector().
        output, _ = proc.communicate()
        logger.info(output)
    except OSError as e:
        logger.error(e)
40+
41+
42+
def generate_kafka_events(num):
    """Create the Splunk sink connector and its topic, then publish *num*
    timestamped JSON events to the kafka_data_gen topic."""
    sink_definition = {
        "name": "kafka_connect",
        "config": {
            "connector.class": "com.splunk.kafka.connect.SplunkSinkConnector",
            "tasks.max": "1",
            "splunk.indexes": config["splunk_index"],
            "topics": "kafka_data_gen",
            "splunk.hec.ack.enabled": "false",
            "splunk.hec.uri": config["splunk_hec_url"],
            "splunk.hec.ssl.validate.certs": "false",
            "splunk.hec.token": config["splunk_token"]
        }
    }
    create_kafka_connector(config, sink_definition)
    create_kafka_topics(config, ["kafka_data_gen"])

    kafka_producer = KafkaProducer(bootstrap_servers=config["kafka_broker_url"],
                                   value_serializer=lambda v: json.dumps(v).encode('utf-8'))
    for _ in range(num):
        # Every event carries the run's timestamp marker for later lookup.
        kafka_producer.send("kafka_data_gen", {"timestamp": _time_stamp})
        time.sleep(0.05)  # throttle so events spread out over time
    kafka_producer.flush()
68+
69+
70+
def upgrade_connector():
    """Kill the running Kafka Connect worker, swap the old connector jar
    for the freshly built one, and restart connect-distributed."""
    steps = []
    # Stop whatever is serving the Connect REST port.
    steps.append("sudo kill $(sudo lsof -t -i:8083) && sleep 2")
    # Replace the released jar with the locally built artifact.
    steps.append("sudo rm {}/{} && sleep 2".format(config["connector_path"], config["old_connector_name"]))
    steps.append("sudo cp {0}/splunk-kafka-connect*.jar {1} && sleep 2".format(config["connector_build_target"],
                                                                               config["connector_path"]))
    # Relaunch the worker in the background from the Kafka home directory.
    steps.append("cd {}".format(config["kafka_home"]))
    steps.append("sudo ./bin/connect-distributed.sh {}/config/connect-distributed-quickstart.properties &".
                 format(config["kafka_connect_home"]))

    script = "\n".join(steps)
    try:
        worker = subprocess.Popen(script, shell=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        out, _ = worker.communicate()
        logger.info(out)
    except OSError as err:
        logger.error(err)
87+
88+
89+
if __name__ == '__main__':
    def _spawn(target, *args):
        # Fire-and-forget daemon thread; the main thread paces the scenario
        # with fixed sleeps instead of joining.
        worker = threading.Thread(target=target, args=args, daemon=True)
        worker.start()
        return worker

    logger.info("Start old Kafka connector ...")
    _spawn(start_old_connector)
    time.sleep(10)

    logger.info("Generate Kafka events ...")
    _spawn(generate_kafka_events, 2000)
    time.sleep(50)

    logger.info("Upgrade Kafka connector ...")
    _spawn(upgrade_connector)
    time.sleep(100)

    # Search Splunk for this run's timestamp marker and require that every
    # generated event arrived despite the mid-stream connector upgrade.
    search_query = "index={0} | search timestamp=\"{1}\"".format(config['splunk_index'], _time_stamp)
    logger.info(search_query)
    events = check_events_from_splunk(start_time="-15m@m",
                                      url=config["splunkd_url"],
                                      user=config["splunk_user"],
                                      query=["search {}".format(search_query)],
                                      password=config["splunk_password"])
    logger.info("Splunk received %s events in the last 15m", len(events))
    assert len(events) == 2000

test/logging.conf

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
[loggers]
2-
keys=root,kafka,splunk,test.conftest,test_case
2+
keys=root,kafka,splunk,test.conftest,test_case,connector_upgrade
33

44
[handlers]
55
keys=consoleHandler
@@ -35,6 +35,12 @@ propagate=1
3535
handlers=
3636
qualname=test_case
3737

38+
[logger_connector_upgrade]
39+
level=DEBUG
40+
propagate=1
41+
handlers=
42+
qualname=connector_upgrade
43+
3844
[handler_consoleHandler]
3945
class=StreamHandler
4046
level=DEBUG

0 commit comments

Comments
 (0)