Skip to content

Commit 4e3d5e1

Browse files
author
OpenShift Bot
authored
Merge pull request #567 from richm/port-json-parsing
Merged by openshift-bot
2 parents fb4a987 + 5d9451d commit 4e3d5e1

File tree

3 files changed

+52
-56
lines changed

3 files changed

+52
-56
lines changed

hack/testing/entrypoint.sh

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,6 @@ fi
9595

9696
expected_failures=(
9797
"test-fluentd-forward"
98-
"test-json-parsing"
9998
"test-es-copy"
10099
"test-mux"
101100
"test-upgrade"

hack/testing/test-json-parsing.sh

Lines changed: 3 additions & 55 deletions
Original file line numberDiff line numberDiff line change
#!/bin/bash

# Compatibility shim: the json-parsing test proper now lives in
# test/json-parsing.sh. This wrapper keeps the historical
# hack/testing/test-json-parsing.sh entry point working by
# delegating to the relocated script.
#
# NOTE(review): reconstructed from a garbled diff rendering — confirm
# against the repository before applying.

# init.sh is expected to set OS_O_A_L_DIR (repo checkout root) —
# the exec below depends on it.
source "$(dirname "${BASH_SOURCE[0]}" )/../lib/init.sh"

# Quote the path so a checkout directory containing spaces still works;
# exec replaces this shell so the delegated script's exit status is ours.
exec "${OS_O_A_L_DIR}/test/json-parsing.sh"

test/json-parsing.sh

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
#!/bin/bash

# test that logging will parse the message field containing
# embedded JSON into its component fields, and use the
# original message field in the embedded JSON
#
# NOTE(review): reconstructed from a garbled diff rendering — confirm
# against the repository before applying.

source "$(dirname "${BASH_SOURCE[0]}" )/../hack/lib/init.sh"
source "${OS_O_A_L_DIR}/deployer/scripts/util.sh"
os::util::environment::use_sudo

os::test::junit::declare_suite_start "test/json-parsing"

# Extra command tracing when DEBUG is set in the environment.
if [ -n "${DEBUG:-}" ] ; then
    set -x
fi

# Runs on every exit path (trap EXIT below): logs the final status at the
# appropriate level and closes out the junit suite, preserving the
# script's original exit code.
cleanup() {
    local return_code="$?"
    set +e
    local mycmd
    if [ "$return_code" = 0 ] ; then
        mycmd=os::log::info
    else
        mycmd=os::log::error
    fi
    $mycmd json-parsing test finished at $( date )
    # this will call declare_test_end, suite_end, etc.
    os::test::junit::reconcile_output
    exit "$return_code"
}
trap "cleanup" EXIT

os::log::info Starting json-parsing test at $( date )

# generate a log message in the Kibana logs - Kibana log messages are in JSON format:
# {"type":"response","@timestamp":"2017-04-07T02:03:37Z","tags":[],"pid":1,"method":"get","statusCode":404,"req":{"url":"/ca30cead-d470-4db8-a2a2-bb71439987e2","method":"get","headers":{"user-agent":"curl/7.29.0","host":"localhost:5601","accept":"*/*"},"remoteAddress":"127.0.0.1","userAgent":"127.0.0.1"},"res":{"statusCode":404,"responseTime":3,"contentLength":9},"message":"GET /ca30cead-d470-4db8-a2a2-bb71439987e2 404 3ms - 9.0B"}
# logging should parse this and make "type", "tags", "statusCode", etc. as top level fields
# the "message" field should contain only the embedded message and not the entire JSON blob

# Callback passed to wait_for_fluentd_to_catch_up: records the UUID of
# the test record so we can query Elasticsearch for it below.
get_uuid_es() {
    json_test_uuid=$1
}
wait_for_fluentd_ready
wait_for_fluentd_to_catch_up get_uuid_es

es_pod=$( get_running_pod es )

os::log::info Testing if record is in correct format . . .
# Search the project.logging.* index for the test record and feed the hit
# to the python checker, which validates the parsed-JSON field layout.
os::cmd::expect_success "curl_es $es_pod /project.logging.*/_search?q=message:$json_test_uuid | \
    python $OS_O_A_L_DIR/hack/testing/test-json-parsing.py $json_test_uuid"

0 commit comments

Comments
 (0)