
Commit 1c2543e

Merge pull request #2215 from basho/dpb/better_deploy_error_messages
Improve readability of s3_deploy error messages
2 parents fdfc7e9 + 3e788a1 · commit 1c2543e


1 file changed: +19 -13 lines changed


rake_libs/s3_deploy.rb

Lines changed: 19 additions & 13 deletions
@@ -2,6 +2,7 @@
 # Deploy rules and helpers
 
 $archive_name = "archived_docs.basho.com.tar.bz2"
+$archive_url = "http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}"
 
 def do_fetch_archived_content()
   # Fetch and extract the archived content that we want to survive from the
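The only change in this first hunk is deduplication: $archive_url interpolates $archive_name into the S3 download path once, so the wget call and both abort messages further down can reference a single definition. A minimal standalone illustration of what the new global evaluates to (the two assignments are taken from the diff; printing the result is only for demonstration):

    $archive_name = "archived_docs.basho.com.tar.bz2"
    $archive_url  = "http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}"

    # Prints:
    # http://s3.amazonaws.com/downloads.basho.com/documentation_content/archived_docs.basho.com.tar.bz2
    puts $archive_url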
@@ -13,21 +14,22 @@ def do_fetch_archived_content()
       # If we don't have wget. Error out.
       Kernel.abort("ERROR: #{$archive_name} was not found, and this system "\
                    "does not have access to `wget`.\n"\
-                   " Please either install `wget` and re-run this "\
-                   "deploy, or manually download the file from the below "\
-                   "address and place it into this directory.\n"\
-                   " http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}")
+                   " Please either;\n"\
+                   " * install `wget` and re-run this deploy, or\n"\
+                   " * manually download the file from the below URL "\
+                   "and place it into this directory.\n"\
+                   " #{$archive_url}")
     else
       # We have wget, but not the file. Fetch it.
       puts(" Using wget to fetch #{$archive_name} "\
           "(this may take some time)...")
-      successful = system("wget http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}")
+      successful = system("wget #{$archive_url}")
 
       if (not successful)
         Kernel.abort("ERROR: Failed to get #{$archive_name}\n"\
-                     " Please download the file from the below "\
-                     "address and copy it into this directory.\n"\
-                     " http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}")
+                     " Please download the file from the below URL "\
+                     "and copy it into this directory.\n"\
+                     " #{$archive_url}")
       end
     end
   end
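All of the reworded messages in this hunk rely on the same Ruby idiom: a trailing backslash continues the logical line, and adjacent string literals are concatenated at parse time, so the quoted fragments become one multi-line string handed to Kernel.abort. A small self-contained sketch of that mechanism (the message text here is invented; only the idiom matches the diff):

    message = "ERROR: something was not found.\n"\
              " Please either;\n"\
              " * install the missing tool and re-run, or\n"\
              " * download the file manually.\n"

    # The four fragments were joined into one string at parse time;
    # the deploy script passes strings built this way to Kernel.abort.
    puts message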
@@ -51,9 +53,11 @@ def do_fetch_archived_content()
   if (web_md5 != loc_md5)
     Kernel.abort("ERROR: Fetched #{$archive_name} does not match the "\
                  "expected md5sum.\n"\
-                 " Please remove the current #{$archive_name}, reset "\
-                 "the contents of the static/ directory (`git clean -xdf "\
-                 "static; git checkout -f static`), and re-run this script.")
+                 " Please:\n"\
+                 " * remove (`rm`) the current #{$archive_name}\n"\
+                 " * reset the contents of the static/ directory "\
+                 "(`git clean -xdf static; git checkout -f static`)\n"\
+                 " * re-run this script.")
   end
 end
 
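For context, the guard in this hunk compares an expected checksum (web_md5) against one computed from the downloaded archive (loc_md5); neither value is computed in the lines shown. The sketch below only illustrates the shape of such a check: Digest::MD5 and open-uri are Ruby standard library, but the "#{$archive_url}.md5" location and the exact wiring are assumptions, not the script's actual code.

    require 'digest'
    require 'open-uri'

    # Checksum of the local archive (Ruby stdlib).
    loc_md5 = Digest::MD5.file($archive_name).hexdigest

    # Assumed: a published checksum living next to the archive; the real script
    # may obtain web_md5 some other way.
    web_md5 = URI.open("#{$archive_url}.md5") { |f| f.read.split.first }

    if (web_md5 != loc_md5)
      Kernel.abort("ERROR: Fetched #{$archive_name} does not match the expected md5sum.")
    end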
@@ -72,8 +76,10 @@ def do_fetch_archived_content()
                       (File.exist?("static/shared")) )
   if (not all_dirs_present and any_dirs_present)
     Kernel.abort("ERRPR: The static/ directory is verifiably corrupt.\n"\
-                 " Please run \`git clean -xdf static/\` to clear out "\
-                 "the malformed files, and re-run this deploy script.")
+                 " Please:\n"
+                 " * reset the contents of the static/ directory "\
+                 "(`git clean -xdf static; git checkout -f static`)\n"\
+                 " * re-run this script.")
   elsif (not any_dirs_present)
     puts("Extracting #{$archive_name} (this may take a lot of time)...")
     successful = system("tar -xjf #{$archive_name} -C static")
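The final hunk distinguishes a partially populated static/ tree (abort and ask for a clean reset) from an empty one (extract the archive into it). Below is a rough sketch of how that branch reads after the change; only the static/shared check, the abort wording, and the tar invocation appear in the diff, so the other directory names and the construction of the all/any flags are placeholders:

    # Hypothetical directory list; only "static/shared" is visible in the diff.
    expected_dirs = ["static/shared", "static/css", "static/js"]

    all_dirs_present = expected_dirs.all? { |d| File.exist?(d) }
    any_dirs_present = expected_dirs.any? { |d| File.exist?(d) }

    if (not all_dirs_present and any_dirs_present)
      # Some, but not all, expected directories exist: treat static/ as corrupt.
      Kernel.abort("ERROR: The static/ directory is verifiably corrupt.\n"\
                   " Please:\n"\
                   " * reset the contents of the static/ directory "\
                   "(`git clean -xdf static; git checkout -f static`)\n"\
                   " * re-run this script.")
    elsif (not any_dirs_present)
      # Nothing is there yet: unpack the fetched archive into static/.
      puts("Extracting #{$archive_name} (this may take a lot of time)...")
      successful = system("tar -xjf #{$archive_name} -C static")
    end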
