 ###########################
 # Deploy rules and helpers
 
+$archive_name = "archived_docs.basho.com.tar.bz2"
+
 def do_fetch_archived_content()
   # Fetch and extract the archived content that we want to survive from the
   # Middleman website.
   puts("Verifying archived content...")
   # Verify that the tar.bz2 is present.
-  if (not File.file?(File.join(Dir.pwd, "archived_docs.basho.com.tar.bz2")))
+  if (not File.file?(File.join(Dir.pwd, "#{$archive_name}")))
     if (`which wget`.empty?)
       # If we don't have wget. Error out.
-      Kernel.abort("ERROR: archived_docs.basho.com.tar.bz2 was not found, " \
-                   "and this system doesn't have access to `wget`.\n" \
+      Kernel.abort("ERROR: #{$archive_name} was not found, and this system " \
+                   "does not have access to `wget`.\n" \
                    "       Please either install `wget` and re-run this " \
                    "deploy, or manually download the file from the below " \
                    "address and place it into this directory.\n" \
-                   "       http://s3.amazonaws.com/downloads.basho.com/documentation_content/archived_docs.basho.com.tar.bz2")
+                   "       http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}")
     else
       # We have wget, but not the file. Fetch it.
-      puts("  Using wget to fetch archived_docs.basho.com.tar.bz2 " \
+      puts("  Using wget to fetch #{$archive_name} " \
            "(this may take some time)...")
-      successful = system('wget http://s3.amazonaws.com/downloads.basho.com/documentation_content/archived_docs.basho.com.tar.bz2')
+      successful = system("wget http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}")
+
       if (not successful)
-        Kernel.abort("ERROR: Failed to get archived_docs.basho.com.tar.bz2\n" \
+        Kernel.abort("ERROR: Failed to get #{$archive_name}\n" \
                      "       Please download the file from the below " \
                      "address and copy it into this directory.\n" \
-                     "       http://s3.amazonaws.com/downloads.basho.com/documentation_content/archived_docs.basho.com.tar.bz2")
+                     "       http://s3.amazonaws.com/downloads.basho.com/documentation_content/#{$archive_name}")
       end
     end
   end
 
   # Verify the file is correct via an md5sum, unless NO_CHECK has been set
   if (ENV['NO_CHECK'] == "True")
-    puts("  Skipping archived_docs.basho.com.tar.bz2 sha1 check. Good luck.")
+    puts("  Skipping #{$archive_name} sha1 check. Good luck.")
   else
     if (`which md5sum`.empty?)
       # We don't have md5sum, and we want to perform a check. Error out.
       Kernel.abort("ERROR: This system does not have `md5sum`, so the " \
-                   "contents of archived_docs.basho.com.tar.bz2 cannot be " \
-                   "verified.\n" \
+                   "contents of #{$archive_name} cannot be verified.\n" \
                    "       Please install the md5sum tool (possibly named " \
                    "md5sha1sum).\n" \
                    "       You may also re-run this script after running " \
                    "`export NO_CHECK=\"True\"`, but it is **highly " \
                    "recommended** that you install `md5sum` instead.")
     end
-    web_md5 = Net::HTTP.get('s3.amazonaws.com', '/downloads.basho.com/documentation_content/archived_docs.basho.com.tar.bz2.md5').split(" ")[0]
-    loc_md5 = `md5sum archived_docs.basho.com.tar.bz2`.split(" ")[0]
+    web_md5 = Net::HTTP.get("s3.amazonaws.com", "/downloads.basho.com/documentation_content/#{$archive_name}.md5").split(" ")[0]
+    loc_md5 = `md5sum #{$archive_name}`.split(" ")[0]
 
     if (web_md5 != loc_md5)
-      Kernel.abort("ERROR: Fetch archived_docs.basho.com.tar.bz2 does not " \
-                   "match the expected md5sum.\n" \
-                   "       Please remove the current " \
-                   "archived_docs.basho.com.tar.bz2, reset the contents of " \
-                   "the static/ directory (`git clean -xdf static; git " \
-                   "checkout -f static`), and re-run this script.")
+      Kernel.abort("ERROR: Fetched #{$archive_name} does not match the " \
+                   "expected md5sum.\n" \
+                   "       Please remove the current #{$archive_name}, reset " \
+                   "the contents of the static/ directory (`git clean -xdf " \
+                   "static; git checkout -f static`), and re-run this script.")
     end
   end
 
@@ -71,13 +72,11 @@ def do_fetch_archived_content()
                     (not File.exist?("static/shared")))
 
   if (should_extract)
-    puts("Extracting archived_docs.basho.com.tar.bz2 (this may take a lot " \
-         "of time)...")
-    successful = system('tar -xjf archived_docs.basho.com.tar.bz2 -C static')
+    puts("Extracting #{$archive_name} (this may take a lot of time)...")
+    successful = system("tar -xjf #{$archive_name} -C static")
 
     if (not successful)
-      Kernel.abort("ERROR: archived_docs.basho.com.tar.bz2 failed to " \
-                   "extract.\n" \
+      Kernel.abort("ERROR: #{$archive_name} failed to extract.\n" \
                    "       I... actually don't know why. Not sure how to " \
                    "extract error messages from this system call.")
     end
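
Note on the verification step this change touches: the check compares the first whitespace-separated field of the published .md5 file against the first field of local `md5sum` output, and the system() call switches from single to double quotes so that #{$archive_name} is actually interpolated. Below is a minimal standalone sketch of that checksum comparison, assuming net/http is available and `md5sum` is on the PATH; the local variable `archive` is hypothetical and stands in for $archive_name.

require 'net/http'

archive = "archived_docs.basho.com.tar.bz2"
# First field of the published .md5 file is the expected hex digest.
web_md5 = Net::HTTP.get("s3.amazonaws.com",
                        "/downloads.basho.com/documentation_content/#{archive}.md5").split(" ")[0]
# First field of `md5sum` output is the digest of the local file.
loc_md5 = `md5sum #{archive}`.split(" ")[0]
abort("md5 mismatch for #{archive}") unless web_md5 == loc_md5
puts("#{archive} verified")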