@@ -6,7 +6,7 @@
 def do_fetch_archived_content()
   # Fetch and extract the archived content that we want to survive from the
   # Middleman website.
-  puts("Verifying archived content...")
+  puts("Verifying the archived tarball is present and correct...")
   # Verify that the tar.bz2 is present.
   if (not File.file?(File.join(Dir.pwd, "#{$archive_name}")))
     if (`which wget`.empty?)
@@ -57,32 +57,42 @@ def do_fetch_archived_content()
     end
   end
 
-  puts("Verifying archived content extraction...")
-  puts(" Please note, this only checks for directories.\n"\
-       " If something went wrong with a previous extraction or if any "\
-       "of the extracted files were modified, please run \`git clean "\
-       "-xdf static/\` and re-run this deploy script.")
-  #TODO: Consider if this is a good idea or not. I'm leaning towards not.
-  should_extract = (
-    (not File.exist?("static/css/standalone")) ||
-    (not File.exist?("static/js/standalone")) ||
-    (not File.exist?("static/riak")) ||
-    (not File.exist?("static/riakcs")) ||
-    (not File.exist?("static/riakee")) ||
-    (not File.exist?("static/shared")) )
-
-  if (should_extract)
+  puts("Verifying archived content extraction by checking directory tree...")
+  all_dirs_present = ( (File.exist?("static/css/standalone")) &&
+                       (File.exist?("static/js/standalone")) &&
+                       (File.exist?("static/riak/1.4.12")) &&
+                       (File.exist?("static/riakcs/1.5.4")) &&
+                       (File.exist?("static/riakee")) &&
+                       (File.exist?("static/shared")) )
+  any_dirs_present = ( (File.exist?("static/css/standalone")) ||
+                       (File.exist?("static/js/standalone")) ||
+                       (File.exist?("static/riak/1.4.12")) ||
+                       (File.exist?("static/riakcs/1.5.4")) ||
+                       (File.exist?("static/riakee")) ||
+                       (File.exist?("static/shared")) )
+  if (not all_dirs_present and any_dirs_present)
+    Kernel.abort("ERROR: The static/ directory is verifiably corrupt.\n"\
+                 " Please run \`git clean -xdf static/\` to clear out "\
+                 "the malformed files, and re-run this deploy script.")
+  elsif (not any_dirs_present)
     puts("Extracting #{$archive_name} (this may take a lot of time)...")
     successful = system("tar -xjf #{$archive_name} -C static")
-
     if (not successful)
       Kernel.abort("ERROR: #{$archive_name} failed to extract.\n"\
-                   " I... actually don't know why. Not sure how to "\
-                   "extract error messages from this system call.")
+                   " The failure message should have been printed to "\
+                   "stdout and be visible above.")
     end
+  else
+    puts(" Archived content directory tree verified.\n"\
+         " NOTE: File integrity is NOT checked here.\n"\
+         " As such, it is advisable to periodically clean out the "\
+         "static/ directory that this archive is extracted into.\n"\
+         " To do so, please run \`git clean -xdf static/\`, and "\
+         "re-run this deploy script.")
   end
 end
 
+
 # Once the Hugo site has been fully and correctly generated, we can upload the
 # updated and new -- and delete the no longer generated -- files to/from our S3
 # bucket, and send out CloudFront invalidation requests to propagate those
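The removed abort message admitted there was no obvious way to recover an error message from the `system()` call. If capturing tar's own diagnostics ever becomes worthwhile, a minimal sketch using Ruby's standard Open3 module (not part of this change) could stand in for the `system`/`successful` pair:

    require 'open3'

    # Sketch only: run tar with its stdout/stderr captured instead of
    # inherited, so the abort message can quote tar's actual complaint.
    stdout_str, stderr_str, status = Open3.capture3(
      "tar", "-xjf", $archive_name, "-C", "static")
    if (not status.success?)
      Kernel.abort("ERROR: #{$archive_name} failed to extract.\n"\
                   " tar reported: #{stderr_str}")
    end

The trade-off is that captured output is no longer streamed live, which may matter for a long-running extraction.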
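Relatedly, the new else-branch calls out that file integrity is not checked. Verifying a checksum of the tarball itself would be the cheap half of that; a sketch using Ruby's stdlib Digest, assuming a known-good digest were recorded somewhere (the `expected_sha256` value below is a hypothetical placeholder):

    require 'digest'

    # Hypothetical known-good digest; a real check would keep this next to
    # the archive's download URL.
    expected_sha256 = "replace-with-known-good-digest"

    actual = Digest::SHA256.file(File.join(Dir.pwd, $archive_name)).hexdigest
    if (actual != expected_sha256)
      Kernel.abort("ERROR: #{$archive_name} does not match its expected "\
                   "checksum. Please delete it and re-run this deploy script.")
    end

This would only cover the downloaded tarball, not the extracted tree, but it would catch a truncated or corrupted download before extraction begins.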