@@ -116,6 +116,129 @@ test_backend "webdav" \
116116 " SCCACHE_WEBDAV_USERNAME=bar" \
117117 " SCCACHE_WEBDAV_PASSWORD=baz"
118118
#######################################
# Test basedirs with a multi-level cache (disk L0 + remote L1).
# Verifies that basedirs path normalization works across cache levels and
# that read-through backfill repopulates L0 from L1.
# Globals:
#   SCCACHE (read) - path to the sccache binary
#   SCCACHE_BASEDIRS, SCCACHE_MULTILEVEL_CHAIN, SCCACHE_DIR (written, then unset)
# Arguments:
#   $1   - backend name (e.g. "redis", "azblob")
#   $2.. - backend-specific KEY=VALUE environment variables to export
# Returns:
#   0 on success; exits 1 on any test failure.
#######################################
test_multilevel_backend () {
    local backend_name="$1"
    # The multilevel chain config spells Azure Blob Storage "azure", not "azblob".
    local level_name="$backend_name"
    if [ "$backend_name" = "azblob" ]; then
        level_name="azure"
    fi
    shift

    # Two identical source trees at different absolute paths; basedirs should
    # make builds from either one produce identical cache keys.
    cp -r /sccache/tests/integration/basedirs-autotools /build/dir1
    cp -r /sccache/tests/integration/basedirs-autotools /build/dir2

    echo ""
    echo "=========================================="
    echo "Testing multilevel basedirs: disk + $backend_name"
    echo "=========================================="

    # Stop any running sccache server (ignore "no server running" errors)
    "$SCCACHE" --stop-server 2>/dev/null || true

    # Set backend-specific environment variables (passed as arguments).
    # ${env_var?} aborts loudly if an argument is somehow empty.
    for env_var in "$@"; do
        export "${env_var?}"
    done

    # Configure basedirs and the disk + remote multi-level cache chain
    export SCCACHE_BASEDIRS="/build/dir1:/build/dir2"
    export SCCACHE_MULTILEVEL_CHAIN="disk,$level_name"
    export SCCACHE_DIR="/build/sccache-ml-basedirs"
    rm -rf /build/sccache-ml-basedirs
    mkdir -p /build/sccache-ml-basedirs

    # Start sccache server
    "$SCCACHE" --start-server

    # Verify the multi-level configuration is actually active
    STATS_JSON=$("$SCCACHE" --show-stats --stats-format=json)
    CACHE_LOCATION=$(echo "$STATS_JSON" | python3 -c "import sys, json; print(json.load(sys.stdin).get('cache_location', ''))" || echo "unknown")
    echo "Cache location: $CACHE_LOCATION"

    if ! echo "$CACHE_LOCATION" | grep -qi "Multi-level"; then
        echo "✗ FAIL: Multi-level cache not detected in cache_location: $CACHE_LOCATION"
        exit 1
    fi

    echo "Test 1: Compile from first directory (cache miss, populates L0 disk + L1 $backend_name)"
    autotools /build/dir1

    STATS_JSON=$("$SCCACHE" --show-stats --stats-format=json)
    FIRST_MISSES=$(echo "$STATS_JSON" | python3 -c "import sys, json; stats = json.load(sys.stdin).get('stats', {}); print(stats.get('cache_misses', {}).get('counts', {}).get('C/C++', 0))")
    echo "Cache misses after first build: $FIRST_MISSES"

    echo ""
    echo "Test 2: Compile from second directory (cache hit expected via basedirs)"
    autotools /build/dir2

    STATS_JSON=$("$SCCACHE" --show-stats --stats-format=json)
    CACHE_HITS=$(echo "$STATS_JSON" | python3 -c "import sys, json; stats = json.load(sys.stdin).get('stats', {}); print(stats.get('cache_hits', {}).get('counts', {}).get('C/C++', 0))")
    SECOND_MISSES=$(echo "$STATS_JSON" | python3 -c "import sys, json; stats = json.load(sys.stdin).get('stats', {}); print(stats.get('cache_misses', {}).get('counts', {}).get('C/C++', 0))")
    echo "Cache hits: $CACHE_HITS, misses: $SECOND_MISSES (first build: $FIRST_MISSES)"

    # Miss counters are cumulative; the second build must not add any misses.
    if [ "$FIRST_MISSES" != "$SECOND_MISSES" ]; then
        echo "✗ FAIL: multilevel disk+$backend_name - Cache misses increased from $FIRST_MISSES to $SECOND_MISSES"
        echo "$STATS_JSON" | python3 -m json.tool
        exit 1
    fi

    echo ""
    echo "Test 3: Clear L0 (disk), rebuild from dir1 (should hit L1 $backend_name and backfill)"
    # Restarting the server also resets the stats counters to zero.
    "$SCCACHE" --stop-server 2>/dev/null || true
    rm -rf /build/sccache-ml-basedirs
    mkdir -p /build/sccache-ml-basedirs
    rm -rf /build/dir1
    cp -r /sccache/tests/integration/basedirs-autotools /build/dir1
    "$SCCACHE" --start-server

    autotools /build/dir1

    STATS_JSON=$("$SCCACHE" --show-stats --stats-format=json)
    THIRD_MISSES=$(echo "$STATS_JSON" | python3 -c "import sys, json; stats = json.load(sys.stdin).get('stats', {}); print(stats.get('cache_misses', {}).get('counts', {}).get('C/C++', 0))")
    echo "Cache misses after L0 clear and rebuild: $THIRD_MISSES (should be 0)"

    if [ "$THIRD_MISSES" -gt 0 ]; then
        echo "✗ FAIL: multilevel disk+$backend_name - Misses after L0 clear ($THIRD_MISSES), L1 should have served data"
        echo "$STATS_JSON" | python3 -m json.tool
        exit 1
    fi

    # Backfill from L1 into L0 is asynchronous; give it time to complete
    sleep 2

    echo ""
    echo "Test 4: Rebuild from dir2 (should hit backfilled L0 via basedirs)"
    rm -rf /build/dir2
    cp -r /sccache/tests/integration/basedirs-autotools /build/dir2
    autotools /build/dir2

    STATS_JSON=$("$SCCACHE" --show-stats --stats-format=json)
    FOURTH_MISSES=$(echo "$STATS_JSON" | python3 -c "import sys, json; stats = json.load(sys.stdin).get('stats', {}); print(stats.get('cache_misses', {}).get('counts', {}).get('C/C++', 0))")

    if [ "$FOURTH_MISSES" -gt 0 ]; then
        echo "✗ FAIL: multilevel disk+$backend_name - Misses on build 4, basedirs + backfill should provide hits"
        echo "$STATS_JSON" | python3 -m json.tool
        exit 1
    fi

    echo "✓ PASS: multilevel disk+$backend_name - Basedirs + multilevel + backfill all working"

    # Clean up for next test.
    # FIX: was '--stop-server & > /dev/null || true', which backgrounded the
    # stop command (so '|| true' and the redirection applied to nothing) and
    # raced with the cleanup below. Stop in the foreground, matching the
    # earlier stop calls.
    rm -rf /build/dir1 /build/dir2 /build/sccache-ml-basedirs
    "$SCCACHE" --stop-server 2>/dev/null || true

    # Unset the backend-specific environment variables we exported above
    for env_var in "$@"; do
        VAR_NAME="${env_var%%=*}"
        unset "$VAR_NAME"
    done
    unset SCCACHE_BASEDIRS SCCACHE_MULTILEVEL_CHAIN SCCACHE_DIR
}
238+
# Test multilevel basedirs with redis.
# FIX: the pasted arguments carried stray spaces inside the quotes
# (" redis", " SCCACHE_REDIS_ENDPOINT=..."), which would pass a backend name
# with a leading space and export a malformed variable; pass clean values.
test_multilevel_backend "redis" "SCCACHE_REDIS_ENDPOINT=tcp://redis:6379"
241+
# FIX: diff line-number markers were fused into the statements
# ('119242echo ...'), leaving them syntactically invalid; restore the
# plain status banner.
echo ""
echo "=========================================="
echo "All basedir tests completed successfully!"