diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 572ab66c5708b1e5579b33c84cb7d1e3b74f43b2..0bc7e4066f3eb19de26e84d7fb5445e55a251754 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -18,6 +18,13 @@ variables:
       For instance use `*` to process all updates, `dash` to only process `pkg/dash`.
       Leave it empty to not trigger any update.
     value: ""
+  SKIP_OBS:
+    description: |
+      The OBS 2.10 update made things noticeably slower, causing timeouts in the
+      packaging-data-fetch-obs job. Disable that job for now.
+      See https://phabricator.apertis.org/T8880
+    value: "yes"
+
 
 stages:
   - lint
@@ -194,7 +201,7 @@ packaging-data-fetch-binaries-published:
 
 packaging-data-fetch-obs:
   stage: fetch
-  timeout: 1h 30m
+  timeout: 3h
   tags:
     - lightweight
   before_script:
@@ -214,6 +221,8 @@ packaging-data-fetch-obs:
     paths:
       - packaging-data-obs.yaml
   rules:
+    - if: $SKIP_OBS == "yes"
+      when: never
     - if: $TRIGGER_FROM_JOB
       when: never
     - if: $CI_PIPELINE_SOURCE != "merge_request_event"
@@ -249,7 +258,7 @@ storage-usage:
   artifacts:
     paths:
       - storage.yaml
-  timeout: 1h 30m
+  timeout: 3h
   rules:
     - if: $TRIGGER_FROM_JOB
       when: never
@@ -263,6 +272,10 @@ packaging-check-invariants:
         python3-gitlab
         python3-yaml
   script:
+    - |
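+      # packaging-data-fetch-obs is skipped when SKIP_OBS is "yes", so create
+      # an empty placeholder for its packaging-data-obs.yaml artifact.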
+      test "$SKIP_OBS" = yes && echo '{}' > packaging-data-obs.yaml
     - ./bin/yaml-merge
         --input packaging-data-downstream.yaml
         --input packaging-data-sources-upstream.yaml
diff --git a/bin/packaging-check-invariants b/bin/packaging-check-invariants
index 2ff49e0a16334291d85bb12dce27de3be66656f9..f8559599141765bd639b5af149d76b99d60d3f7c 100755
--- a/bin/packaging-check-invariants
+++ b/bin/packaging-check-invariants
@@ -2,6 +2,7 @@
 
 import argparse
 import logging
+import os
 
 import debian.debian_support
 import yaml
@@ -558,6 +559,12 @@ class Error(Exception):
     pass
 
 
+def hack_skip_obs():
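+    # Temporary workaround for https://phabricator.apertis.org/T8880: when
+    # SKIP_OBS is "yes" no OBS data is fetched, so OBS-related checks are skipped.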
+    return os.environ.get("SKIP_OBS") == "yes"
+
+
 def is_stable_channel(channel):
     if channel.endswith("pre"):
         return False
@@ -906,7 +911,7 @@ class InvariantChecker:
             logging.debug(
                 f"Checking if {package.name} branch {branch}/{source.component} is in {obsproject}"
             )
-            if obsproject not in package.get("obs", {}):
+            if obsproject not in package.get("obs", {}) and not hack_skip_obs():
                 self.error(
                     package.name,
                     Report.OBS_PACKAGE_MISSING_BUT_ON_APT,
diff --git a/storage_stats/src/client.rs b/storage_stats/src/client.rs
index e54adb241415f8e77af535e35bbf9803a45f339f..0809a53f8c9cd9a2bef388016e355f83b5fca928 100644
--- a/storage_stats/src/client.rs
+++ b/storage_stats/src/client.rs
@@ -54,7 +54,8 @@ impl Client {
 
         // NOTE: the URL should be printed since get() is instrumented.
         debug!("GET");
-        Ok(self.client.get(url.clone()).send().await?.bytes().await?)
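+        // Turn HTTP error statuses (e.g. 404) into errors instead of returning the error body.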
+        Ok(self.client.get(url.clone()).send().await?.error_for_status()?.bytes().await?)
     }
 
     #[instrument(skip(self))]
diff --git a/storage_stats/src/repository.rs b/storage_stats/src/repository.rs
index 0990e7ced7b7dca16258f1e5a0eb5e62f3aeca64..cbd6a7d1d194c64f79c09a66daadb10df641180f 100644
--- a/storage_stats/src/repository.rs
+++ b/storage_stats/src/repository.rs
@@ -88,22 +88,32 @@ async fn read_deb822_file<
     download: F,
     client: &'client Client,
     url: &'url str,
-) -> Result<T> {
-    let bytes = download(client, url)
-        .await
-        .with_context(|| format!("Failed to download {}", url))?;
-
-    let file = tokio::task::block_in_place(|| {
-        rfc822_like::from_bytes(&bytes[..])
-            .with_context(|| format!("Failed to parse {}", url))
-    })?;
+) -> Result<Option<T>> {
+    match lift_404(
+        download(client, url)
+            .await
+            .with_context(|| format!("Failed to download {}", url)),
+    )? {
+        Some(bytes) => {
+            let file = tokio::task::block_in_place(|| {
+                rfc822_like::from_bytes(&bytes[..])
+                    .with_context(|| format!("Failed to parse {}", url))
+            })?;
+
+            debug!("Retrieved repo file");
+            Ok(Some(file))
+        }
 
-    debug!("Retrieved repo file");
-    Ok(file)
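+        // The file was not found upstream: report it as absent instead of failing the scan.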
+        None => Ok(None),
+    }
 }
 
 impl Repository {
-    pub async fn read_release(&self, client: &Client) -> Result<RepositoryRelease> {
+    pub async fn read_release(
+        &self,
+        client: &Client,
+    ) -> Result<Option<RepositoryRelease>> {
         read_deb822_file(Client::get, client, &format!("{}/Release", &self.url)).await
     }
 
@@ -111,7 +120,7 @@ impl Repository {
         &self,
         client: &Client,
         component: &str,
-    ) -> Result<Vec<SourcePackage>> {
+    ) -> Result<Option<Vec<SourcePackage>>> {
         read_deb822_file(
             get_maybe_compressed_file,
             client,
@@ -125,7 +134,7 @@ impl Repository {
         client: &Client,
         component: &str,
         arch: &str,
-    ) -> Result<Vec<BinaryPackage>> {
+    ) -> Result<Option<Vec<BinaryPackage>>> {
         read_deb822_file(
             get_maybe_compressed_file,
             client,
diff --git a/storage_stats/src/stats.rs b/storage_stats/src/stats.rs
index abc05213b1f080fec10b93da7f37b786346d9b97..3231a93c603ec654a9af1517eefccff91336a8c2 100644
--- a/storage_stats/src/stats.rs
+++ b/storage_stats/src/stats.rs
@@ -10,7 +10,7 @@ use anyhow::{Context, Result};
 use futures::{future::try_join_all, stream, try_join, StreamExt, TryStreamExt};
 use serde_derive::Serialize;
 use std::sync::Arc;
-use tracing::{debug, info, instrument};
+use tracing::{debug, info, instrument, warn};
 
 async fn scan_source_packages(
     client: Client,
@@ -18,9 +18,23 @@ async fn scan_source_packages(
     component: String,
     agg: Arc<StorageUsageAggregator>,
 ) -> Result<()> {
-    for pkg in repo.read_sources(&client, &component).await? {
-        for file in pkg.files {
-            agg.add(&repo.id, format!("{}/{}", pkg.directory, file.filename), file.size);
+    match repo.read_sources(&client, &component).await? {
+        Some(pkgs) => {
+            for pkg in pkgs {
+                for file in pkg.files {
+                    agg.add(
+                        &repo.id,
+                        format!("{}/{}", pkg.directory, file.filename),
+                        file.size,
+                    );
+                }
+            }
+        }
+        None => {
+            warn!(
+                "Repository {}, component {} is missing its Sources file",
+                repo.id, component
+            );
         }
     }
 
@@ -34,8 +48,18 @@ async fn scan_binary_packages(
     arch: String,
     agg: Arc<StorageUsageAggregator>,
 ) -> Result<()> {
-    for pkg in repo.read_packages(&client, &component, &arch).await? {
-        agg.add(&repo.id, pkg.filename, pkg.size);
+    match repo.read_packages(&client, &component, &arch).await? {
+        Some(pkgs) => {
+            for pkg in pkgs {
+                agg.add(&repo.id, pkg.filename, pkg.size);
+            }
+        }
+        None => {
+            warn!(
+                "Repository {}, component {}, arch {} is missing its Packages file",
+                repo.id, component, arch
+            );
+        }
     }
 
     Ok(())
@@ -50,7 +74,14 @@ async fn aggregate_basic_stats(
 ) -> Result<()> {
     debug!("Scanning repo");
 
-    let release = repo.read_release(client).await?;
+    let release = match repo.read_release(client).await? {
+        Some(release) => release,
+        None => {
+            warn!("Repository {} is missing its Release file", repo.id);
+            return Ok(());
+        }
+    };
+
     let metadata_size: usize = release.files.iter().map(|entry| entry.size).sum();
 
     agg.register_key(repo.id.to_owned(), metadata_size);