Commit dd3e81a9 authored by Emanuele Aina

fetch-downstream: Drop check_duplicates()


Since we moved all git repositories directly under a single `pkg/` group
instead of the `pkg/$component/` nested categories, checking for
duplicates is no longer relevant.

Signed-off-by: Emanuele Aina <emanuele.aina@collabora.com>
parent 68f398d1
1 merge request: !75 fetch-downstream: Small improvements
@@ -4,7 +4,6 @@ from __future__ import annotations
 
 import argparse
 import fnmatch
-import itertools
 import logging
 import urllib.parse
 
@@ -192,21 +191,6 @@ class DownstreamFetcher:
         thread_pool(num_worker_threads, _fetch_license_report, projects, num_retries=2)
 
 
-def check_duplicates(projects, data):
-    def packagenamefunc(p):
-        return p.packagename
-
-    for packagename, projects in itertools.groupby(
-        sorted(projects, key=packagenamefunc), packagenamefunc
-    ):
-        paths = [p.path_with_namespace for p in projects]
-        if len(paths) == 1:
-            continue
-        msg = f"Project name '{packagename}' is ambiguous across projects: {', '.join(paths)}"
-        logging.error(msg)
-        data["packages"][packagename].setdefault("errors", []).append({"msg": msg})
-
-
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Fetch data from the GitLab packaging projects"
@@ -270,6 +254,4 @@ if __name__ == "__main__":
         "channels": d.channels,
     }
 
-    check_duplicates(d.projects, data)
-
     yaml.dump(data, args.yaml, width=120, Dumper=yaml.CSafeDumper)
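
For context, below is a minimal, self-contained sketch of the safeguard being dropped. The Project dataclass and the example paths are hypothetical stand-ins for the objects the fetcher builds from GitLab. Under the old pkg/$component/ layout, two components could carry a project with the same package name under different subgroups, which is the ambiguity check_duplicates() flagged; in a single flat pkg/ group GitLab enforces unique project paths, so (assuming the package name follows the project name) such collisions can no longer occur and the check has nothing left to catch.

import itertools
import logging
from dataclasses import dataclass


@dataclass
class Project:
    # Hypothetical stand-in for the fetcher's project objects.
    packagename: str
    path_with_namespace: str


def check_duplicates(projects, data):
    def packagenamefunc(p):
        return p.packagename

    # Group projects by package name; any group with more than one path is ambiguous.
    for packagename, group in itertools.groupby(
        sorted(projects, key=packagenamefunc), packagenamefunc
    ):
        paths = [p.path_with_namespace for p in group]
        if len(paths) == 1:
            continue
        msg = f"Project name '{packagename}' is ambiguous across projects: {', '.join(paths)}"
        logging.error(msg)
        data["packages"][packagename].setdefault("errors", []).append({"msg": msg})


if __name__ == "__main__":
    # Hypothetical old-style nested paths: two components shipping the same package.
    projects = [
        Project("dbus", "pkg/core/dbus"),
        Project("dbus", "pkg/connectivity/dbus"),
        Project("glib2.0", "pkg/core/glib2.0"),
    ]
    data = {"packages": {"dbus": {}, "glib2.0": {}}}
    check_duplicates(projects, data)
    # Logs the ambiguity and records it under data["packages"]["dbus"]["errors"].
    print(data["packages"]["dbus"].get("errors"))

With every repository directly under pkg/, no two entries can share a path, so the grouping above would never find more than one project per package name.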