dbin
📦 Poor man's package manager.
git clone https://github.com/xplshn/dbin.git

misc/cmd/dbinRepoIndexGenerators/1.6/generator.go
xplshn  ·  2025-08-29

// TODO: Add blessed selection, a cherry-picked repo made up of programs from autogenerated repos like pkgforge-go/pkgforge-cargo, this one would be included in the default repo index _AND_ as standalone
//   - We'll have a PickFrom() function that accepts a repository index (DbinMetadata) and a .Pkg and .PkgId, it uses these two to get us the item we want. We'll use this in our blessed repo.
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"sort"
	"strconv"
	"strings"

	"github.com/fxamacker/cbor/v2"
	"github.com/goccy/go-json"
	"github.com/klauspost/compress/zstd"
	minify "github.com/tdewolff/minify/v2"
	mjson "github.com/tdewolff/minify/v2/json"
	"github.com/tiendc/go-deepcopy"
)

const (
	colorRed    = "\033[31m"
	colorYellow = "\033[33m"
	colorReset  = "\033[0m"
)

type repository struct {
	URLs       []string
	Name       string
	Standalone bool
	Single     bool
	Filter     func(*[]DbinItem)
}
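// How these fields are used in main below:
//   - Single: the repo is merged into the combined per-arch index and is also
//     written out as its own standalone index.
//   - Standalone: the repo is only written out as its own index; it is not
//     merged into the combined one.
//   - Filter: optional post-fetch hook that prunes or rewrites the fetched items.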

type PkgForgeItem struct {
	Pkg         string   `json:"pkg"`
	Name        string   `json:"pkg_name,omitempty"`
	Family      string   `json:"pkg_family,omitempty"`
	PkgId       string   `json:"pkg_id,omitempty"`
	AppId       string   `json:"app_id,omitempty"`
	PkgType     string   `json:"pkg_type,omitempty"`
	Icon        string   `json:"icon,omitempty"`
	Description string   `json:"description,omitempty"`
	Maintainers []string `json:"Maintainer,omitempty"`
	Screenshots []string `json:"screenshots,omitempty"`
	WebURLs     []string `json:"homepage,omitempty"`
	Version     string   `json:"version,omitempty"`
	DownloadURL string   `json:"download_url,omitempty"`
	Size        string   `json:"size,omitempty"`
	Bsum        string   `json:"bsum,omitempty"`
	Shasum      string   `json:"shasum,omitempty"`
	BuildDate   string   `json:"build_date,omitempty"`
	SrcURLs     []string `json:"src_url,omitempty"`
	BuildScript string   `json:"build_script,omitempty"`
	BuildLog    string   `json:"build_log,omitempty"`
	Category    []string `json:"categories,omitempty"`
	Snapshots   []string `json:"snapshots,omitempty"`
	Provides    []string `json:"provides,omitempty"`
	Notes       []string `json:"note,omitempty"`
	License     []string `json:"license,omitempty"`
	GhcrPkg     string   `json:"ghcr_pkg,omitempty"`
	HfPkg       string   `json:"hf_pkg,omitempty"`
	Rank        string   `json:"rank,omitempty"`
	WebManifest string   `json:"pkg_webpage,omitempty"`
}

type snapshot struct {
	Commit  string `json:"commit,omitempty"`
	Version string `json:"version,omitempty"`
}

type DbinItem struct {
	Pkg             string     `json:"pkg,omitempty"`
	Name            string     `json:"pkg_name,omitempty"`
	PkgId           string     `json:"pkg_id,omitempty"`
	AppstreamId     string     `json:"app_id,omitempty"`
	Icon            string     `json:"icon,omitempty"`
	Description     string     `json:"description,omitempty"`
	LongDescription string     `json:"description_long,omitempty"`
	Screenshots     []string   `json:"screenshots,omitempty"`
	Version         string     `json:"version,omitempty"`
	DownloadURL     string     `json:"download_url,omitempty"`
	Size            string     `json:"size,omitempty"`
	Bsum            string     `json:"bsum,omitempty"`
	Shasum          string     `json:"shasum,omitempty"`
	BuildDate       string     `json:"build_date,omitempty"`
	SrcURLs         []string   `json:"src_urls,omitempty"`
	WebURLs         []string   `json:"web_urls,omitempty"`
	BuildScript     string     `json:"build_script,omitempty"`
	BuildLog        string     `json:"build_log,omitempty"`
	Categories      string     `json:"categories,omitempty"`
	Snapshots       []snapshot `json:"snapshots,omitempty"`
	Provides        string     `json:"provides,omitempty"`
	License         []string   `json:"license,omitempty"`
	Maintainers     string     `json:"maintainers,omitempty"`
	Notes           []string   `json:"notes,omitempty"`
	Appstream       string     `json:"appstream,omitempty"`
	Rank            uint       `json:"rank,omitempty"`
	WebManifest     string     `json:"web_manifest,omitempty"`
}

type DbinMetadata map[string][]DbinItem
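// Illustrative shape of the generated index (hypothetical field values); each
// top-level key is a repository name, mapping to that repository's items:
//
//	{
//	  "bincache": [
//	    {"pkg": "a-utils/ccat", "pkg_name": "ccat", "version": "0.0.1", "download_url": "oci://ghcr.io/...", ...}
//	  ],
//	  "pkgcache": [ ... ]
//	}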

type RepositoryHandler interface {
	FetchMetadata(urls []string, arch string) ([]DbinItem, error)
}

type PkgForgeHandler struct{}

func (PkgForgeHandler) FetchMetadata(urls []string, arch string) ([]DbinItem, error) {
	return fetchAndConvertMetadata(urls, arch, downloadJSON, convertPkgForgeToDbinItem)
}

type DbinHandler struct{}

func (DbinHandler) FetchMetadata(urls []string, arch string) ([]DbinItem, error) {
	var lastErr error

	for i, urlTemplate := range urls {
		url := urlTemplate
		if strings.Contains(url, "%s") {
			url = fmt.Sprintf(url, arch)
		}

		if i > 0 {
			fmt.Printf("Using fallback URL: %s\n", url)
		}

		resp, err := http.Get(url)
		if err != nil {
			lastErr = err
			continue
		}
		defer resp.Body.Close()

		body, err := io.ReadAll(resp.Body)
		if err != nil {
			lastErr = err
			continue
		}

		var metadata DbinMetadata
		err = json.Unmarshal(body, &metadata)
		if err != nil {
			lastErr = err
			continue
		}

		// The metadata is already in dbin format; return the items of the first (and presumably only) repository entry
		for _, items := range metadata {
			return items, nil
		}
	}

	return nil, lastErr
}

type AppStreamMetadata struct {
	AppId           string   `json:"app_id"`
	Categories      string   `json:"categories"`
	RichDescription string   `json:"rich_description"`
	Icons           []string `json:"icons"`
	Screenshots     []string `json:"screenshots"`
}

var appStreamMetadata []AppStreamMetadata
var appStreamMetadataLoaded bool

func loadAppStreamMetadata() error {
	if appStreamMetadataLoaded {
		return nil
	}

	//resp, err := http.Get("https://d.xplshn.com.ar/misc/cmd/flatpakAppStreamScrapper/appstream_metadata.cbor")
	resp, err := http.Get("https://github.com/xplshn/dbin-metadata/raw/refs/heads/master/misc/cmd/flatpakAppStreamScrapper/appstream_metadata.cbor.zst")
	if err != nil {
		return fmt.Errorf("failed to fetch Flathub AppStream metadata: %v", err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("failed to read response body: %v", err)
	}

	zstdReader, err := zstd.NewReader(nil, zstd.WithDecoderConcurrency(1))
	if err != nil {
		return fmt.Errorf("failed to create zstd reader: %v", err)
	}
	defer zstdReader.Close()

	decompressed, err := zstdReader.DecodeAll(body, nil)
	if err != nil {
		return fmt.Errorf("failed to decompress data: %v", err)
	}

	err = cbor.Unmarshal(decompressed, &appStreamMetadata)
	if err != nil {
		return fmt.Errorf("failed to unmarshal Flathub AppStream metadata: %v", err)
	}

	appStreamMetadataLoaded = true
	return nil
}

func updateItemWithAppStreamMetadata(item *DbinItem) {
	if item.AppstreamId == "" {
		return
	}

	for _, metadata := range appStreamMetadata {
		if metadata.AppId == item.AppstreamId {
			if len(metadata.Icons) > 0 {
				item.Icon = metadata.Icons[0]
			}
			if len(metadata.Screenshots) > 0 {
				item.Screenshots = metadata.Screenshots
			}
			if metadata.Categories != "" {
				item.Categories = metadata.Categories
			}
			if metadata.RichDescription != "" {
				item.LongDescription = metadata.RichDescription
			}
			break
		}
	}
}

func fetchAndConvertMetadata(urls []string, arch string, downloadFunc func([]string, string) ([]PkgForgeItem, error), convertFunc func(PkgForgeItem, map[string]bool) (DbinItem, bool)) ([]DbinItem, error) {
	items, err := downloadFunc(urls, arch)
	if err != nil {
		return nil, err
	}

	familyCount := make(map[string]int)
	familyNames := make(map[string]string)
	useFamilyFormat := make(map[string]bool)

	for _, item := range items {
		familyCount[item.Family]++
		if familyNames[item.Family] == "" {
			familyNames[item.Family] = item.Name
		} else if familyNames[item.Family] != item.Name {
			useFamilyFormat[item.Family] = true
		}
	}

	var dbinItems []DbinItem
	for _, item := range items {
		dbinItem, include := convertFunc(item, useFamilyFormat)
		if include {
			updateItemWithAppStreamMetadata(&dbinItem)
			dbinItems = append(dbinItems, dbinItem)
		}
	}

	return dbinItems, nil
}

func convertPkgForgeToDbinItem(item PkgForgeItem, useFamilyFormat map[string]bool) (DbinItem, bool) {
	// PkgTypes we discard, completely
	if item.PkgType == "dynamic" {
		return DbinItem{}, false
	}

	var categories, provides, maintainers, downloadURL string

	if len(item.Category) > 0 {
		categories = strings.Join(item.Category, ",")
	}

	if len(item.Provides) > 0 {
		provides = strings.Join(item.Provides, ",")
	}

	if len(item.Maintainers) > 0 {
		maintainers = strings.Join(item.Maintainers, ",")
	}

	if item.GhcrPkg != "" {
		downloadURL = "oci://" + item.GhcrPkg
	} else if item.HfPkg != "" {
		downloadURL = strings.Replace(item.HfPkg, "/tree/main", "/resolve/main", 1) + "/" + item.Pkg
	} else if item.DownloadURL != "" {
		downloadURL = item.DownloadURL
	}
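	// Illustrative results (hypothetical field values):
	//   GhcrPkg = "ghcr.io/org/pkg:tag"                  -> "oci://ghcr.io/org/pkg:tag"
	//   HfPkg   = "https://huggingface.co/x/y/tree/main" -> "https://huggingface.co/x/y/resolve/main/" + item.Pkg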

	rank, _ := strconv.Atoi(item.Rank)

	// Parse snapshots
	var snapshots []snapshot
	for _, snapshotStr := range item.Snapshots {
		parts := strings.Split(snapshotStr, "[")
		commit := strings.TrimSpace(parts[0])
		version := ""
		if len(parts) > 1 {
			version = strings.TrimSuffix(parts[1], "]")
		}
		snapshots = append(snapshots, snapshot{Commit: commit, Version: version})
	}
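	// Example (hypothetical value): a snapshot string "9f3c2ab [1.2.3]" is
	// parsed into snapshot{Commit: "9f3c2ab", Version: "1.2.3"}; a bare
	// "9f3c2ab" yields an empty Version.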

	// Would love to do this: but Snapshots doesn't update as often as it should for this to be feasible
	//if strings.HasPrefix(item.Version, "HEAD-") && len(snapshots) >= 1 {
	//	if snapshots[0].Version != "" {
	//		item.Version = snapshots[0].Version
	//	}
	//}

	// - Determine the package name format
	//   | - If all packages in a family have the same name (e.g., "bwrap" in the "bubblewrap" family),
	//   |   the package name will be just the package name (e.g., "bwrap").
	//   | - If there are multiple packages with different names in a family, the format will be
	//   |   "family/package_name" (e.g., "a-utils/ccat").
	// - Applies to all occurrences
	pkgName := item.Name
	if item.Family != "" && useFamilyFormat[item.Family] {
		pkgName = fmt.Sprintf("%s/%s", item.Family, item.Name)
	}

	if item.PkgType == "static" {
		pkgName = strings.TrimSuffix(pkgName, ".static")
	} else if item.PkgType == "archive" {
		pkgName = strings.TrimSuffix(pkgName, ".archive")
	} else if item.PkgType != "" {
		pkgName = pkgName + "." + item.PkgType
	}
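	// Illustrative outcomes (hypothetical items): a "static" package named
	// "wget.static" becomes "wget", an "archive" package named "foo.archive"
	// becomes "foo", and any other non-empty PkgType is appended, e.g. an
	// "appimage" package named "krita" becomes "krita.appimage".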

	item.Pkg = strings.TrimPrefix(item.Pkg, "/")

	if areSlicesEqual(item.SrcURLs, item.WebURLs) {
		item.WebURLs = []string{}
	}

	return DbinItem{
		Pkg:         pkgName,
		Name:        item.Name,
		PkgId:       item.PkgId,
		AppstreamId: item.AppId,
		Icon:        item.Icon,
		Screenshots: item.Screenshots,
		Description: item.Description,
		Version:     item.Version,
		DownloadURL: downloadURL,
		Size:        item.Size,
		Bsum:        item.Bsum,
		Shasum:      item.Shasum,
		BuildDate:   item.BuildDate,
		SrcURLs:     item.SrcURLs,
		WebURLs:     item.WebURLs,
		BuildScript: item.BuildScript,
		BuildLog:    item.BuildLog,
		Categories:  categories,
		Snapshots:   snapshots,
		Provides:    provides,
		License:     item.License,
		Maintainers: maintainers,
		Notes:       item.Notes,
		Rank:        uint(rank),
		WebManifest: item.WebManifest,
	}, true
}

func downloadJSON(urls []string, arch string) ([]PkgForgeItem, error) {
	var lastErr error

	for i, urlTemplate := range urls {
		url := urlTemplate
		if strings.Contains(url, "%s") {
			url = fmt.Sprintf(url, arch)
		}

		if i > 0 {
			fmt.Printf("Using fallback URL: %s\n", url)
		}

		resp, err := http.Get(url)
		if err != nil {
			lastErr = err
			continue
		}
		defer resp.Body.Close()

		body, err := io.ReadAll(resp.Body)
		if err != nil {
			lastErr = err
			continue
		}

		var items []PkgForgeItem
		err = json.Unmarshal(body, &items)
		if err != nil {
			lastErr = err
			continue
		}

		return items, nil
	}

	return nil, lastErr
}

func reorderItems(str []map[string]string, metadata DbinMetadata) {
	for _, replacements := range str {
		for repo, items := range metadata {
			// Replace each original substring with its sortable placeholder
			for oldStr, newStr := range replacements {
				for i := range items {
					items[i].PkgId = strings.ReplaceAll(items[i].PkgId, oldStr, newStr)
				}
			}

			// Sort items alphabetically by PkgId
			sort.Slice(items, func(i, j int) bool {
				return items[i].PkgId < items[j].PkgId
			})

			// Swap the placeholders back to the original substrings
			for oldStr, newStr := range replacements {
				for i := range items {
					items[i].PkgId = strings.ReplaceAll(items[i].PkgId, newStr, oldStr)
				}
			}

			metadata[repo] = items
		}
	}
}
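// Illustrative pass (hypothetical PkgIds): with the replacement set
// {"musl": "0000_"}, a PkgId "hello.musl" temporarily becomes "hello.0000_",
// which sorts before "hello.glibc"; once sorted, the placeholder is swapped
// back, so the musl build keeps its earlier position under its original PkgId.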

func saveAll(filename string, metadata DbinMetadata) error {
	if err := saveJSON(filename, metadata); err != nil {
		return err
	}
	return saveCBOR(filename, metadata)
}

func saveMetadata(filename string, metadata DbinMetadata) error {
	// Reorder items alphabetically, but with priority exceptions, to ensure a higher level of quality.
	// We basically do a search&replace, order alphabetically, and then do the search&replace in reverse.
	// I prioritize binaries that are smaller, have broader hardware compatibility, and are truly static.
	reorderItems([]map[string]string{
		{"musl": "0000_"},    // | Higher priority for Musl
		{"ppkg": "0020__"},   // | Higher priority for ppkg
		{"glibc": "0040___"}, // | Push glibc to the end
		// -					 // | - Little Glenda says hi!
		// -      				 // |   (\(\
		{"musl-v3": "0080_"},    // |   ¸". ..
		{"glibc-v3": "0100___"}, // |   (  . .)
		// -    				 // |   |   ° ¡
		{"musl-v4": "0200_"},    // |   ¿     ;
		{"glibc-v4": "0400___"}, // |  c?".UJ"
	}, metadata)

	if err := saveAll(filename, metadata); err != nil {
		return err
	}

	// "web" version
	var webMetadata DbinMetadata
	_ = deepcopy.Copy(&webMetadata, &metadata)
	for _, items := range webMetadata {
		for i := range items {
			items[i].Provides = ""
			items[i].Shasum = ""
			items[i].Bsum = ""
			items[i].WebManifest = ""
		}
	}
	saveAll(filename+".web", webMetadata)
	// "nlite" version
	for _, items := range metadata {
		for i := range items {
			items[i].Icon = ""
			items[i].Provides = ""
			items[i].Shasum = ""
			items[i].AppstreamId = ""
			items[i].LongDescription = ""
			items[i].Screenshots = []string{}
		}
	}
	saveAll(filename+".nlite", metadata)
	// "lite" version
	for _, items := range metadata {
		for i := range items {
			items[i].Icon = ""
			items[i].Provides = ""
			items[i].Shasum = ""
			items[i].AppstreamId = ""
			items[i].LongDescription = ""
			items[i].WebManifest = ""
			items[i].Screenshots = []string{}
		}
	}
	return saveAll(filename+".lite", metadata)
}
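// For a given base filename, saveMetadata emits the full index plus three
// trimmed variants ("web", "nlite", "lite"), each of them serialized by
// saveAll as .json, .min.json and .cbor, e.g.:
//   amd64_linux.json, amd64_linux.min.json, amd64_linux.cbor,
//   amd64_linux.web.json, ..., amd64_linux.lite.cbor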

func saveCBOR(filename string, metadata DbinMetadata) error {
	cborData, err := cbor.Marshal(metadata)
	if err != nil {
		return err
	}
	return os.WriteFile(filename+".cbor", cborData, 0644)
}

func saveJSON(filename string, metadata DbinMetadata) error {
	jsonData, err := json.MarshalIndent(metadata, "", " ")
	if err != nil {
		return err
	}
	if err := os.WriteFile(filename+".json", jsonData, 0644); err != nil {
		return err
	}
	// Minify JSON
	m := minify.New()
	m.AddFunc("application/json", mjson.Minify)
	if jsonData, err = m.Bytes("application/json", jsonData); err != nil {
		return err
	} else if err := os.WriteFile(filename+".min.json", jsonData, 0644); err != nil {
		return err
	}
	return nil
}

func main() {
	// Load AppStream metadata once at startup
	if err := loadAppStreamMetadata(); err != nil {
		fmt.Printf("%serror:%s Error loading AppStream metadata: %v\n", colorRed, colorReset, err)
	}

	realArchs := map[string]string{
		"x86_64-Linux":  "amd64_linux",
		"aarch64-Linux": "arm64_linux",
		"riscv64-Linux": "riscv64_linux",
		"loong64-Linux": "loongarch64_linux",
	}
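	// The key (e.g. "x86_64-Linux") fills the "%s" placeholder in repository
	// URL templates; the value (e.g. "amd64_linux") becomes the base name of
	// the output files (amd64_linux.json, amd64_linux.cbor, and so on).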

	// At least the amd64 repo must have succeeded for a fetch failure of a
	// repo on a specific arch to be treated as a warning instead of an error.
	amd64Success := false

	repositories := []struct {
		Repo    repository
		Handler RepositoryHandler
	}{
		{
			Repo: repository{
				Name: "bincache",
				URLs: []string{
					"https://github.com/pkgforge/metadata/raw/refs/heads/main/bincache/data/%s.json",
					"https://meta.pkgforge.dev/bincache/%s.json",
				},
				Single: true,
			},
			Handler: PkgForgeHandler{},
		},
		{
			Repo: repository{
				Name: "pkgcache",
				URLs: []string{
					"https://github.com/pkgforge/metadata/raw/refs/heads/main/pkgcache/data/%s.json",
					"https://meta.pkgforge.dev/pkgcache/%s.json",
				},
				Single: true,
				Filter: func(items *[]DbinItem) {
					var filteredItems []DbinItem
					for _, item := range *items {
						hasPortableNote := false
						for _, note := range item.Notes {
							if strings.Contains(note, "[PORTABLE]") {
								hasPortableNote = true
								break
							}
						}
						if hasPortableNote {
							filteredItems = append(filteredItems, item)
						}
					}
					*items = filteredItems
				},
			},
			Handler: PkgForgeHandler{},
		},
		{
			Repo: repository{
				Name: "pkgforge-go",
				URLs: []string{
					"https://github.com/pkgforge-go/builder/raw/refs/heads/main/data/%s.json",
					"https://meta.pkgforge.dev/external/pkgforge-go/%s.json",
				},
				Standalone: true,
				Filter: func(items *[]DbinItem) {
					var filteredItems []DbinItem
					for _, item := range *items {
						//if !strings.Contains(item.Description, "bindings") && !strings.Contains(item.Description, "key") {
						//	filteredItems = append(filteredItems, item)
						//} /* else {
						//	fmt.Printf("[pkgforge-go]: repo filter: %s#%s contains bad word (%s)", item.Name, item.PkgId, "bindings")
						//} */
						item.PkgId = strings.Replace(item.PkgId, "#", ".", -1)
						if item.Description != "No Description Provided" {
							filteredItems = append(filteredItems, item)
						}
					}
					*items = filteredItems
				},
			},
			Handler: PkgForgeHandler{},
		},
		{
			Repo: repository{
				Name: "pkgforge-cargo",
				URLs: []string{
					"https://raw.githubusercontent.com/pkgforge-cargo/builder/refs/heads/main/data/x86_64-Linux.json",
					"https://meta.pkgforge.dev/external/pkgforge-cargo/%s.json",
					"https://github.com/pkgforge-cargo/builder/raw/refs/heads/main/data/%s.json",
				},
				Standalone: true,
			},
			Handler: PkgForgeHandler{},
		},
		{
			Repo: repository{
				Name: "AM",
				URLs: []string{
					"https://github.com/pkgforge/metadata/raw/refs/heads/main/external/am/data/%s.json",
					"https://meta.pkgforge.dev/external/am/%s.json",
				},
				Standalone: true,
			},
			Handler: PkgForgeHandler{},
		},
		{
			Repo: repository{
				Name: "appimage-github-io",
				URLs: []string{
					"https://github.com/pkgforge/metadata/raw/refs/heads/main/external/appimage.github.io/data/%s.json",
					"https://meta.pkgforge.dev/external/appimage.github.io/%s.json",
				},
				Standalone: true,
			},
			Handler: PkgForgeHandler{},
		},
		{
			Repo: repository{
				Name: "AppBundleHUB",
				URLs: []string{
					"https://github.com/xplshn/AppBundleHUB/releases/download/latest_metadata/metadata_%s.json",
				},
				Single: true,
			},
			Handler: DbinHandler{},
		},
	}
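	// Every "%s" in the URL templates above is substituted with the arch key
	// (e.g. "x86_64-Linux"); a repo's URLs are tried in order, with later
	// entries acting as fallbacks.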

	for arch, outputArch := range realArchs {
		dbinMetadata := make(DbinMetadata)
		archSuccess := false // Track success for the current architecture

		for _, repo := range repositories {
			items, err := repo.Handler.FetchMetadata(repo.Repo.URLs, arch)
			if err != nil {
				// If amd64 succeeded, treat non-amd64 failures as warnings
				if arch != "x86_64-Linux" && amd64Success {
					fmt.Printf("%swarning:%s Failed to download %s metadata for %s: %v\n", colorYellow, colorReset, repo.Repo.Name, arch, err)
					continue
				} else {
					fmt.Printf("%serror:%s Error downloading %s metadata for %s: %v\n", colorRed, colorReset, repo.Repo.Name, arch, err)
					continue
				}
			}

			if repo.Repo.Filter != nil {
				repo.Repo.Filter(&items)
			}

			if !repo.Repo.Standalone {
				dbinMetadata[repo.Repo.Name] = append(dbinMetadata[repo.Repo.Name], items...)
			}

			if repo.Repo.Single || repo.Repo.Standalone {
				singleMetadata := make(DbinMetadata)
				singleMetadata[repo.Repo.Name] = items
				singleOutputFile := fmt.Sprintf("%s_%s", repo.Repo.Name, outputArch)

				if err := saveMetadata(singleOutputFile, singleMetadata); err != nil {
					fmt.Printf("%serror:%s Error saving single metadata to %s: %v\n", colorRed, colorReset, singleOutputFile, err)
					continue
				}
				fmt.Printf("Successfully saved single metadata to %s\n", singleOutputFile)
			}

			archSuccess = true // Mark this architecture as successful if at least one repo was processed
		}

		// Update amd64Success if this is the amd64 architecture
		if arch == "x86_64-Linux" && archSuccess {
			amd64Success = true
		}

		// Save combined metadata only if the architecture had at least one successful repo
		if archSuccess {
			outputFile := outputArch
			if err := saveMetadata(outputFile, dbinMetadata); err != nil {
				fmt.Printf("%serror:%s Error saving metadata to %s: %v\n", colorRed, colorReset, outputFile, err)
				continue
			}
			fmt.Printf("Successfully processed and saved combined metadata to %s\n", outputFile)
		} else if arch != "x86_64-Linux" && amd64Success {
			fmt.Printf("%swarning:%s No metadata saved for %s: all repositories failed\n", colorYellow, colorReset, outputArch)
		} else {
			fmt.Printf("%serror:%s No metadata saved for %s: all repositories failed\n", colorRed, colorReset, outputArch)
		}
	}
}

func areSlicesEqual(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}

// t is a tiny generic ternary helper: it returns vtrue when cond is true and vfalse otherwise.
func t[T any](cond bool, vtrue, vfalse T) T {
	if cond {
		return vtrue
	}
	return vfalse
}

/* AM is one of the most relevant projects of the portable Linux apps community,
 * and the AM repo is a third-party optional repo in `dbin`, so it's only fair that
 * we help them distribute more programs too!
 */
const pipeRepl = "ǀ" // Replacement for `|` to avoid breaking the MD table

func replacePipeFields(pkg *DbinItem) {
	pkg.Name = strings.ReplaceAll(pkg.Name, "|", pipeRepl)
	pkg.Description = strings.ReplaceAll(pkg.Description, "|", pipeRepl)
	pkg.DownloadURL = strings.ReplaceAll(pkg.DownloadURL, "|", pipeRepl)
	for i := range pkg.WebURLs {
		pkg.WebURLs[i] = strings.ReplaceAll(pkg.WebURLs[i], "|", pipeRepl)
	}
}

func genAMMeta(filename string, metadata DbinMetadata) {
	file, err := os.Create(filename + ".txt")
	if err != nil {
		fmt.Println("Error creating output file:", err)
		return
	}
	defer file.Close()

	file.WriteString("| appname | description | site | download | version |\n")
	file.WriteString("|---------|-------------|------|----------|---------|\n")

	var allEntries []DbinItem
	for _, entries := range metadata {
		allEntries = append(allEntries, entries...)
	}

	sort.Slice(allEntries, func(i, j int) bool {
		return strings.ToLower(allEntries[i].Pkg) < strings.ToLower(allEntries[j].Pkg)
	})

	for _, entry := range allEntries {
		pkg := strings.TrimSuffix(entry.Pkg, filepath.Ext(entry.Pkg))

		if pkg != "" {
			entry.Pkg = pkg
		}

		siteURL := ""
		if len(entry.SrcURLs) > 0 {
			siteURL = entry.SrcURLs[0]
		} else if len(entry.WebURLs) > 0 {
			siteURL = entry.WebURLs[0]
		} else {
			siteURL = "https://github.com/xplshn/dbin"
		}

		version := entry.Version
		if version == "" && entry.BuildDate != "" {
			version = entry.BuildDate
		}
		if version == "" {
			version = "not_available"
		}

		file.WriteString(fmt.Sprintf("| %s | %s | %s | %s | %s |\n",
			pkg,
			t(entry.Description != "", entry.Description, "not_available"),
			t(siteURL != "", siteURL, "not_available"),
			entry.DownloadURL,
			t(version != "", version, "not_available"),
		))
	}
}
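// Illustrative output row (hypothetical entry):
// | ccat | Concatenate files with syntax highlighting | https://github.com/xplshn/a-utils | oci://ghcr.io/example/ccat:latest | 0.0.1 |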