lmdb: Use lmdb to store packages and dependency indices

Martchus 2021-12-05 23:40:51 +01:00
parent bcc24fe1d1
commit 2ffa6629c8
32 changed files with 1677 additions and 729 deletions
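
The commit replaces the in-memory containers previously held by each Database (packages, providedDeps, requiredDeps, providedLibs, requiredLibs) with LMDB-backed storage accessed through the bundled lmdb-safe/lmdb-typed wrapper; packages are addressed by a numeric StorageID and iterated through visitor callbacks. A rough usage sketch of the new API under those assumptions; the pacman config path and storage file name are only examples and not taken from the commit:

    void listAllPackages()
    {
        auto config = LibPkg::Config();
        config.loadPacmanConfig("/etc/pacman.conf"); // example path
        config.initStorage("libpkg.db");             // opens the LMDB environment and attaches every database to it
        for (auto &db : config.databases) {
            db.allPackages([&](LibPkg::StorageID id, LibPkg::Package &&package) {
                // inspect id/package here; returning true would stop the iteration early
                return false;
            });
        }
    }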

View File

@ -95,7 +95,7 @@ static void printPackageSearchResults(const LibRepoMgr::WebClient::Response::bod
tabulate::Table t;
t.format().hide_border();
t.add_row({ "Arch", "Repo", "Name", "Version", "Description", "Build date" });
for (const auto &[db, package] : packages) {
for (const auto &[db, package, packageID] : packages) {
const auto &dbInfo = std::get<LibPkg::DatabaseInfo>(db);
t.add_row(
{ package->packageInfo ? package->packageInfo->arch : dbInfo.arch, dbInfo.name, package->name, package->version, package->description,

View File

@ -7,19 +7,20 @@ set(HEADER_FILES
data/config.h
data/lockable.h
data/siglevel.h
data/storagefwd.h
parser/aur.h
parser/package.h
parser/database.h
parser/config.h
parser/utils.h
parser/binary.h
lmdb-safe/lmdb-safe.hh
lmdb-safe/lmdb-typed.hh)
parser/binary.h)
set(SRC_FILES
data/package.cpp
data/database.cpp
data/config.cpp
data/lockable.cpp
data/storageprivate.h
data/storage.cpp
algo/search.cpp
algo/buildorder.cpp
algo/licenses.cpp
@ -30,6 +31,8 @@ set(SRC_FILES
parser/utils.cpp
parser/binary.cpp
parser/siglevel.cpp
lmdb-safe/lmdb-safe.hh
lmdb-safe/lmdb-typed.hh
lmdb-safe/lmdb-safe.cc
lmdb-safe/lmdb-typed.cc)
set(TEST_HEADER_FILES tests/parser_helper.h)
@ -51,7 +54,7 @@ set(META_TIDY_EXCLUDE_REGEX "lmdb-safe/.*")
set(CONFIGURATION_PACKAGE_SUFFIX
""
CACHE STRING "sets the suffix for find_package() calls to packages configured via c++utilities")
find_package(c++utilities${CONFIGURATION_PACKAGE_SUFFIX} 5.2.0 REQUIRED)
find_package(c++utilities${CONFIGURATION_PACKAGE_SUFFIX} 5.12.0 REQUIRED)
use_cpp_utilities(VISIBILITY PUBLIC)
# use std::filesystem
@ -65,9 +68,6 @@ use_reflective_rapidjson(VISIBILITY PUBLIC)
# find lmdb
use_pkg_config_module(PKG_CONFIG_MODULES "lmdb" VISIBILITY PUBLIC)
# configure lmdb-safe
set_source_files_properties(lmdb-safe/lmdb-safe.cc lmdb-safe/lmdb-typed.cc PROPERTIES COMPILE_FLAGS "-Wno-error")
# find 3rd party libraries zlib
use_zlib()
# libarchive
@ -80,6 +80,15 @@ set_property(TARGET libarchive PROPERTY IMPORTED_LOCATION "${LibArchive_LIBRARIE
set_property(TARGET libarchive PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${LibArchive_INCLUDE_DIRS}")
use_target(TARGET_NAME libarchive)
# find boost libraries (required by lmdb-safe)
option(BOOST_STATIC_LINKAGE "link statically against Boost (instead of dynamically)" "${STATIC_LINKAGE}")
set(Boost_USE_MULTITHREADED ON)
if (BOOST_STATIC_LINKAGE)
set(Boost_USE_STATIC_LIBS ON)
endif ()
set(BOOST_ARGS "REQUIRED;COMPONENTS;iostreams")
use_package(TARGET_NAME Boost::iostreams PACKAGE_NAME Boost PACKAGE_ARGS "${BOOST_ARGS}")
# apply basic configuration
include(BasicConfig)

View File

@ -36,7 +36,7 @@ struct TopoSortItem {
* \return Returns true if \a dependency could be added to \a finishedItems, has already been present or has been ignored. Returns false if a cycle has been detected.
* \deprecated This function can likely be removed. The build preparation task has its own implementation (to compute batches) which is more useful.
*/
bool Config::addDepsRecursivelyInTopoOrder(vector<unique_ptr<TopoSortItem>> &allItems, vector<TopoSortItem *> &finishedItems,
bool Config::addDepsRecursivelyInTopoOrder(std::vector<std::unique_ptr<TopoSortItem>> &allItems, std::vector<TopoSortItem *> &finishedItems,
std::vector<std::string> &ignored, std::vector<PackageSearchResult> &cycleTracking, const Dependency &dependency, BuildOrderOptions options,
bool onlyDependency)
{
@ -62,8 +62,9 @@ bool Config::addDepsRecursivelyInTopoOrder(vector<unique_ptr<TopoSortItem>> &all
}
// check whether a topo sort item for the current dependency is already pending to detect cycles
for (auto &item : allItems) {
if (pkg != item->package.pkg) {
for (const auto &item : allItems) {
const auto &itemPackage = item->package;
if (std::get<Database *>(packageSearchResult.db) != std::get<Database *>(itemPackage.db) || packageSearchResult.id != itemPackage.id) {
continue;
}
@ -72,9 +73,10 @@ bool Config::addDepsRecursivelyInTopoOrder(vector<unique_ptr<TopoSortItem>> &all
return true;
}
// report error: remove so far "healy" path in the current chain so it contains only the cyclic path anymore
// report error: remove so far "healthy" path in the current chain so it contains only the cyclic path anymore
for (auto i = cycleTracking.begin(), end = cycleTracking.end(); i != end; ++i) {
if (pkg == i->pkg) {
if (const auto &visited = *i;
std::get<Database *>(packageSearchResult.db) == std::get<Database *>(visited.db) && packageSearchResult.id == visited.id) {
cycleTracking.erase(cycleTracking.begin(), i);
break;
}
@ -119,19 +121,19 @@ bool Config::addDepsRecursivelyInTopoOrder(vector<unique_ptr<TopoSortItem>> &all
return currentItem->finished = true;
}
BuildOrderResult Config::computeBuildOrder(const std::vector<string> &dependencyDenotations, BuildOrderOptions options)
BuildOrderResult Config::computeBuildOrder(const std::vector<std::string> &dependencyDenotations, BuildOrderOptions options)
{
// setup variables to store results
BuildOrderResult result;
vector<unique_ptr<TopoSortItem>> allTopoSortItems;
vector<TopoSortItem *> finishedTopoSortItems;
auto result = BuildOrderResult();
auto allTopoSortItems = std::vector<std::unique_ptr<TopoSortItem>>();
auto finishedTopoSortItems = std::vector<TopoSortItem *>();
allTopoSortItems.reserve(dependencyDenotations.size());
// add dependencies
for (const auto &dependency : dependencyDenotations) {
// continue adding dependencies as long as no cycles have been detected
if (addDepsRecursivelyInTopoOrder(allTopoSortItems, finishedTopoSortItems, result.ignored, result.cycle,
Dependency(dependency.data(), dependency.size()), options, false)) {
if (addDepsRecursivelyInTopoOrder(
allTopoSortItems, finishedTopoSortItems, result.ignored, result.cycle, Dependency(std::string_view(dependency)), options, false)) {
result.cycle.clear();
continue;
}
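
Since packages are no longer shared in-memory objects, the cycle detection above identifies a package by its originating database plus its StorageID instead of comparing std::shared_ptr instances. A small hypothetical helper (not part of the commit) expressing that identity check, assuming both results come from the find functions so the db variant holds a Database pointer:

    bool isSamePackage(const LibPkg::PackageSearchResult &lhs, const LibPkg::PackageSearchResult &rhs)
    {
        // same database and same stored record means same package
        return std::get<LibPkg::Database *>(lhs.db) == std::get<LibPkg::Database *>(rhs.db) && lhs.id == rhs.id;
    }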

View File

@ -35,6 +35,18 @@ Database *Config::findDatabaseFromDenotation(std::string_view databaseDenotation
return findDatabase(dbInfo.first, dbInfo.second);
}
/*!
* \brief Creates a database with the specified \a name and appends it to the configuration.
*/
Database *Config::createDatabase(std::string &&name)
{
auto *const db = &databases.emplace_back(std::string(name));
if (storage()) {
db->initStorage(*storage());
}
return db;
}
/*!
* \brief Returns the database with the specified \a name and \a architecture or creates a new one if it doesn't exist.
* \remarks Resets the database's configuration. You'll end up with a blank database in any case.
@ -45,7 +57,7 @@ Database *Config::findOrCreateDatabase(std::string &&name, std::string_view arch
if (db) {
db->resetConfiguration();
} else {
db = &databases.emplace_back(move(name));
db = createDatabase(std::move(name));
}
if (!architecture.empty()) {
db->arch = architecture;
@ -63,7 +75,7 @@ Database *Config::findOrCreateDatabase(std::string_view name, std::string_view a
if (db) {
db->resetConfiguration();
} else {
db = &databases.emplace_back(std::string(name));
db = createDatabase(std::string(name));
}
if (!architecture.empty()) {
db->arch = architecture;
@ -82,114 +94,12 @@ Database *Config::findOrCreateDatabaseFromDenotation(std::string_view databaseDe
return findOrCreateDatabase(dbInfo.first, dbInfo.second);
}
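
Because createDatabase() attaches a newly added database to the shared storage right away (once initStorage() has been called), a database obtained through findOrCreateDatabase() can be queried immediately. A hedged usage sketch; the repository name, architecture and package name are made up:

    auto *db = config.findOrCreateDatabase("core", "x86_64");
    const auto [id, package] = db->findPackageWithID("gcc");
    if (package) {
        // id is the package's StorageID within this database's LMDB sub-database
    }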
/*!
* \brief Runs \a processNextPackage for each package of each database using as many threads as CPU cores available.
*
* Databases and packages are iterated in order. \a processNextDatabase is called when "reaching" the next database.
*
* \a processNextDatabase and \a processNextPackage are supposed to return an empty string on success and an error message
* on failure. If \a processNextDatabase fails, the whole database is skipped.
*
\a processNextDatabase is not run in parallel and therefore expected to be fast.
*
* \returns Returns the error messages returned by \a processNextDatabase and \a processNextPackage.
* \remarks Not used anymore. Possibly still useful at some point?
*/
std::list<std::string> Config::forEachPackage(const std::function<std::string(Database *db)> &processNextDatabase,
const std::function<std::string(Database *db, std::shared_ptr<Package> &pkg, std::mutex &dbMutex)> &processNextPackage)
{
// define mutex to sync getting the next package
std::mutex getNextPathMutex, submitFailureMutex, dbMutex;
std::list<std::string> errorMessages;
// define and initialize iterators
auto dbIterator = databases.begin(), dbEnd = databases.end();
auto error = processNextDatabase(&*dbIterator);
while (dbIterator != dbEnd) {
if (error.empty()) {
break;
}
errorMessages.emplace_back(move(error));
error = processNextDatabase(&*++dbIterator);
}
if (dbIterator == dbEnd) {
return errorMessages;
}
auto pkgIterator = dbIterator->packages.begin(), pkgEnd = dbIterator->packages.end();
// get the first database
Database *currentDb = &*dbIterator;
++dbIterator;
const auto recordError = [&](auto &&errorMessage) {
lock_guard<mutex> lock(submitFailureMutex);
cerr << Phrases::SubError << errorMessage << Phrases::End;
errorMessages.emplace_back(errorMessage);
};
const auto processPackages = [&] {
for (;;) {
// get next package
shared_ptr<Package> currentPackage;
{
lock_guard<mutex> lock(getNextPathMutex);
for (;;) {
if (pkgIterator != pkgEnd) {
currentPackage = pkgIterator->second;
++pkgIterator;
break;
} else if (dbIterator != dbEnd) {
// process next database
auto errorMessage = processNextDatabase(&*dbIterator);
if (!errorMessage.empty()) {
if (error != "skip") {
recordError(std::move(errorMessage));
}
++dbIterator;
continue;
}
currentDb = &*dbIterator;
pkgIterator = dbIterator->packages.begin();
pkgEnd = dbIterator->packages.end();
++dbIterator;
continue;
} else {
return;
}
}
}
// process next package
try {
auto errorMessage = processNextPackage(currentDb, currentPackage, dbMutex);
if (!errorMessage.empty()) {
recordError(std::move(errorMessage));
}
} catch (const std::runtime_error &e) {
recordError(argsToString(currentPackage->name, ':', ' ', e.what()));
continue;
}
}
};
vector<thread> threads(thread::hardware_concurrency() + 2); // FIXME: make this thread count configurable?
for (thread &t : threads) {
t = thread(processPackages);
}
processPackages();
for (thread &t : threads) {
t.join();
}
return errorMessages;
}
/*!
* \brief Returns all packages with the specified database name, database architecture and package name.
*/
std::vector<PackageSearchResult> Config::findPackages(std::tuple<std::string_view, std::string_view, std::string_view> dbAndPackageName)
{
vector<PackageSearchResult> pkgs;
auto pkgs = std::vector<PackageSearchResult>();
const auto &[dbName, dbArch, packageName] = dbAndPackageName;
// don't allow to get a list of all packages
@ -202,8 +112,8 @@ std::vector<PackageSearchResult> Config::findPackages(std::tuple<std::string_vie
if ((!dbName.empty() && dbName != db.name) || (!dbArch.empty() && dbArch != db.arch)) {
continue;
}
if (const auto i = db.packages.find(name); i != db.packages.end()) {
pkgs.emplace_back(db, i->second);
if (const auto [id, package] = db.findPackageWithID(name); package) {
pkgs.emplace_back(db, package, id);
}
}
return pkgs;
@ -215,26 +125,20 @@ std::vector<PackageSearchResult> Config::findPackages(std::tuple<std::string_vie
*/
PackageSearchResult Config::findPackage(const Dependency &dependency)
{
PackageSearchResult result;
auto result = PackageSearchResult();
auto exactMatch = false;
for (auto &db : databases) {
for (auto range = db.providedDeps.equal_range(dependency.name); range.first != range.second; ++range.first) {
const auto &providedDependency = range.first->second;
if (!Dependency::matches(dependency.mode, dependency.version, providedDependency.version)) {
continue;
}
const auto pkgs = providedDependency.relevantPackages;
for (const auto &pkg : pkgs) {
if (!result.pkg) {
result.db = &db;
result.pkg = pkg;
}
// prefer package where the name matches exactly; so if we found one no need to look further
if (dependency.name == pkg->name) {
result.db = &db;
result.pkg = pkg;
return result;
}
}
db.providingPackages(dependency, false, [&](StorageID id, Package &&package) {
// FIXME: avoid copy
exactMatch = dependency.name == package.name;
result.db = &db;
result.pkg = std::make_shared<Package>(std::move(package));
result.id = id;
// prefer package where the name matches exactly; so if we found one no need to look further
return exactMatch;
});
if (exactMatch) {
break;
}
}
return result;
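
Dependency lookups now go through Database::providingPackages(), which walks the on-disk provided/required index and hands each matching package to a visitor; returning true stops the search, which is how the exact-name preference above is implemented. A hedged sketch of calling it directly (the dependency name is illustrative):

    auto dependency = LibPkg::Dependency(std::string_view("libfoo.so")); // hypothetical dependency
    for (auto &db : config.databases) {
        db.providingPackages(dependency, false, [&](LibPkg::StorageID id, LibPkg::Package &&package) {
            // first matching provider within this database; return true to stop searching it
            return true;
        });
    }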
@ -245,20 +149,16 @@ PackageSearchResult Config::findPackage(const Dependency &dependency)
*/
std::vector<PackageSearchResult> Config::findPackages(const Dependency &dependency, bool reverse)
{
const auto dependencySet = reverse ? &Database::requiredDeps : &Database::providedDeps;
vector<PackageSearchResult> results;
auto results = std::vector<PackageSearchResult>();
for (auto &db : databases) {
for (auto range = (db.*dependencySet).equal_range(dependency.name); range.first != range.second; ++range.first) {
const auto &providedDependency = range.first->second;
if (!Dependency::matches(dependency.mode, dependency.version, providedDependency.version)) {
continue;
auto visited = std::unordered_set<StorageID>();
db.providingPackages(dependency, reverse, [&](StorageID packageID, Package &&package) {
// FIXME: avoid copy
if (visited.emplace(packageID).second) {
results.emplace_back(db, std::make_shared<Package>(std::move(package)), packageID);
}
for (const auto &pkg : providedDependency.relevantPackages) {
if (std::find_if(results.begin(), results.end(), [&pkg](const auto &res) { return res.pkg == pkg; }) == results.end()) {
results.emplace_back(db, pkg);
}
}
}
return false;
});
}
return results;
}
@ -266,18 +166,18 @@ std::vector<PackageSearchResult> Config::findPackages(const Dependency &dependen
/*!
* \brief Returns all packages providing \a library or - if \a reverse is true - all packages requiring \a library.
*/
std::vector<PackageSearchResult> Config::findPackagesProvidingLibrary(const string &library, bool reverse)
std::vector<PackageSearchResult> Config::findPackagesProvidingLibrary(const std::string &library, bool reverse)
{
const auto packagesByLibraryName = reverse ? &Database::requiredLibs : &Database::providedLibs;
vector<PackageSearchResult> results;
auto results = std::vector<PackageSearchResult>();
auto visited = std::unordered_set<StorageID>();
for (auto &db : databases) {
for (auto range = (db.*packagesByLibraryName).equal_range(library); range.first != range.second; ++range.first) {
for (const auto &pkg : range.first->second) {
if (std::find_if(results.begin(), results.end(), [&pkg](const auto &res) { return res.pkg == pkg; }) == results.end()) {
results.emplace_back(db, pkg);
}
db.providingPackages(library, reverse, [&](StorageID packageID, Package &&package) {
// FIXME: avoid copy
if (visited.emplace(packageID).second) {
results.emplace_back(db, std::make_shared<Package>(std::move(package)), packageID);
}
}
return false;
});
}
return results;
}
@ -285,15 +185,17 @@ std::vector<PackageSearchResult> Config::findPackagesProvidingLibrary(const stri
/*!
* \brief Returns all packages which names matches \a regex.
*/
std::vector<PackageSearchResult> Config::findPackages(const regex &regex)
std::vector<PackageSearchResult> Config::findPackages(const std::regex &regex)
{
vector<PackageSearchResult> pkgs;
auto pkgs = std::vector<PackageSearchResult>();
for (auto &db : databases) {
for (const auto &pkg : db.packages) {
if (regex_match(pkg.second->name, regex)) {
pkgs.emplace_back(db, pkg.second);
db.allPackages([&](StorageID packageID, Package &&package) {
if (std::regex_match(package.name, regex)) {
// FIXME: avoid copy
pkgs.emplace_back(db, std::make_shared<Package>(std::move(package)), packageID);
}
}
return false;
});
}
return pkgs;
}
@ -304,13 +206,10 @@ std::vector<PackageSearchResult> Config::findPackages(const regex &regex)
*/
std::vector<PackageSearchResult> Config::findPackages(const Package &package)
{
vector<PackageSearchResult> pkgs;
auto pkgs = std::vector<PackageSearchResult>();
for (auto &db : databases) {
for (const auto &pkg : db.packages) {
if (pkg.second->isSame(package)) {
pkgs.emplace_back(db, pkg.second);
break;
}
if (const auto [id, pkg] = db.findPackageWithID(package.name); pkg && pkg->isSame(package)) {
pkgs.emplace_back(db, pkg, id);
}
}
return pkgs;
@ -322,16 +221,18 @@ std::vector<PackageSearchResult> Config::findPackages(const Package &package)
std::vector<PackageSearchResult> Config::findPackages(
const std::function<bool(const Database &)> &databasePred, const std::function<bool(const Database &, const Package &)> &packagePred)
{
std::vector<PackageSearchResult> pkgs;
auto pkgs = std::vector<PackageSearchResult>();
for (auto &db : databases) {
if (!databasePred(db)) {
continue;
}
for (const auto &pkg : db.packages) {
if (packagePred(db, *pkg.second)) {
pkgs.emplace_back(db, pkg.second);
db.allPackages([&](StorageID packageID, const Package &package) {
if (packagePred(db, package)) {
// FIXME: avoid copy
pkgs.emplace_back(db, std::make_shared<Package>(package), packageID);
}
}
return false;
});
}
return pkgs;
}
@ -341,13 +242,15 @@ std::vector<PackageSearchResult> Config::findPackages(
*/
std::vector<PackageSearchResult> Config::findPackages(const std::function<bool(const Database &, const Package &)> &pred)
{
std::vector<PackageSearchResult> pkgs;
auto pkgs = std::vector<PackageSearchResult>();
for (auto &db : databases) {
for (const auto &pkg : db.packages) {
if (pred(db, *pkg.second)) {
pkgs.emplace_back(db, pkg.second);
db.allPackages([&](StorageID packageID, const Package &package) {
if (pred(db, package)) {
// FIXME: avoid copy
pkgs.emplace_back(db, std::make_shared<Package>(package), packageID);
}
}
return false;
});
}
return pkgs;
}

View File

@ -1,5 +1,5 @@
#include "./config.h"
#include "./global.h"
#include "./storageprivate.h"
#include <reflective_rapidjson/json/reflector.h>
@ -35,7 +35,7 @@ static const std::string &firstNonLocalMirror(const std::vector<std::string> &mi
DatabaseStatistics::DatabaseStatistics(const Database &db)
: name(db.name)
, packageCount(db.packages.size())
, packageCount(db.packageCount())
, arch(db.arch)
, lastUpdate(db.lastUpdate)
, localPkgDir(db.localPkgDir)
@ -44,6 +44,24 @@ DatabaseStatistics::DatabaseStatistics(const Database &db)
{
}
Config::Config()
{
}
Config::~Config()
{
}
void Config::initStorage(const char *path, std::uint32_t maxDbs)
{
assert(m_storage == nullptr); // only allow initializing storage once
m_storage = std::make_unique<StorageDistribution>(path, maxDbs ? maxDbs : databases.size() * 10 + 15);
for (auto &db : databases) {
db.initStorage(*m_storage);
}
aur.initStorage(*m_storage);
}
static std::string addDatabaseDependencies(
Config &config, Database &database, std::vector<Database *> &result, std::unordered_map<Database *, bool> &visited, bool addSelf)
{
@ -123,18 +141,28 @@ std::vector<Database *> Config::computeDatabasesRequiringDatabase(Database &data
}
void Config::pullDependentPackages(const std::vector<Dependency> &dependencies, const std::shared_ptr<Package> &relevantPackage,
const std::unordered_set<LibPkg::Database *> &relevantDbs, std::unordered_set<Package *> &runtimeDependencies, DependencySet &missingDependencies)
const std::unordered_set<LibPkg::Database *> &relevantDbs,
std::unordered_map<LibPkg::StorageID, std::shared_ptr<LibPkg::Package>> &runtimeDependencies, DependencySet &missingDependencies,
std::unordered_set<StorageID> &visited)
{
auto found = false;
for (const auto &dependency : dependencies) {
const auto results = findPackages(dependency);
auto found = false;
for (const auto &result : results) {
if (relevantDbs.find(std::get<Database *>(result.db)) != relevantDbs.end()) {
found = true;
if (runtimeDependencies.emplace(result.pkg.get()).second) {
pullDependentPackages(result.pkg, relevantDbs, runtimeDependencies, missingDependencies);
}
for (auto &db : databases) {
if (relevantDbs.find(&db) == relevantDbs.end()) {
continue;
}
db.providingPackages(dependency, false, [&](StorageID packageID, Package &&package) {
found = true;
// FIXME: avoid copy
if (visited.emplace(packageID).second) {
const auto &[i, inserted] = runtimeDependencies.try_emplace(packageID);
if (inserted) {
i->second = std::make_shared<Package>(std::move(package));
}
pullDependentPackages(i->second, relevantDbs, runtimeDependencies, missingDependencies, visited);
}
return false;
});
}
if (!found) {
missingDependencies.add(dependency, relevantPackage);
@ -143,10 +171,11 @@ void Config::pullDependentPackages(const std::vector<Dependency> &dependencies,
}
void Config::pullDependentPackages(const std::shared_ptr<Package> &package, const std::unordered_set<LibPkg::Database *> &relevantDbs,
std::unordered_set<Package *> &runtimeDependencies, DependencySet &missingDependencies)
std::unordered_map<LibPkg::StorageID, std::shared_ptr<LibPkg::Package>> &runtimeDependencies, DependencySet &missingDependencies,
std::unordered_set<StorageID> &visited)
{
pullDependentPackages(package->dependencies, package, relevantDbs, runtimeDependencies, missingDependencies);
pullDependentPackages(package->optionalDependencies, package, relevantDbs, runtimeDependencies, missingDependencies);
pullDependentPackages(package->dependencies, package, relevantDbs, runtimeDependencies, missingDependencies, visited);
pullDependentPackages(package->optionalDependencies, package, relevantDbs, runtimeDependencies, missingDependencies, visited);
}
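
pullDependentPackages() now keys the collected runtime dependencies by StorageID and tracks visited IDs explicitly, since equal packages are no longer represented by one shared pointer. A hedged sketch of a call site; db stands for some Database pointer and package for a std::shared_ptr<Package> obtained elsewhere:

    auto relevantDbs = std::unordered_set<LibPkg::Database *>{ db };
    auto runtimeDeps = std::unordered_map<LibPkg::StorageID, std::shared_ptr<LibPkg::Package>>();
    auto missingDeps = LibPkg::DependencySet();
    auto visited = std::unordered_set<LibPkg::StorageID>();
    config.pullDependentPackages(package, relevantDbs, runtimeDeps, missingDeps, visited);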
void Config::markAllDatabasesToBeDiscarded()

View File

@ -11,6 +11,7 @@
#include <reflective_rapidjson/json/serializable.h>
#include <cstring>
#include <memory>
#include <mutex>
#include <regex>
#include <set>
@ -24,6 +25,7 @@ enum class SignatureStatus { Valid, KeyExpired, SigExpired, KeyUnknown, KeyDisab
struct Config;
struct Database;
struct StorageDistribution;
struct LIBPKG_EXPORT DatabaseStatistics : public ReflectiveRapidJSON::JsonSerializable<Config> {
DatabaseStatistics(const Database &config);
@ -104,11 +106,16 @@ constexpr bool operator&(BuildOrderOptions lhs, BuildOrderOptions rhs)
}
struct LIBPKG_EXPORT Config : public Lockable, public ReflectiveRapidJSON::BinarySerializable<Config> {
explicit Config();
~Config();
// load config and packages
void loadPacmanConfig(const char *pacmanConfigPath);
void loadAllPackages(bool withFiles);
// caching
// storage and caching
void initStorage(const char *path = "libpkg.db", std::uint32_t maxDbs = 0);
std::unique_ptr<StorageDistribution> &storage();
std::uint64_t restoreFromCache();
std::uint64_t dumpCacheFile();
void markAllDatabasesToBeDiscarded();
@ -121,10 +128,12 @@ struct LIBPKG_EXPORT Config : public Lockable, public ReflectiveRapidJSON::Binar
std::variant<std::vector<Database *>, std::string> computeDatabaseDependencyOrder(Database &database, bool addSelf = true);
std::vector<Database *> computeDatabasesRequiringDatabase(Database &database);
void pullDependentPackages(const std::vector<Dependency> &dependencies, const std::shared_ptr<Package> &relevantPackage,
const std::unordered_set<LibPkg::Database *> &relevantDbs, std::unordered_set<Package *> &runtimeDependencies,
DependencySet &missingDependencies);
const std::unordered_set<LibPkg::Database *> &relevantDbs,
std::unordered_map<LibPkg::StorageID, std::shared_ptr<LibPkg::Package>> &runtimeDependencies, DependencySet &missingDependencies,
std::unordered_set<StorageID> &visited);
void pullDependentPackages(const std::shared_ptr<Package> &package, const std::unordered_set<LibPkg::Database *> &relevantDbs,
std::unordered_set<LibPkg::Package *> &runtimeDependencies, DependencySet &missingDependencies);
std::unordered_map<LibPkg::StorageID, std::shared_ptr<LibPkg::Package>> &runtimeDependencies, DependencySet &missingDependencies,
std::unordered_set<StorageID> &visited);
// search for packages
static std::pair<std::string_view, std::string_view> parseDatabaseDenotation(std::string_view databaseDenotation);
@ -146,10 +155,6 @@ struct LIBPKG_EXPORT Config : public Lockable, public ReflectiveRapidJSON::Binar
const std::function<bool(const Database &)> &databasePred, const std::function<bool(const Database &, const Package &)> &packagePred);
std::vector<PackageSearchResult> findPackages(const std::function<bool(const Database &, const Package &)> &pred);
// utilities
std::list<std::string> forEachPackage(const std::function<std::string(Database *db)> &processNextDatabase,
const std::function<std::string(Database *db, std::shared_ptr<Package> &pkg, std::mutex &dbMutex)> &processNextPackage);
std::vector<Database> databases;
Database aur = Database("aur");
std::set<std::string> architectures;
@ -158,13 +163,21 @@ struct LIBPKG_EXPORT Config : public Lockable, public ReflectiveRapidJSON::Binar
SignatureLevelConfig signatureLevel;
private:
Database *createDatabase(std::string &&name);
bool addDepsRecursivelyInTopoOrder(std::vector<std::unique_ptr<TopoSortItem>> &allItems, std::vector<TopoSortItem *> &items,
std::vector<std::string> &ignored, std::vector<PackageSearchResult> &cycleTracking, const Dependency &dependency, BuildOrderOptions options,
bool onlyDependency);
bool addLicenseInfo(LicenseResult &result, const Dependency &dependency);
std::string addLicenseInfo(LicenseResult &result, PackageSearchResult &searchResult, const std::shared_ptr<Package> &package);
std::unique_ptr<StorageDistribution> m_storage;
};
inline std::unique_ptr<StorageDistribution> &Config::storage()
{
return m_storage;
}
inline Status Config::computeStatus() const
{
return Status(*this);

View File

@ -1,5 +1,6 @@
#include "./database.h"
#include "./config.h"
#include "./storageprivate.h"
#include "reflection/database.h"
@ -10,6 +11,77 @@ using namespace CppUtilities;
namespace LibPkg {
struct AffectedPackages {
std::unordered_set<StorageID> newPackages;
std::unordered_set<StorageID> removedPackages;
};
struct AffectedPackagesWithDependencyDetail : public AffectedPackages {
std::string version;
DependencyMode mode = DependencyMode::Any;
};
struct PackageUpdaterPrivate {
using AffectedDeps = std::unordered_multimap<std::string, AffectedPackagesWithDependencyDetail>;
using AffectedLibs = std::unordered_map<std::string, AffectedPackages>;
explicit PackageUpdaterPrivate(DatabaseStorage &storage);
void update(const PackageCache::StoreResult &res, const std::shared_ptr<Package> &package);
void update(const StorageID packageID, bool removed, const std::shared_ptr<Package> &package);
void submit(const std::string &dependencyName, AffectedDeps::mapped_type &affected, DependencyStorage::RWTransaction &txn);
void submit(const std::string &libraryName, AffectedLibs::mapped_type &affected, LibraryDependencyStorage::RWTransaction &txn);
PackageStorage::RWTransaction packagesTxn;
AffectedDeps affectedProvidedDeps;
AffectedDeps affectedRequiredDeps;
AffectedLibs affectedProvidedLibs;
AffectedLibs affectedRequiredLibs;
private:
static AffectedDeps::iterator findDependency(const Dependency &dependency, AffectedDeps &affected);
static void addDependency(StorageID packageID, const Dependency &dependency, bool removed, AffectedDeps &affected);
static void addLibrary(StorageID packageID, const std::string &libraryName, bool removed, AffectedLibs &affected);
};
Database::Database(const std::string &name, const std::string &path)
: name(name)
, path(path)
{
}
Database::Database(std::string &&name, std::string &&path)
: name(std::move(name))
, path(std::move(path))
{
}
Database::Database(Database &&other)
: name(std::move(other.name))
, path(std::move(other.path))
, filesPath(std::move(other.filesPath))
, mirrors(std::move(other.mirrors))
, usage(other.usage)
, signatureLevel(other.signatureLevel)
, arch(std::move(other.arch))
, dependencies(std::move(other.dependencies))
, localPkgDir(std::move(other.localPkgDir))
, localDbDir(std::move(other.localDbDir))
, lastUpdate(other.lastUpdate)
, syncFromMirror(other.syncFromMirror)
, toBeDiscarded(other.toBeDiscarded)
, m_storage(std::move(other.m_storage))
{
}
Database::~Database()
{
}
void Database::initStorage(StorageDistribution &storage)
{
m_storage = storage.forDatabase(name % '@' + arch);
}
void LibPkg::Database::deducePathsFromLocalDirs()
{
if (localDbDir.empty()) {
@ -39,154 +111,285 @@ void Database::resetConfiguration()
void Database::clearPackages()
{
packages.clear();
providedLibs.clear();
requiredLibs.clear();
providedDeps.clear();
requiredDeps.clear();
lastUpdate = CppUtilities::DateTime::gmtNow();
if (m_storage) {
m_storage->packageCache.clear(*m_storage);
}
}
std::vector<std::shared_ptr<Package>> Database::findPackages(const std::function<bool(const Database &, const Package &)> &pred)
{
std::vector<std::shared_ptr<Package>> pkgs;
for (const auto &pkg : packages) {
if (pred(*this, *pkg.second)) {
pkgs.emplace_back(pkg.second);
// TODO: use cache here
// TODO: avoid std::move()
auto pkgs = std::vector<std::shared_ptr<Package>>();
auto txn = m_storage->packages.getROTransaction();
for (auto i = txn.begin(); i != txn.end(); ++i) {
if (pred(*this, *i)) {
pkgs.emplace_back(std::make_shared<Package>(std::move(*i)));
}
}
return pkgs;
}
void Database::removePackageDependencies(PackageMap::const_iterator packageIterator)
static void removeDependency(DependencyStorage::RWTransaction &txn, StorageID packageID, const std::string &dependencyName)
{
const auto &package = packageIterator->second;
providedDeps.remove(Dependency(package->name, package->version), package);
for (const auto &dep : package->provides) {
providedDeps.remove(dep, package);
}
for (const auto &dep : package->dependencies) {
requiredDeps.remove(dep, package);
}
for (const auto &dep : package->optionalDependencies) {
requiredDeps.remove(dep, package);
}
for (const auto &lib : package->libprovides) {
const auto iterator(providedLibs.find(lib));
if (iterator == providedLibs.end()) {
continue;
}
auto &relevantPackages(iterator->second);
relevantPackages.erase(remove(relevantPackages.begin(), relevantPackages.end(), package), relevantPackages.end());
if (relevantPackages.empty()) {
providedLibs.erase(iterator);
}
}
for (const auto &lib : package->libdepends) {
const auto iterator(requiredLibs.find(lib));
if (iterator == requiredLibs.end()) {
continue;
}
auto &relevantPackages(iterator->second);
relevantPackages.erase(remove(relevantPackages.begin(), relevantPackages.end(), package), relevantPackages.end());
if (relevantPackages.empty()) {
requiredLibs.erase(iterator);
for (auto [i, end] = txn.equal_range<0>(dependencyName); i != end; ++i) {
auto &dependency = i.value();
dependency.relevantPackages.erase(packageID);
if (dependency.relevantPackages.empty()) {
i.del();
} else {
txn.put(dependency, i.getID());
}
}
}
void Database::addPackageDependencies(const std::shared_ptr<Package> &package)
static void removeLibDependency(LibraryDependencyStorage::RWTransaction &txn, StorageID packageID, const std::string &dependencyName)
{
providedDeps.add(Dependency(package->name, package->version), package);
for (const auto &dep : package->provides) {
providedDeps.add(dep, package);
for (auto [i, end] = txn.equal_range<0>(dependencyName); i != end; ++i) {
auto &dependency = i.value();
dependency.relevantPackages.erase(packageID);
if (dependency.relevantPackages.empty()) {
i.del();
} else {
txn.put(dependency, i.getID());
}
}
for (const auto &dep : package->dependencies) {
requiredDeps.add(dep, package);
}
void Database::removePackageDependencies(StorageID packageID, const std::shared_ptr<Package> &package)
{
{
auto txn = m_storage->providedDeps.getRWTransaction();
removeDependency(txn, packageID, package->name);
for (const auto &dep : package->provides) {
removeDependency(txn, packageID, dep.name);
}
txn.commit();
}
for (const auto &dep : package->optionalDependencies) {
requiredDeps.add(dep, package);
{
auto txn = m_storage->requiredDeps.getRWTransaction();
for (const auto &dep : package->dependencies) {
removeDependency(txn, packageID, dep.name);
}
for (const auto &dep : package->optionalDependencies) {
removeDependency(txn, packageID, dep.name);
}
txn.commit();
}
for (const auto &lib : package->libprovides) {
providedLibs[lib].emplace_back(package);
{
auto txn = m_storage->providedLibs.getRWTransaction();
for (const auto &lib : package->libprovides) {
removeLibDependency(txn, packageID, lib);
}
txn.commit();
}
for (const auto &lib : package->libdepends) {
requiredLibs[lib].emplace_back(package);
{
auto txn = m_storage->requiredLibs.getRWTransaction();
for (const auto &lib : package->libdepends) {
removeLibDependency(txn, packageID, lib);
}
txn.commit();
}
}
static void addDependency(DependencyStorage::RWTransaction &txn, StorageID packageID, const std::string &dependencyName,
const std::string &dependencyVersion, DependencyMode dependencyMode = DependencyMode::Any)
{
for (auto [i, end] = txn.equal_range<0>(dependencyName); i != end; ++i) {
auto &existingDependency = i.value();
if (static_cast<const Dependency &>(existingDependency).version != dependencyVersion) {
continue;
}
const auto [i2, newID] = existingDependency.relevantPackages.emplace(packageID);
if (newID) {
txn.put(existingDependency, i.getID());
}
return;
}
auto newDependency = DatabaseDependency(dependencyName, dependencyVersion, dependencyMode);
newDependency.relevantPackages.emplace(packageID);
txn.put(newDependency);
}
static void addLibDependency(LibraryDependencyStorage::RWTransaction &txn, StorageID packageID, const std::string &dependencyName)
{
for (auto [i, end] = txn.equal_range<0>(dependencyName); i != end; ++i) {
auto &existingDependency = i.value();
const auto [i2, newID] = existingDependency.relevantPackages.emplace(packageID);
if (newID) {
txn.put(existingDependency, i.getID());
}
return;
}
auto newDependency = DatabaseLibraryDependency(dependencyName);
newDependency.relevantPackages.emplace(packageID);
txn.put(newDependency);
}
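
The helpers above maintain one index record per dependency name (and version) which stores the set of providing or requiring package IDs. The concrete record types come from the new storage headers that are not part of this excerpt; inferred from the usage here, their rough shape is presumably something like the following (constructors and serialization omitted):

    struct DatabaseDependency : LibPkg::Dependency {
        std::unordered_set<LibPkg::StorageID> relevantPackages; // IDs of packages providing/requiring this dependency
    };
    struct DatabaseLibraryDependency {
        std::string name;                                       // library name
        std::unordered_set<LibPkg::StorageID> relevantPackages;
    };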
void Database::addPackageDependencies(StorageID packageID, const std::shared_ptr<Package> &package)
{
{
auto txn = m_storage->providedDeps.getRWTransaction();
addDependency(txn, packageID, package->name, package->version);
for (const auto &dep : package->provides) {
addDependency(txn, packageID, dep.name, dep.version, dep.mode);
}
txn.commit();
}
{
auto txn = m_storage->requiredDeps.getRWTransaction();
for (const auto &dep : package->dependencies) {
addDependency(txn, packageID, dep.name, dep.version, dep.mode);
}
for (const auto &dep : package->optionalDependencies) {
addDependency(txn, packageID, dep.name, dep.version, dep.mode);
}
txn.commit();
}
{
auto txn = m_storage->providedLibs.getRWTransaction();
for (const auto &lib : package->libprovides) {
addLibDependency(txn, packageID, lib);
}
txn.commit();
}
{
auto txn = m_storage->requiredLibs.getRWTransaction();
for (const auto &lib : package->libdepends) {
addLibDependency(txn, packageID, lib);
}
txn.commit();
}
}
void Database::allPackages(const PackageVisitor &visitor)
{
// TODO: use cache here
//auto &cachedPackages = m_storage->packageCache;
auto txn = m_storage->packages.getROTransaction();
for (auto i = txn.begin(); i != txn.end(); ++i) {
if (visitor(i.getID(), std::move(i.value()))) {
return;
}
}
}
std::size_t Database::packageCount() const
{
return m_storage->packages.getROTransaction().size();
}
void Database::providingPackages(const Dependency &dependency, bool reverse, const PackageVisitor &visitor)
{
// TODO: use cache here
auto package = Package();
auto providesTxn = (reverse ? m_storage->requiredDeps : m_storage->providedDeps).getROTransaction();
auto packagesTxn = m_storage->packages.getROTransaction();
for (auto [i, end] = providesTxn.equal_range<0>(dependency.name); i != end; ++i) {
const Dependency &providedDependency = i.value();
if (!Dependency::matches(dependency.mode, dependency.version, providedDependency.version)) {
continue;
}
for (const auto packageID : i->relevantPackages) {
if (packagesTxn.get(packageID, package) && visitor(packageID, std::move(package))) {
return;
}
}
}
}
void Database::providingPackages(const std::string &libraryName, bool reverse, const PackageVisitor &visitor)
{
// TODO: use cache here
auto package = Package();
auto providesTxn = (reverse ? m_storage->requiredLibs : m_storage->providedLibs).getROTransaction();
auto packagesTxn = m_storage->packages.getROTransaction();
for (auto [i, end] = providesTxn.equal_range<0>(libraryName); i != end; ++i) {
for (const auto packageID : i->relevantPackages) {
if (packagesTxn.get(packageID, package) && visitor(packageID, std::move(package))) {
return;
}
}
}
}
bool Database::provides(const Dependency &dependency, bool reverse) const
{
auto providesTxn = (reverse ? m_storage->requiredDeps : m_storage->providedDeps).getROTransaction();
for (auto [i, end] = providesTxn.equal_range<0>(dependency.name); i != end; ++i) {
const Dependency &providedDependency = i.value();
if (Dependency::matches(dependency.mode, dependency.version, providedDependency.version)) {
return true;
}
}
return false;
}
bool Database::provides(const std::string &libraryName, bool reverse) const
{
auto providesTxn = (reverse ? m_storage->requiredLibs : m_storage->providedLibs).getROTransaction();
return providesTxn.find<0>(libraryName) != providesTxn.end();
}
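
Both provides() overloads answer pure existence questions against the on-disk indices without loading any package records; the reverse flag switches from the provided to the required index. A hedged sketch with made-up dependency and library names:

    if (db.provides(LibPkg::Dependency(std::string_view("bash")))) {
        // some package in db provides "bash"
    }
    if (db.provides(std::string("libexample.so.1"), true)) {
        // some package in db requires this library (reverse lookup)
    }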
std::shared_ptr<Package> Database::findPackage(StorageID packageID)
{
// TODO: use cache here
auto package = std::make_shared<Package>();
auto txn = m_storage->packages.getROTransaction();
return txn.get(packageID, *package) ? package : std::shared_ptr<Package>();
}
std::shared_ptr<Package> Database::findPackage(const std::string &packageName)
{
return m_storage->packageCache.retrieve(*m_storage, packageName).pkg;
}
PackageSpec Database::findPackageWithID(const std::string &packageName)
{
return m_storage->packageCache.retrieve(*m_storage, packageName);
}
void Database::removePackage(const std::string &packageName)
{
const auto packageIterator = packages.find(packageName);
if (packageIterator == packages.end()) {
return;
const auto [packageID, package] = m_storage->packageCache.retrieve(*m_storage, packageName);
if (package) {
removePackageDependencies(packageID, package);
m_storage->packageCache.invalidate(*m_storage, packageName);
}
removePackage(packageIterator);
}
void LibPkg::Database::removePackage(PackageMap::const_iterator packageIterator)
StorageID Database::updatePackage(const std::shared_ptr<Package> &package)
{
removePackageDependencies(packageIterator);
packages.erase(packageIterator);
const auto res = m_storage->packageCache.store(*m_storage, package, false);
if (!res.updated) {
return res.id;
}
if (res.oldPackage) {
removePackageDependencies(res.id, res.oldPackage);
}
addPackageDependencies(res.id, package);
return res.id;
}
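
updatePackage() now returns the StorageID assigned by the package store, and the record can be loaded back by that ID. A hedged sketch with made-up package contents:

    auto pkg = std::make_shared<LibPkg::Package>();
    pkg->name = "example-package";
    pkg->version = "1.0-1";
    const auto id = db.updatePackage(pkg);  // stores the package and refreshes the dependency indices
    const auto stored = db.findPackage(id); // loads the record again via its StorageID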
void Database::updatePackage(const std::shared_ptr<Package> &package)
StorageID Database::forceUpdatePackage(const std::shared_ptr<Package> &package)
{
// check whether the package already exists
const auto packageIterator = packages.find(package->name);
if (packageIterator != packages.end()) {
const auto &existingPackage = packageIterator->second;
if (package == existingPackage) {
return;
}
// retain certain information obtained from package contents if this is actually the same package as before
package->addDepsAndProvidesFromOtherPackage(*existingPackage);
// remove the existing package
removePackage(packageIterator);
}
// add the new package
addPackageDependencies(package);
packages.emplace(package->name, package);
}
void Database::forceUpdatePackage(const std::shared_ptr<Package> &package)
{
// check whether the package already exists
const auto packageIterator = packages.find(package->name);
auto differentPackage = true;
if (packageIterator != packages.end()) {
const auto &existingPackage = packageIterator->second;
if ((differentPackage = package != existingPackage)) {
// retain certain information obtained from package contents if this is actually the same package as before
package->addDepsAndProvidesFromOtherPackage(*existingPackage);
// remove the existing package
removePackage(packageIterator);
}
}
// add the new package
addPackageDependencies(package);
if (differentPackage) {
packages.emplace(package->name, package);
const auto res = m_storage->packageCache.store(*m_storage, package, true);
if (res.oldPackage) {
removePackageDependencies(res.id, res.oldPackage);
}
addPackageDependencies(res.id, package);
return res.id;
}
void Database::replacePackages(const std::vector<std::shared_ptr<Package>> &newPackages, DateTime lastModified)
{
// retain certain information obtained from package contents
for (auto &package : newPackages) {
const auto packageIterator = packages.find(package->name);
if (packageIterator == packages.end()) {
continue;
}
package->addDepsAndProvidesFromOtherPackage(*packageIterator->second);
}
// clear current packages and add new ones
clearPackages();
for (auto &package : newPackages) {
updatePackage(package);
auto updater = PackageUpdater(*this);
for (const auto &package : newPackages) {
updater.update(package);
}
updater.commit();
lastUpdate = lastModified;
}
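
replacePackages() above uses the new PackageUpdater, which stores packages through one read-write transaction and defers all dependency/library index updates until commit(). A hedged sketch of driving it directly, e.g. when importing a batch of packages obtained elsewhere:

    auto updater = LibPkg::PackageUpdater(db);
    for (const auto &package : importedPackages) { // importedPackages: std::vector<std::shared_ptr<Package>>
        updater.update(package);                   // returns the StorageID assigned to the stored package
    }
    updater.commit();                              // writes the accumulated index changes in one batch per index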
@ -200,15 +403,15 @@ void Database::replacePackages(const std::vector<std::shared_ptr<Package>> &newP
* \remarks "Resolvable" means here (so far) just that all dependencies are present. It does not mean a package is "installable" because
* conflicts between dependencies might still prevent that.
*/
std::unordered_map<std::shared_ptr<Package>, UnresolvedDependencies> Database::detectUnresolvedPackages(Config &config,
std::unordered_map<PackageSpec, UnresolvedDependencies> Database::detectUnresolvedPackages(Config &config,
const std::vector<std::shared_ptr<Package>> &newPackages, const DependencySet &removedProvides,
const std::unordered_set<std::string_view> &depsToIgnore, const std::unordered_set<std::string_view> &libsToIgnore)
{
auto unresolvedPackages = std::unordered_map<std::shared_ptr<Package>, UnresolvedDependencies>();
auto unresolvedPackages = std::unordered_map<PackageSpec, UnresolvedDependencies>();
// determine new provides
DependencySet newProvides;
set<string> newLibProvides;
auto newProvides = DependencySet();
auto newLibProvides = std::set<std::string>();
for (const auto &newPackage : newPackages) {
newProvides.add(Dependency(newPackage->name, newPackage->version), newPackage);
for (const auto &newProvide : newPackage->provides) {
@ -230,29 +433,26 @@ std::unordered_map<std::shared_ptr<Package>, UnresolvedDependencies> Database::d
}
// check whether all required dependencies are still provided
for (const auto &requiredDep : requiredDeps) {
const auto &[dependencyName, dependencyDetail] = requiredDep;
const auto &affectedPackages = dependencyDetail.relevantPackages;
for (auto txn = m_storage->requiredDeps.getROTransaction(); const auto &requiredDep : txn) {
// skip dependencies to ignore
if (depsToIgnore.find(dependencyName) != depsToIgnore.end()) {
if (depsToIgnore.find(requiredDep.name) != depsToIgnore.end()) {
continue;
}
// skip if new packages provide dependency
if (newProvides.provides(dependencyName, dependencyDetail)) {
if (newProvides.provides(requiredDep)) {
continue;
}
// skip if db provides dependency
if (!removedProvides.provides(dependencyName, dependencyDetail) && providedDeps.provides(dependencyName, dependencyDetail)) {
if (!removedProvides.provides(requiredDep) && provides(requiredDep)) {
continue;
}
// skip if dependency is provided by a database this database depends on or the protected version of this db
auto providedByAnotherDb = false;
for (const auto *db : deps) {
if ((providedByAnotherDb = db->providedDeps.provides(requiredDep.first, requiredDep.second))) {
if ((providedByAnotherDb = db->provides(requiredDep))) {
break;
}
}
@ -261,33 +461,34 @@ std::unordered_map<std::shared_ptr<Package>, UnresolvedDependencies> Database::d
}
// add packages to list of unresolved packages
for (const auto &affectedPackage : affectedPackages) {
unresolvedPackages[affectedPackage].deps.emplace_back(Dependency(dependencyName, dependencyDetail.version, dependencyDetail.mode));
for (const auto &affectedPackageID : requiredDep.relevantPackages) {
const auto affectedPackage = findPackage(affectedPackageID);
unresolvedPackages[PackageSpec(affectedPackageID, affectedPackage)].deps.emplace_back(requiredDep);
}
}
// check whether all required libraries are still provided
for (const auto &[requiredLib, affectedPackages] : requiredLibs) {
for (auto txn = m_storage->requiredLibs.getROTransaction(); const auto &requiredLib : txn) {
// skip libs to ignore
if (libsToIgnore.find(requiredLib) != libsToIgnore.end()) {
if (libsToIgnore.find(requiredLib.name) != libsToIgnore.end()) {
continue;
}
// skip if new packages provide dependency
if (newLibProvides.find(requiredLib) != newLibProvides.end()) {
if (newLibProvides.find(requiredLib.name) != newLibProvides.end()) {
continue;
}
// skip if db provides dependency
if (providedLibs.find(requiredLib) != providedLibs.end()) {
if (provides(requiredLib.name)) {
continue;
}
// skip if dependency is provided by a database this database depends on or the protected version of this db
auto providedByAnotherDb = false;
for (const auto *db : deps) {
if ((providedByAnotherDb = db->providedLibs.find(requiredLib) != db->providedLibs.end())) {
if ((providedByAnotherDb = db->provides(requiredLib.name))) {
break;
}
}
@ -296,8 +497,9 @@ std::unordered_map<std::shared_ptr<Package>, UnresolvedDependencies> Database::d
}
// add packages to list of unresolved packages
for (const auto &affectedPackage : affectedPackages) {
unresolvedPackages[affectedPackage].libs.emplace_back(requiredLib);
for (const auto &affectedPackageID : requiredLib.relevantPackages) {
const auto affectedPackage = findPackage(affectedPackageID);
unresolvedPackages[PackageSpec(affectedPackageID, affectedPackage)].libs.emplace_back(requiredLib.name);
}
}
@ -306,8 +508,9 @@ std::unordered_map<std::shared_ptr<Package>, UnresolvedDependencies> Database::d
LibPkg::PackageUpdates LibPkg::Database::checkForUpdates(const std::vector<LibPkg::Database *> &updateSources, UpdateCheckOptions options)
{
PackageUpdates results;
for (const auto &[myPackageName, myPackage] : packages) {
auto results = PackageUpdates();
allPackages([&](StorageID myPackageID, Package &&package) {
auto myPackage = std::make_shared<Package>(std::move(package));
auto regularName = std::string();
if (options & UpdateCheckOptions::ConsiderRegularPackage) {
const auto decomposedName = myPackage->decomposeName();
@ -317,12 +520,11 @@ LibPkg::PackageUpdates LibPkg::Database::checkForUpdates(const std::vector<LibPk
}
auto foundPackage = false;
for (auto *const updateSource : updateSources) {
const auto updatePackageIterator = updateSource->packages.find(myPackageName);
if (updatePackageIterator == updateSource->packages.cend()) {
const auto [updatePackageID, updatePackage] = updateSource->findPackageWithID(myPackage->name);
if (!updatePackage) {
continue;
}
foundPackage = true;
const auto &updatePackage = updatePackageIterator->second;
const auto versionDiff = myPackage->compareVersion(*updatePackage);
std::vector<PackageUpdate> *list = nullptr;
switch (versionDiff) {
@ -338,21 +540,21 @@ LibPkg::PackageUpdates LibPkg::Database::checkForUpdates(const std::vector<LibPk
default:;
}
if (list) {
list->emplace_back(PackageSearchResult(*this, myPackage), PackageSearchResult(*updateSource, updatePackage));
list->emplace_back(
PackageSearchResult(*this, myPackage, myPackageID), PackageSearchResult(*updateSource, updatePackage, updatePackageID));
}
}
if (!foundPackage) {
results.orphans.emplace_back(PackageSearchResult(*this, myPackage));
results.orphans.emplace_back(PackageSearchResult(*this, myPackage, myPackageID));
}
if (regularName.empty()) {
continue;
return false;
}
for (auto *const updateSource : updateSources) {
const auto updatePackageIterator = updateSource->packages.find(regularName);
if (updatePackageIterator == updateSource->packages.cend()) {
const auto [updatePackageID, updatePackage] = updateSource->findPackageWithID(regularName);
if (!updatePackage) {
continue;
}
const auto &updatePackage = updatePackageIterator->second;
const auto versionDiff = myPackage->compareVersion(*updatePackage);
std::vector<PackageUpdate> *list = nullptr;
switch (versionDiff) {
@ -368,10 +570,12 @@ LibPkg::PackageUpdates LibPkg::Database::checkForUpdates(const std::vector<LibPk
default:;
}
if (list) {
list->emplace_back(PackageSearchResult(*this, myPackage), PackageSearchResult(*updateSource, updatePackage));
list->emplace_back(
PackageSearchResult(*this, myPackage, myPackageID), PackageSearchResult(*updateSource, updatePackage, updatePackageID));
}
}
}
return false;
});
return results;
}
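
checkForUpdates() now iterates the database's own packages via allPackages() and looks up update candidates in each source database by name. A hedged call sketch; coreDb and extraDb are placeholder Database pointers:

    auto updateSources = std::vector<LibPkg::Database *>{ coreDb, extraDb };
    auto updates = db.checkForUpdates(updateSources, LibPkg::UpdateCheckOptions::ConsiderRegularPackage);
    // updates.orphans lists packages of db that no update source knows about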
@ -416,6 +620,167 @@ std::string Database::filesPathFromRegularPath() const
return ext == std::string::npos ? path : argsToString(std::string_view(path.data(), ext), ".files");
}
PackageUpdaterPrivate::PackageUpdaterPrivate(DatabaseStorage &storage)
: packagesTxn(storage.packages.getRWTransaction())
{
}
void PackageUpdaterPrivate::update(const PackageCache::StoreResult &res, const std::shared_ptr<Package> &package)
{
update(res.id, false, package);
if (res.oldPackage) {
update(res.id, true, res.oldPackage);
}
}
void PackageUpdaterPrivate::update(const StorageID packageID, bool removed, const std::shared_ptr<Package> &package)
{
addDependency(packageID, Dependency(package->name, package->version), removed, affectedProvidedDeps);
for (const auto &dependency : package->provides) {
addDependency(packageID, dependency, removed, affectedProvidedDeps);
}
for (const auto &lib : package->libprovides) {
addLibrary(packageID, lib, removed, affectedProvidedLibs);
}
for (const auto &dependency : package->dependencies) {
addDependency(packageID, dependency, removed, affectedRequiredDeps);
}
for (const auto &dependency : package->optionalDependencies) {
addDependency(packageID, dependency, removed, affectedRequiredDeps);
}
for (const auto &lib : package->libdepends) {
addLibrary(packageID, lib, removed, affectedRequiredLibs);
}
}
void PackageUpdaterPrivate::submit(const std::string &dependencyName, AffectedDeps::mapped_type &affected, DependencyStorage::RWTransaction &txn)
{
for (auto [i, end] = txn.equal_range<0>(dependencyName); i != end; ++i) {
auto &existingDependency = i.value();
if (static_cast<const Dependency &>(existingDependency).version != affected.version) {
continue;
}
auto &pkgs = existingDependency.relevantPackages;
auto size = pkgs.size();
pkgs.merge(affected.newPackages);
auto change = pkgs.size() != size;
for (auto &toRemove : affected.removedPackages) {
change = pkgs.erase(toRemove) || change;
}
if (change) {
txn.put(existingDependency, i.getID());
}
return;
}
auto newDependency = DatabaseDependency(dependencyName, affected.version, affected.mode);
newDependency.relevantPackages.swap(affected.newPackages);
txn.put(newDependency);
}
void PackageUpdaterPrivate::submit(const std::string &libraryName, AffectedLibs::mapped_type &affected, LibraryDependencyStorage::RWTransaction &txn)
{
for (auto [i, end] = txn.equal_range<0>(libraryName); i != end; ++i) {
auto &existingDependency = i.value();
auto &pkgs = existingDependency.relevantPackages;
auto size = pkgs.size();
pkgs.merge(affected.newPackages);
auto change = pkgs.size() != size;
for (auto &toRemove : affected.removedPackages) {
change = pkgs.erase(toRemove) || change;
}
if (change) {
txn.put(existingDependency, i.getID());
}
return;
}
auto newDependency = DatabaseLibraryDependency(libraryName);
newDependency.relevantPackages.swap(affected.newPackages);
txn.put(newDependency);
}
PackageUpdaterPrivate::AffectedDeps::iterator PackageUpdaterPrivate::findDependency(const Dependency &dependency, AffectedDeps &affected)
{
for (auto range = affected.equal_range(dependency.name); range.first != range.second; ++range.first) {
if (dependency.version == range.first->second.version) {
return range.first;
}
}
return affected.end();
}
void PackageUpdaterPrivate::addDependency(StorageID packageID, const Dependency &dependency, bool removed, AffectedDeps &affected)
{
auto iterator = findDependency(dependency, affected);
if (iterator == affected.end()) {
iterator = affected.insert(AffectedDeps::value_type(dependency.name, AffectedDeps::mapped_type()));
iterator->second.version = dependency.version;
iterator->second.mode = dependency.mode;
}
if (!removed) {
iterator->second.newPackages.emplace(packageID);
} else {
iterator->second.removedPackages.emplace(packageID);
}
}
void PackageUpdaterPrivate::addLibrary(StorageID packageID, const std::string &libraryName, bool removed, AffectedLibs &affected)
{
if (auto &affectedPackages = affected[libraryName]; !removed) {
affectedPackages.newPackages.emplace(packageID);
} else {
affectedPackages.removedPackages.emplace(packageID);
}
}
PackageUpdater::PackageUpdater(Database &database)
: m_database(database)
, m_d(std::make_unique<PackageUpdaterPrivate>(*m_database.m_storage))
{
}
PackageUpdater::~PackageUpdater()
{
}
StorageID PackageUpdater::update(const std::shared_ptr<Package> &package)
{
const auto res = m_database.m_storage->packageCache.store(*m_database.m_storage, m_d->packagesTxn, package);
m_d->update(res, package);
return res.id;
}
void PackageUpdater::commit()
{
m_d->packagesTxn.commit();
{
auto txn = m_database.m_storage->providedDeps.getRWTransaction();
for (auto &[dependencyName, affected] : m_d->affectedProvidedDeps) {
m_d->submit(dependencyName, affected, txn);
}
txn.commit();
}
{
auto txn = m_database.m_storage->requiredDeps.getRWTransaction();
for (auto &[dependencyName, affected] : m_d->affectedRequiredDeps) {
m_d->submit(dependencyName, affected, txn);
}
txn.commit();
}
{
auto txn = m_database.m_storage->providedLibs.getRWTransaction();
for (auto &[libraryName, affected] : m_d->affectedProvidedLibs) {
m_d->submit(libraryName, affected, txn);
}
txn.commit();
}
{
auto txn = m_database.m_storage->requiredLibs.getRWTransaction();
for (auto &[libraryName, affected] : m_d->affectedRequiredLibs) {
m_d->submit(libraryName, affected, txn);
}
txn.commit();
}
}
} // namespace LibPkg
namespace ReflectiveRapidJSON {

View File

@ -3,6 +3,7 @@
#include "./package.h"
#include "./siglevel.h"
#include "./storagefwd.h"
#include "../global.h"
@ -25,16 +26,18 @@ struct LIBPKG_EXPORT DatabaseInfo {
struct LIBPKG_EXPORT PackageSearchResult {
PackageSearchResult();
PackageSearchResult(Database &database, const std::shared_ptr<Package> &package);
PackageSearchResult(Database &database, const std::shared_ptr<Package> &package, StorageID id);
PackageSearchResult(Database &database, Package &&package, StorageID id);
bool operator==(const PackageSearchResult &other) const;
/// \brief The related database.
/// \remarks
/// - The find functions always use Database* and it is guaranteed to be never nullptr.
/// - The find functions always uses Database* and it is guaranteed to be never nullptr.
/// - The deserialization functions always use DatabaseInfo and the values might be empty if the source was empty.
/// - The serialization functions can cope with both alternatives.
std::variant<Database *, DatabaseInfo> db;
std::shared_ptr<Package> pkg;
StorageID id;
};
/*!
@ -93,11 +96,33 @@ struct LIBPKG_EXPORT UnresolvedDependencies : public ReflectiveRapidJSON::JsonSe
std::vector<std::string> libs;
};
struct PackageUpdaterPrivate;
struct LIBPKG_EXPORT PackageUpdater {
explicit PackageUpdater(Database &database);
~PackageUpdater();
StorageID update(const std::shared_ptr<Package> &package);
void commit();
private:
Database &m_database;
std::unique_ptr<PackageUpdaterPrivate> m_d;
};
struct LIBPKG_EXPORT Database : public ReflectiveRapidJSON::JsonSerializable<Database>, public ReflectiveRapidJSON::BinarySerializable<Database> {
using PackageMap = std::unordered_map<std::string, std::shared_ptr<Package>>;
using PackageVisitor = std::function<bool(StorageID, Package &&)>;
Database(const std::string &name = std::string(), const std::string &path = std::string());
Database(std::string &&name, std::string &&path);
friend struct PackageUpdater;
explicit Database(const std::string &name = std::string(), const std::string &path = std::string());
explicit Database(std::string &&name, std::string &&path);
Database(Database &&other);
~Database();
Database &operator=(Database &&rhs);
void initStorage(StorageDistribution &storage);
void deducePathsFromLocalDirs();
void resetConfiguration();
void clearPackages();
@ -106,14 +131,23 @@ struct LIBPKG_EXPORT Database : public ReflectiveRapidJSON::JsonSerializable<Dat
void loadPackages(FileMap &&databaseFiles, CppUtilities::DateTime lastModified);
static bool isFileRelevant(const char *filePath, const char *fileName, mode_t);
std::vector<std::shared_ptr<Package>> findPackages(const std::function<bool(const Database &, const Package &)> &pred);
void removePackageDependencies(typename PackageMap::const_iterator packageIterator);
void addPackageDependencies(const std::shared_ptr<Package> &package);
void removePackageDependencies(StorageID packageID, const std::shared_ptr<Package> &package);
void addPackageDependencies(StorageID packageID, const std::shared_ptr<Package> &package);
void allPackages(const PackageVisitor &visitor);
std::size_t packageCount() const;
void providingPackages(const Dependency &dependency, bool reverse, const PackageVisitor &visitor);
void providingPackages(const std::string &libraryName, bool reverse, const PackageVisitor &visitor);
bool provides(const Dependency &dependency, bool reverse = false) const;
bool provides(const std::string &libraryName, bool reverse = false) const;
std::shared_ptr<Package> findPackage(StorageID packageID);
std::shared_ptr<Package> findPackage(const std::string &packageName);
PackageSpec findPackageWithID(const std::string &packageName);
void removePackage(const std::string &packageName);
void removePackage(typename PackageMap::const_iterator packageIterator);
void updatePackage(const std::shared_ptr<Package> &package);
void forceUpdatePackage(const std::shared_ptr<Package> &package);
StorageID updatePackage(const std::shared_ptr<Package> &package);
StorageID forceUpdatePackage(const std::shared_ptr<Package> &package);
void replacePackages(const std::vector<std::shared_ptr<Package>> &newPackages, CppUtilities::DateTime lastModified);
std::unordered_map<std::shared_ptr<Package>, UnresolvedDependencies> detectUnresolvedPackages(Config &config,
std::unordered_map<PackageSpec, UnresolvedDependencies> detectUnresolvedPackages(Config &config,
const std::vector<std::shared_ptr<Package>> &newPackages, const DependencySet &removedPackages,
const std::unordered_set<std::string_view> &depsToIgnore = std::unordered_set<std::string_view>(),
const std::unordered_set<std::string_view> &libsToIgnore = std::unordered_set<std::string_view>());
@ -125,45 +159,45 @@ struct LIBPKG_EXPORT Database : public ReflectiveRapidJSON::JsonSerializable<Dat
std::string path;
std::string filesPath;
std::vector<std::string> mirrors;
PackageMap packages;
DatabaseUsage usage = DatabaseUsage::None;
SignatureLevel signatureLevel = SignatureLevel::Default;
std::string arch = "x86_64";
std::vector<std::string> dependencies;
DependencySet providedDeps;
DependencySet requiredDeps;
std::unordered_map<std::string, std::vector<std::shared_ptr<Package>>> providedLibs;
std::unordered_map<std::string, std::vector<std::shared_ptr<Package>>> requiredLibs;
std::string localPkgDir;
std::string localDbDir;
CppUtilities::DateTime lastUpdate;
bool syncFromMirror = false;
bool toBeDiscarded = false;
// FIXME: The member variables packages, providedDeps, requiredDeps, providedLibs and requiredLibs should
// not be updated directly/individually; better make them private and provide a getter to a const ref.
private:
std::unique_ptr<DatabaseStorage> m_storage;
};
inline Database::Database(const std::string &name, const std::string &path)
: name(name)
, path(path)
{
}
inline Database::Database(std::string &&name, std::string &&path)
: name(std::move(name))
, path(std::move(path))
inline Database &Database::operator=(Database &&rhs)
{
    if (this != &rhs) {
        // assigning "*this = std::move(rhs)" here would recurse into this operator endlessly;
        // destroy the current instance and move-construct the new state in place instead
        // (placement new requires <new> to be available)
        this->~Database();
        new (this) Database(std::move(rhs));
    }
    return *this;
}
inline PackageSearchResult::PackageSearchResult()
: db(nullptr)
, id(0)
{
}
inline PackageSearchResult::PackageSearchResult(Database &database, const std::shared_ptr<Package> &package)
inline PackageSearchResult::PackageSearchResult(Database &database, const std::shared_ptr<Package> &package, StorageID id)
: db(&database)
, pkg(package)
, id(id)
{
}
inline PackageSearchResult::PackageSearchResult(Database &database, Package &&package, StorageID id)
: db(&database)
, pkg(std::make_shared<Package>(std::move(package)))
, id(id)
{
}

View File

@ -484,7 +484,7 @@ DependencySetBase::iterator DependencySet::add(const Dependency &dependency, con
{
auto iterator = findExact(dependency);
if (iterator == end()) {
iterator = insert(make_pair(dependency.name, DependencyDetail(dependency.version, dependency.mode, { relevantPackage })));
iterator = insert(std::make_pair(dependency.name, DependencyDetail(dependency.version, dependency.mode, { relevantPackage })));
} else {
iterator->second.relevantPackages.emplace(relevantPackage);
}
@ -497,7 +497,7 @@ DependencySetBase::iterator DependencySet::add(
{
auto iterator = findExact(dependencyName, dependencyDetail);
if (iterator == end()) {
iterator = insert(make_pair(dependencyName, DependencyDetail{ dependencyDetail.version, dependencyDetail.mode, { relevantPackage } }));
iterator = insert(std::make_pair(dependencyName, DependencyDetail{ dependencyDetail.version, dependencyDetail.mode, { relevantPackage } }));
} else {
iterator->second.relevantPackages.emplace(relevantPackage);
}
@ -536,3 +536,27 @@ void DependencySet::remove(const string &name)
}
} // namespace LibPkg
namespace ReflectiveRapidJSON {
namespace JsonReflector {
template <>
LIBPKG_EXPORT void push<LibPkg::PackageSpec>(
const LibPkg::PackageSpec &reflectable, RAPIDJSON_NAMESPACE::Value &value, RAPIDJSON_NAMESPACE::Document::AllocatorType &allocator)
{
// just serialize the package (and ignore the ID)
push(reflectable.pkg, value, allocator);
}
template <>
LIBPKG_EXPORT void pull<LibPkg::PackageSpec>(LibPkg::PackageSpec &reflectable,
const RAPIDJSON_NAMESPACE::GenericValue<RAPIDJSON_NAMESPACE::UTF8<char>> &value, JsonDeserializationErrors *errors)
{
// just deserialize the package (and ignore the ID)
pull(reflectable.pkg, value, errors);
}
} // namespace JsonReflector
} // namespace ReflectiveRapidJSON

View File

@ -1,6 +1,8 @@
#ifndef LIBPKG_DATA_PACKAGE_H
#define LIBPKG_DATA_PACKAGE_H
#include "./storagefwd.h"
#include "../global.h"
#include "../parser/utils.h"
@ -83,6 +85,8 @@ struct LIBPKG_EXPORT Dependency : public ReflectiveRapidJSON::JsonSerializable<D
explicit Dependency() = default;
explicit Dependency(const std::string &name, const std::string &version = std::string(), DependencyMode mode = DependencyMode::Any,
const std::string &description = std::string());
explicit Dependency(std::string &&name, std::string &&version = std::string(), DependencyMode mode = DependencyMode::Any,
std::string &&description = std::string());
explicit Dependency(const char *denotation, std::size_t denotationSize = std::numeric_limits<std::size_t>::max());
explicit Dependency(std::string_view denotation);
bool operator==(const Dependency &other) const;
@ -107,6 +111,14 @@ inline Dependency::Dependency(const std::string &name, const std::string &versio
{
}
inline Dependency::Dependency(std::string &&name, std::string &&version, DependencyMode mode, std::string &&description)
: name(std::move(name))
, version(std::move(version))
, description(std::move(description))
, mode(mode)
{
}
inline Dependency::Dependency(std::string_view denotation)
: Dependency(denotation.data(), denotation.size())
{
@ -132,6 +144,38 @@ inline Dependency Dependency::fromString(std::string_view dependency)
return Dependency(dependency);
}
struct LIBPKG_EXPORT DatabaseDependency : public Dependency,
public ReflectiveRapidJSON::JsonSerializable<DatabaseDependency>,
public ReflectiveRapidJSON::BinarySerializable<DatabaseDependency> {
explicit DatabaseDependency() = default;
explicit DatabaseDependency(const std::string &name, const std::string &version, DependencyMode mode);
explicit DatabaseDependency(std::string &&name, std::string &&version, DependencyMode mode);
std::unordered_set<StorageID> relevantPackages;
};
inline DatabaseDependency::DatabaseDependency(std::string &&name, std::string &&version, DependencyMode mode)
: Dependency(std::move(name), std::move(version), mode, std::string())
{
}
inline DatabaseDependency::DatabaseDependency(const std::string &name, const std::string &version, DependencyMode mode)
: Dependency(std::move(name), std::move(version), mode, std::string())
{
}
struct LIBPKG_EXPORT DatabaseLibraryDependency : public ReflectiveRapidJSON::JsonSerializable<DatabaseLibraryDependency>,
public ReflectiveRapidJSON::BinarySerializable<DatabaseLibraryDependency> {
explicit DatabaseLibraryDependency() = default;
explicit DatabaseLibraryDependency(const std::string &name);
std::string name;
std::unordered_set<StorageID> relevantPackages;
};
inline DatabaseLibraryDependency::DatabaseLibraryDependency(const std::string &name)
: name(name)
{
}
} // namespace LibPkg
namespace std {
@ -283,11 +327,54 @@ struct LIBPKG_EXPORT PackageNameData {
};
struct DependencySet;
struct Package;
/*!
* \brief The PackageSpec struct holds a reference to a package.
* \remarks If id is non-zero, the package is part of a database using that ID.
*/
struct LIBPKG_EXPORT PackageSpec : public ReflectiveRapidJSON::JsonSerializable<PackageSpec>,
public ReflectiveRapidJSON::BinarySerializable<PackageSpec> {
explicit PackageSpec(StorageID id = 0, const std::shared_ptr<Package> &pkg = nullptr);
bool operator==(const PackageSpec &other) const;
StorageID id;
std::shared_ptr<Package> pkg;
};
inline PackageSpec::PackageSpec(StorageID id, const std::shared_ptr<Package> &pkg)
: id(id)
, pkg(pkg)
{
}
inline bool PackageSpec::operator==(const PackageSpec &other) const
{
return id ? id == other.id : pkg == other.pkg;
}
} // namespace LibPkg
namespace std {
template <> struct hash<LibPkg::PackageSpec> {
std::size_t operator()(const LibPkg::PackageSpec &spec) const
{
using std::hash;
return spec.id ? hash<decltype(spec.id)>()(spec.id) : hash<decltype(spec.pkg)>()(spec.pkg);
}
};
} // namespace std
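To illustrate the ID-first semantics of equality and hashing, a hedged example with made-up values and hypothetical variable names; it assumes <cassert>, <memory> and <functional> are included:

// Hedged example: specs with a non-zero ID compare (and hash) by that ID only.
auto pkg = std::make_shared<LibPkg::Package>();
auto stored = LibPkg::PackageSpec(42, pkg);
auto sameIdOtherObject = LibPkg::PackageSpec(42, std::make_shared<LibPkg::Package>());
auto unstored = LibPkg::PackageSpec(0, pkg);
assert(stored == sameIdOtherObject);                    // same ID wins over differing pointers
assert(!(unstored == LibPkg::PackageSpec(0, nullptr))); // without an ID, the shared_ptr decides
assert(std::hash<LibPkg::PackageSpec>()(stored) == std::hash<LibPkg::PackageSpec>()(sameIdOtherObject));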
namespace LibPkg {
struct LIBPKG_EXPORT Package : public ReflectiveRapidJSON::JsonSerializable<Package>, public ReflectiveRapidJSON::BinarySerializable<Package> {
Package() = default;
Package(const Package &other);
Package(Package &&other) = default;
//Package &operator=(const Package &other);
Package &operator=(Package &&other) = default;
bool providesDependency(const Dependency &dependency) const;
static void exportProvides(
const std::shared_ptr<Package> &package, DependencySet &destinationProvides, std::unordered_set<std::string> &destinationLibProvides);
@ -307,14 +394,13 @@ struct LIBPKG_EXPORT Package : public ReflectiveRapidJSON::JsonSerializable<Pack
static bool isPkgInfoFileOrBinary(const char *filePath, const char *fileName, mode_t mode);
static bool isLicense(const char *filePath, const char *fileName, mode_t mode);
static std::vector<std::shared_ptr<Package>> fromInfo(const std::string &info, bool isPackageInfo = false);
static std::vector<PackageSpec> fromInfo(const std::string &info, bool isPackageInfo = false);
static std::shared_ptr<Package> fromDescription(const std::vector<std::string> &descriptionParts);
static std::vector<std::shared_ptr<Package>> fromDatabaseFile(FileMap &&databaseFile);
static std::shared_ptr<Package> fromPkgFile(const std::string &path);
static std::tuple<std::string_view, std::string_view, std::string_view> fileNameComponents(std::string_view fileName);
static std::shared_ptr<Package> fromPkgFileName(std::string_view fileName);
static std::vector<std::shared_ptr<Package>> fromAurRpcJson(
const char *jsonData, std::size_t jsonSize, PackageOrigin origin = PackageOrigin::AurRpcInfo);
static std::vector<PackageSpec> fromAurRpcJson(const char *jsonData, std::size_t jsonSize, PackageOrigin origin = PackageOrigin::AurRpcInfo);
PackageOrigin origin = PackageOrigin::Default;
CppUtilities::DateTime timestamp;
@ -357,6 +443,16 @@ inline Package::Package(const Package &other)
{
}
/*
Package &Package::operator=(const Package &other)
{
if (this != &other) {
}
return *this;
}
*/
inline bool Package::isSame(const Package &other) const
{
return name == other.name && version == other.version;
@ -414,6 +510,18 @@ struct LIBPKG_EXPORT DependencySet : public DependencySetBase {
namespace ReflectiveRapidJSON {
REFLECTIVE_RAPIDJSON_TREAT_AS_MULTI_MAP_OR_HASH(LibPkg::DependencySet);
}
namespace JsonReflector {
// declare custom (de)serialization for PackageSearchResult
template <>
LIBPKG_EXPORT void push<LibPkg::PackageSpec>(
const LibPkg::PackageSpec &reflectable, RAPIDJSON_NAMESPACE::Value &value, RAPIDJSON_NAMESPACE::Document::AllocatorType &allocator);
template <>
LIBPKG_EXPORT void pull<LibPkg::PackageSpec>(LibPkg::PackageSpec &reflectable,
const RAPIDJSON_NAMESPACE::GenericValue<RAPIDJSON_NAMESPACE::UTF8<char>> &value, JsonDeserializationErrors *errors);
} // namespace JsonReflector
} // namespace ReflectiveRapidJSON
#endif // LIBPKG_DATA_PACKAGE_H

libpkg/data/storage.cpp Normal file
View File

@ -0,0 +1,186 @@
#include "./storageprivate.h"
#include <c++utilities/conversion/stringbuilder.h>
using namespace CppUtilities;
namespace LibPkg {
StorageDistribution::StorageDistribution(const char *path, std::uint32_t maxDbs)
{
m_env = getMDBEnv(path, MDB_NOSUBDIR, 0600, maxDbs);
}
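One LMDB environment (a single file thanks to MDB_NOSUBDIR) thus backs all databases of a Config, and each package database later obtains its own set of named sub-databases from it. A hedged sketch of that wiring; the path, the maxDbs value and the database name are made up:

// Sketch only: open the shared environment and hand out per-database storage.
auto storage = std::make_unique<LibPkg::StorageDistribution>("/path/to/storage.db", 512);
auto databaseStorage = storage->forDatabase("core-x86_64"); // yields a std::unique_ptr<DatabaseStorage>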
PackageSpec PackageCache::retrieve(DatabaseStorage &databaseStorage, const std::string &packageName)
{
// check for package in cache
const auto ref = PackageCacheRef(databaseStorage, packageName);
const auto lock = std::unique_lock(m_mutex);
auto &cacheEntry = m_packages.findOrCreate(ref);
if (cacheEntry.package) {
return PackageSpec(cacheEntry.id, cacheEntry.package);
}
// check for package in storage, populate cache entry
cacheEntry.package = std::make_unique<Package>();
auto txn = databaseStorage.packages.getROTransaction();
if ((cacheEntry.id = txn.get<0>(packageName, *cacheEntry.package))) {
cacheEntry.ref.packageName = &cacheEntry.package->name;
return PackageSpec(cacheEntry.id, cacheEntry.package);
}
m_packages.undo();
return PackageSpec(0, std::shared_ptr<Package>());
}
PackageCache::StoreResult PackageCache::store(DatabaseStorage &databaseStorage, const std::shared_ptr<Package> &package, bool force)
{
// check for package in cache
const auto ref = PackageCacheRef(databaseStorage, package->name);
auto res = PackageCache::StoreResult();
auto lock = std::unique_lock(m_mutex);
auto &cacheEntry = m_packages.findOrCreate(ref);
if (cacheEntry.package == package && !force) {
// do nothing if cached package is the same as specified one
res.id = cacheEntry.id;
return res;
} else if (cacheEntry.package) {
// retain certain information obtained from package contents if this is actually the same package as before
package->addDepsAndProvidesFromOtherPackage(*(res.oldPackage = cacheEntry.package));
} else {
cacheEntry.package = std::make_shared<Package>();
}
// check for package in storage
auto txn = databaseStorage.packages.getRWTransaction();
if (!res.oldPackage && (cacheEntry.id = txn.get<0>(package->name, *cacheEntry.package))) {
package->addDepsAndProvidesFromOtherPackage(*(res.oldPackage = cacheEntry.package));
}
// update cache entry
cacheEntry.ref.packageName = &package->name;
cacheEntry.package = package;
// update package in storage
cacheEntry.id = txn.put(*package, cacheEntry.id);
txn.commit();
res.id = cacheEntry.id;
res.updated = true;
return res;
}
PackageCache::StoreResult PackageCache::store(
DatabaseStorage &databaseStorage, PackageStorage::RWTransaction &txn, const std::shared_ptr<Package> &package)
{
// check for package in cache
const auto ref = PackageCacheRef(databaseStorage, package->name);
auto res = PackageCache::StoreResult();
auto lock = std::unique_lock(m_mutex);
auto &cacheEntry = m_packages.findOrCreate(ref);
if (cacheEntry.package) {
// retain certain information obtained from package contents if this is actually the same package as before
res.id = cacheEntry.id;
package->addDepsAndProvidesFromOtherPackage(*(res.oldPackage = cacheEntry.package));
} else {
// check for package in storage
cacheEntry.package = std::make_shared<Package>();
if ((cacheEntry.id = txn.get<0>(package->name, *cacheEntry.package))) {
package->addDepsAndProvidesFromOtherPackage(*(res.oldPackage = cacheEntry.package));
}
}
// update cache entry
cacheEntry.ref.packageName = &package->name;
cacheEntry.package = package;
// update package in storage
res.id = cacheEntry.id = txn.put(*package, cacheEntry.id);
res.updated = true;
return res;
}
bool PackageCache::invalidate(DatabaseStorage &databaseStorage, const std::string &packageName)
{
// remove package from cache
const auto ref = PackageCacheRef(databaseStorage, packageName);
auto lock = std::unique_lock(m_mutex);
m_packages.erase(ref);
lock.unlock();
// remove package from storage
auto txn = databaseStorage.packages.getRWTransaction();
if (auto i = txn.find<0>(packageName); i != txn.end()) {
i.del();
txn.commit();
return true;
}
return false;
}
void PackageCache::clear(DatabaseStorage &databaseStorage)
{
clearCacheOnly(databaseStorage);
auto packagesTxn = databaseStorage.packages.getRWTransaction();
packagesTxn.clear();
packagesTxn.commit();
auto providedDepsTxn = databaseStorage.providedDeps.getRWTransaction();
providedDepsTxn.clear();
providedDepsTxn.commit();
auto requiredDepsTxn = databaseStorage.requiredDeps.getRWTransaction();
requiredDepsTxn.clear();
requiredDepsTxn.commit();
auto providedLibsTxn = databaseStorage.providedLibs.getRWTransaction();
providedLibsTxn.clear();
providedLibsTxn.commit();
auto requiredLibsTxn = databaseStorage.requiredLibs.getRWTransaction();
requiredLibsTxn.clear();
requiredLibsTxn.commit();
}
void PackageCache::clearCacheOnly(DatabaseStorage &databaseStorage)
{
const auto lock = std::unique_lock(m_mutex);
m_packages.clear(databaseStorage);
}
DatabaseStorage::DatabaseStorage(const std::shared_ptr<MDBEnv> &env, PackageCache &packageCache, std::string_view uniqueDatabaseName)
: packageCache(packageCache)
, packages(env, argsToString(uniqueDatabaseName, "_packages"))
, providedDeps(env, argsToString(uniqueDatabaseName, "_provides"))
, requiredDeps(env, argsToString(uniqueDatabaseName, "_requires"))
, providedLibs(env, argsToString(uniqueDatabaseName, "_libprovides"))
, requiredLibs(env, argsToString(uniqueDatabaseName, "_librequires"))
, m_env(env)
{
}
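So a database registered under the unique name "core-x86_64" ends up with sub-databases such as "core-x86_64_packages" and "core-x86_64_provides". A tiny illustrative check, with an example name and assuming <cassert> is included:

// Illustrative only: the naming scheme applied by the constructor above.
const auto uniqueName = std::string_view("core-x86_64");
assert(CppUtilities::argsToString(uniqueName, "_packages") == "core-x86_64_packages");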
std::size_t hash_value(const PackageCacheRef &ref)
{
const auto hasher1 = boost::hash<const LibPkg::DatabaseStorage *>();
const auto hasher2 = boost::hash<std::string>();
return ((hasher1(ref.databaseStorage) ^ (hasher2(*ref.packageName) << 1)) >> 1);
}
PackageCacheEntry &RecentlyUsedPackages::findOrCreate(const PackageCacheRef &ref)
{
    // look up the entry via the hashed index; on a hit, move it to the front of the sequenced
    // index so the least recently used entries accumulate at the back
    const auto &index = m_packages.get<PackageCacheRef>();
    if (auto i = index.find(ref); i != index.end()) {
        m_packages.relocate(m_packages.begin(), m_packages.project<0>(i));
        return i.get_node()->value();
    }
    // otherwise insert a new entry at the front; if it already existed, just move it to the front,
    // and if the configured limit is exceeded, evict the least recently used entry
    const auto [i, newItem] = m_packages.emplace_front(ref);
    if (!newItem) {
        m_packages.relocate(m_packages.begin(), i);
    } else if (m_packages.size() > m_limit) {
        m_packages.pop_back();
    }
    return i.get_node()->value();
}
std::size_t RecentlyUsedPackages::clear(const DatabaseStorage &databaseStorage)
{
auto count = std::size_t();
for (auto i = m_packages.begin(); i != m_packages.end();) {
if (i->ref.databaseStorage == &databaseStorage) {
i = m_packages.erase(i);
++count;
} else {
++i;
}
}
return count;
}
} // namespace LibPkg

libpkg/data/storagefwd.h Normal file
View File

@ -0,0 +1,27 @@
#ifndef LIBPKG_DATA_STORAGE_FWD_H
#define LIBPKG_DATA_STORAGE_FWD_H
//#include "./package.h"
#include <cstdint>
// forward declarations in accordance with lmdb-typed.hh
/*
struct nullindex_t;
template<typename T, class I1=nullindex_t, class I2=nullindex_t, class I3 = nullindex_t, class I4 = nullindex_t>
class TypedDBI;
template<class Class,typename Type,Type Class::*PtrToMember>
struct index_on;
*/
namespace LibPkg {
using StorageID = std::uint32_t;
//struct Package;
//using PackageStorage = TypedDBI<Package, index_on<Package, std::string, &Package::name>>;
struct StorageDistribution;
struct DatabaseStorage;
} // namespace LibPkg
#endif // LIBPKG_DATA_STORAGE_FWD_H

View File

@ -0,0 +1,163 @@
#ifndef LIBPKG_DATA_STORAGE_PRIVATE_H
#define LIBPKG_DATA_STORAGE_PRIVATE_H
#include "./package.h"
#include "../lmdb-safe/lmdb-safe.hh"
#include "../lmdb-safe/lmdb-typed.hh"
#include <boost/multi_index/hashed_index.hpp>
#include <boost/multi_index/member.hpp>
#include <boost/multi_index/sequenced_index.hpp>
#include <boost/multi_index_container.hpp>
#include <memory>
#include <mutex>
namespace LibPkg {
using StorageID = std::uint32_t;
using PackageStorage = TypedDBI<Package, index_on<Package, std::string, &Package::name>>;
using DependencyStorage = TypedDBI<DatabaseDependency, index_on<Dependency, std::string, &DatabaseDependency::name>>;
using LibraryDependencyStorage
= TypedDBI<DatabaseLibraryDependency, index_on<DatabaseLibraryDependency, std::string, &DatabaseLibraryDependency::name>>;
struct PackageCache;
struct DatabaseStorage {
explicit DatabaseStorage(const std::shared_ptr<MDBEnv> &env, PackageCache &packageCache, std::string_view uniqueDatabaseName);
PackageCache &packageCache;
PackageStorage packages;
DependencyStorage providedDeps;
DependencyStorage requiredDeps;
LibraryDependencyStorage providedLibs;
LibraryDependencyStorage requiredLibs;
private:
std::shared_ptr<MDBEnv> m_env;
};
struct PackageCacheRef {
explicit PackageCacheRef(const DatabaseStorage &databaseStorage, const std::shared_ptr<Package> &package);
explicit PackageCacheRef(const DatabaseStorage &databaseStorage, const std::string &packageName);
bool operator==(const PackageCacheRef &other) const;
const DatabaseStorage *databaseStorage = nullptr;
const std::string *packageName;
};
inline PackageCacheRef::PackageCacheRef(const DatabaseStorage &databaseStorage, const std::shared_ptr<Package> &package)
: databaseStorage(&databaseStorage)
, packageName(&package->name)
{
}
inline PackageCacheRef::PackageCacheRef(const DatabaseStorage &databaseStorage, const std::string &packageName)
: databaseStorage(&databaseStorage)
, packageName(&packageName)
{
}
inline bool PackageCacheRef::operator==(const PackageCacheRef &other) const
{
return databaseStorage == other.databaseStorage && *packageName == *other.packageName;
}
std::size_t hash_value(const PackageCacheRef &ref);
struct PackageCacheEntry {
explicit PackageCacheEntry(const PackageCacheRef &ref);
PackageCacheRef ref;
StorageID id;
std::shared_ptr<Package> package;
};
inline PackageCacheEntry::PackageCacheEntry(const PackageCacheRef &ref)
: ref(ref)
, id(0)
{
}
class RecentlyUsedPackages {
using PackageList = boost::multi_index::multi_index_container<PackageCacheEntry,
boost::multi_index::indexed_by<boost::multi_index::sequenced<>,
boost::multi_index::hashed_unique<boost::multi_index::tag<PackageCacheRef>,
BOOST_MULTI_INDEX_MEMBER(PackageCacheEntry, PackageCacheRef, ref)>>>;
using iterator = PackageList::iterator;
public:
explicit RecentlyUsedPackages(std::size_t limit = 1000);
PackageCacheEntry &findOrCreate(const PackageCacheRef &ref);
void undo();
std::size_t erase(const PackageCacheRef &ref);
std::size_t clear(const DatabaseStorage &databaseStorage);
iterator begin();
iterator end();
private:
PackageList m_packages;
std::size_t m_limit;
};
inline RecentlyUsedPackages::RecentlyUsedPackages(std::size_t limit)
: m_limit(limit)
{
}
inline void RecentlyUsedPackages::undo()
{
m_packages.pop_front();
}
inline std::size_t RecentlyUsedPackages::erase(const PackageCacheRef &ref)
{
return m_packages.get<PackageCacheRef>().erase(ref);
}
inline RecentlyUsedPackages::iterator RecentlyUsedPackages::begin()
{
return m_packages.begin();
}
inline RecentlyUsedPackages::iterator RecentlyUsedPackages::end()
{
return m_packages.end();
}
struct PackageCache {
struct StoreResult {
StorageID id = 0;
bool updated = false;
std::shared_ptr<Package> oldPackage;
};
PackageSpec retrieve(DatabaseStorage &databaseStorage, const std::string &packageName);
StoreResult store(DatabaseStorage &databaseStorage, const std::shared_ptr<Package> &package, bool force);
StoreResult store(DatabaseStorage &databaseStorage, PackageStorage::RWTransaction &txn, const std::shared_ptr<Package> &package);
bool invalidate(DatabaseStorage &databaseStorage, const std::string &packageName);
void clear(DatabaseStorage &databaseStorage);
void clearCacheOnly(DatabaseStorage &databaseStorage);
private:
RecentlyUsedPackages m_packages;
std::mutex m_mutex;
};
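A hedged sketch of the cache contract as the Database layer uses it; the databaseStorage instance and the package object are assumed to exist already:

// Sketch only: typical round trip through the package cache.
auto &cache = databaseStorage.packageCache;
const auto stored = cache.store(databaseStorage, package, /* force */ false);
if (stored.updated) {
    // stored.id is the LMDB StorageID, stored.oldPackage the previously cached/stored object (if any)
}
const auto spec = cache.retrieve(databaseStorage, package->name); // served from the LRU cache when possible
cache.invalidate(databaseStorage, package->name);                 // drops the entry from cache and storage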
struct StorageDistribution {
explicit StorageDistribution(const char *path, std::uint32_t maxDbs);
std::unique_ptr<DatabaseStorage> forDatabase(std::string_view uniqueDatabaseName);
private:
std::shared_ptr<MDBEnv> m_env;
PackageCache m_packageCache;
};
inline std::unique_ptr<DatabaseStorage> StorageDistribution::forDatabase(std::string_view uniqueDatabaseName)
{
return std::make_unique<DatabaseStorage>(m_env, m_packageCache, uniqueDatabaseName);
}
} // namespace LibPkg
#endif // LIBPKG_DATA_STORAGE_PRIVATE_H

View File

@ -35,10 +35,10 @@ static void moveValues(vector<string> &target, multimap<string, string> &multima
void Config::loadPacmanConfig(const char *pacmanConfigPath)
{
// open and parse ini
IniFile configIni;
unordered_map<string, IniFile> includedInis;
auto configIni = IniFile();
auto includedInis = std::unordered_map<std::string, IniFile>();
{
ifstream configFile;
auto configFile = ifstream();
configFile.exceptions(ios_base::failbit | ios_base::badbit);
configFile.open(pacmanConfigPath, ios_base::in);
configIni.parse(configFile);
@ -68,7 +68,7 @@ void Config::loadPacmanConfig(const char *pacmanConfigPath)
if (packageCacheDirs.empty()) {
packageCacheDirs.emplace_back("/var/cache/pacman/pkg/");
}
string sigLevel;
auto sigLevel = std::string();
moveLastValue(sigLevel, options, "SigLevel");
signatureLevel = SignatureLevelConfig::fromString(sigLevel);
if (!signatureLevel.isValid()) {
@ -79,9 +79,9 @@ void Config::loadPacmanConfig(const char *pacmanConfigPath)
}
} else {
// read sync database
auto *const db = findOrCreateDatabase(move(scope.first), architecture);
auto *const db = findOrCreateDatabase(std::move(scope.first), architecture);
// read sig level
string sigLevel;
auto sigLevel = std::string();
moveLastValue(sigLevel, scope.second, "SigLevel");
const auto dbSpecificSignatureLevelConfig = SignatureLevelConfig::fromString(sigLevel);
if (dbSpecificSignatureLevelConfig.databaseScope != SignatureLevel::Invalid) {
@ -95,9 +95,9 @@ void Config::loadPacmanConfig(const char *pacmanConfigPath)
// add mirrors
for (auto range = scope.second.equal_range("Server"); range.first != range.second; ++range.first) {
for (const auto &arch : architectures) {
string url = range.first->second;
findAndReplace<string>(url, "$repo", db->name);
findAndReplace<string>(url, "$arch", arch);
auto url = range.first->second;
findAndReplace<std::string>(url, "$repo", db->name);
findAndReplace<std::string>(url, "$arch", arch);
db->mirrors.emplace_back(move(url));
}
}
@ -107,7 +107,7 @@ void Config::loadPacmanConfig(const char *pacmanConfigPath)
auto &includedIni = includedInis[path];
if (includedIni.data().empty()) {
try {
ifstream includedFile;
auto includedFile = std::ifstream();
includedFile.exceptions(ios_base::failbit | ios_base::badbit);
includedFile.open(path, ios_base::in);
includedIni.parse(includedFile);
@ -145,7 +145,7 @@ void Config::loadPacmanConfig(const char *pacmanConfigPath)
void Config::loadAllPackages(bool withFiles)
{
for (Database &db : databases) {
for (auto &db : databases) {
try {
db.loadPackages(withFiles);
} catch (const runtime_error &e) {

View File

@ -38,18 +38,20 @@ void LibPkg::Database::loadPackages(const string &databaseData, DateTime lastMod
void Database::loadPackages(FileMap &&databaseFiles, DateTime lastModified)
{
lastUpdate = lastModified;
auto updater = PackageUpdater(*this);
for (auto &dir : databaseFiles) {
if (dir.first.find('/') != std::string::npos) {
cerr << Phrases::WarningMessage << "Database \"" << name << "\" contains unexpected sub directory: " << dir.first << Phrases::EndFlush;
continue;
}
vector<string> descriptionParts;
auto descriptionParts = std::vector<std::string>();
descriptionParts.reserve(dir.second.size());
for (auto &file : dir.second) {
descriptionParts.emplace_back(move(file.content));
descriptionParts.emplace_back(std::move(file.content));
}
updatePackage(Package::fromDescription(descriptionParts));
updater.update(Package::fromDescription(descriptionParts));
}
updater.commit();
}
} // namespace LibPkg

View File

@ -290,7 +290,7 @@ void addPackageInfo(
}
}
void addPackageDescription(Package &package, const char *field, size_t fieldSize, const char *value, size_t valueSize)
static void addPackageDescription(Package &package, const char *field, size_t fieldSize, const char *value, size_t valueSize)
{
if_field("BASE")
{
@ -423,7 +423,7 @@ void addPackageDescription(Package &package, const char *field, size_t fieldSize
#undef valueString
#undef ensure_pkg_info
void addVersionInfo(Package &package, PackageVersion &version, bool isPackageInfo)
static void addVersionInfo(Package &package, PackageVersion &version, bool isPackageInfo)
{
if (isPackageInfo) {
// when parsing .PKGINFO pkgver specifies the complete version which is
@ -443,7 +443,7 @@ void addVersionInfo(Package &package, PackageVersion &version, bool isPackageInf
}
}
void parsePkgInfo(const std::string &info, const std::function<Package *(Package &)> &nextPackage, bool isPackageInfo)
static void parsePkgInfo(const std::string &info, const std::function<Package *(Package &)> &nextPackage, bool isPackageInfo)
{
// define variables to store intermediate results while still parsing package base
PackageVersion version;
@ -599,21 +599,29 @@ void parsePkgInfo(const std::string &info, const std::function<Package *(Package
}
}
std::vector<std::shared_ptr<Package>> Package::fromInfo(const std::string &info, bool isPackageInfo)
/*!
* \brief Parses the specified \a info and returns the results as one or more (in case of split packages) packages.
* \remarks The returned storage ID is always zero as the returned packages are not part of a database yet. One might
* set the ID later when placing the package into a database.
*/
std::vector<PackageSpec> Package::fromInfo(const std::string &info, bool isPackageInfo)
{
vector<shared_ptr<Package>> packages;
const auto nextPackage = [&](Package &basePackage) { return packages.emplace_back(make_shared<Package>(basePackage)).get(); };
auto packages = std::vector<PackageSpec>();
const auto nextPackage = [&](Package &basePackage) { return packages.emplace_back(0, std::make_shared<Package>(basePackage)).pkg.get(); };
parsePkgInfo(info, nextPackage, isPackageInfo);
return packages;
}
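For example, a .SRCINFO buffer can be run through this parser as follows (mirroring the parser tests further below); reading the file contents into srcInfo and including <iostream> is assumed:

// Illustrative usage: the returned specs carry id == 0 until the packages are stored in a database.
const auto packages = LibPkg::Package::fromInfo(srcInfo, /* isPackageInfo */ false);
for (const auto &spec : packages) {
    std::cout << spec.pkg->name << ' ' << spec.pkg->version << '\n';
}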
shared_ptr<Package> Package::fromDescription(const std::vector<string> &descriptionParts)
/*!
* \brief Parses the specified \a descriptionParts and returns the results as package.
*/
std::shared_ptr<Package> Package::fromDescription(const std::vector<std::string> &descriptionParts)
{
auto package = make_shared<Package>();
auto package = std::make_shared<Package>();
package->origin = PackageOrigin::Database;
package->sourceInfo = make_shared<SourceInfo>();
package->packageInfo = make_unique<PackageInfo>();
for (const string &desc : descriptionParts) {
for (const auto &desc : descriptionParts) {
// states
enum {
FieldName, // reading field name
@ -625,9 +633,9 @@ shared_ptr<Package> Package::fromDescription(const std::vector<string> &descript
// variables for current field
const char *currentFieldName = nullptr;
size_t currentFieldNameSize = 0;
std::size_t currentFieldNameSize = 0;
const char *currentFieldValue = nullptr;
size_t currentFieldValueSize = 0;
std::size_t currentFieldValueSize = 0;
// do actual parsing via state machine
for (const char *i = desc.data(); *i; ++i) {
@ -834,7 +842,7 @@ std::shared_ptr<Package> Package::fromPkgFile(const string &path)
}
auto packages = fromInfo(file.content, true);
if (!packages.empty()) {
package = std::move(packages.front());
package = std::move(packages.front().pkg);
}
return;
}
@ -899,24 +907,24 @@ std::shared_ptr<Package> Package::fromPkgFileName(std::string_view fileName)
return pkg;
}
std::vector<std::shared_ptr<Package>> Package::fromAurRpcJson(const char *jsonData, std::size_t jsonSize, PackageOrigin origin)
std::vector<PackageSpec> Package::fromAurRpcJson(const char *jsonData, std::size_t jsonSize, PackageOrigin origin)
{
ReflectiveRapidJSON::JsonDeserializationErrors errors;
auto errors = ReflectiveRapidJSON::JsonDeserializationErrors();
auto rpcMultiInfo = AurRpcMultiInfo::fromJson(jsonData, jsonSize, &errors);
std::vector<std::shared_ptr<Package>> packages;
auto packages = std::vector<PackageSpec>();
packages.reserve(rpcMultiInfo.results.size());
for (auto &result : rpcMultiInfo.results) {
auto package = make_shared<Package>();
auto sourceInfo = make_shared<SourceInfo>();
auto package = std::make_shared<Package>();
auto sourceInfo = std::make_shared<SourceInfo>();
package->origin = origin;
package->name = move(result.Name);
package->version = move(result.Version);
package->description = move(result.Description);
package->upstreamUrl = move(result.URL);
package->licenses = move(result.License);
package->groups = move(result.Groups);
package->name = std::move(result.Name);
package->version = std::move(result.Version);
package->description = std::move(result.Description);
package->upstreamUrl = std::move(result.URL);
package->licenses = std::move(result.License);
package->groups = std::move(result.Groups);
for (auto &dependencyName : result.Depends) {
package->dependencies.emplace_back(dependencyName.data(), dependencyName.size());
}
@ -929,18 +937,18 @@ std::vector<std::shared_ptr<Package>> Package::fromAurRpcJson(const char *jsonDa
for (auto &dependencyName : result.CheckDepends) {
sourceInfo->checkDependencies.emplace_back(dependencyName.data(), dependencyName.size());
}
sourceInfo->name = move(result.PackageBase);
sourceInfo->maintainer = move(result.Maintainer);
sourceInfo->id = move(result.ID);
sourceInfo->votes = move(result.NumVotes);
sourceInfo->name = std::move(result.PackageBase);
sourceInfo->maintainer = std::move(result.Maintainer);
sourceInfo->id = std::move(result.ID);
sourceInfo->votes = std::move(result.NumVotes);
if (result.OutOfDate) {
sourceInfo->outOfDate = DateTime::fromTimeStampGmt(*result.OutOfDate);
}
sourceInfo->firstSubmitted = DateTime::fromTimeStampGmt(result.FirstSubmitted);
sourceInfo->lastModified = DateTime::fromTimeStampGmt(result.LastModified);
sourceInfo->url = move(result.URLPath);
package->sourceInfo = move(sourceInfo);
packages.emplace_back(move(package));
sourceInfo->url = std::move(result.URLPath);
package->sourceInfo = std::move(sourceInfo);
packages.emplace_back(0, std::move(package));
}
return packages;
}

View File

@ -62,41 +62,49 @@ public:
void testMisc();
private:
void setupPackages();
std::string m_dbFile;
Config m_config;
shared_ptr<Package> m_pkg1, m_pkg2, m_pkg3;
std::shared_ptr<Package> m_pkg1, m_pkg2, m_pkg3;
StorageID m_pkgId1, m_pkgId2, m_pkgId3;
};
CPPUNIT_TEST_SUITE_REGISTRATION(DataTests);
void DataTests::setUp()
{
m_pkg1 = make_shared<Package>();
m_pkg1->name = "foo";
m_pkg1->version = "5.6-6";
m_pkg1->dependencies.emplace_back("bar>=5.5");
m_pkg1->dependencies.emplace_back("bar<5.6");
m_pkg2 = make_shared<Package>();
m_pkg2->name = "bar";
m_pkg2->version = "5.5-1";
m_pkg2->provides.emplace_back("foo", "5.8-1");
m_pkg3 = make_shared<Package>();
m_pkg3->name = "foo";
m_pkg3->version = "5.7-1";
Database db1;
db1.name = "db1";
db1.updatePackage(m_pkg1);
db1.updatePackage(m_pkg2);
m_config.databases.emplace_back(move(db1));
Database db2;
db2.name = "db2";
db2.updatePackage(m_pkg3);
m_config.databases.emplace_back(move(db2));
}
void DataTests::tearDown()
{
}
void DataTests::setupPackages()
{
m_dbFile = workingCopyPath("test-data.db", WorkingCopyMode::Cleanup);
m_config.initStorage(m_dbFile.data());
m_pkg1 = std::make_shared<Package>();
m_pkg1->name = "foo";
m_pkg1->version = "5.6-6";
m_pkg1->dependencies.emplace_back("bar>=5.5");
m_pkg1->dependencies.emplace_back("bar<5.6");
m_pkg2 = std::make_shared<Package>();
m_pkg2->name = "bar";
m_pkg2->version = "5.5-1";
m_pkg2->provides.emplace_back("foo", "5.8-1");
m_pkg3 = std::make_shared<Package>();
m_pkg3->name = "foo";
m_pkg3->version = "5.7-1";
auto *const db1 = m_config.findOrCreateDatabase("db1"sv, std::string_view());
CPPUNIT_ASSERT_MESSAGE("ID for pkg 1 returned", m_pkgId1 = db1->updatePackage(m_pkg1));
CPPUNIT_ASSERT_MESSAGE("ID for pkg 2 returned", m_pkgId2 = db1->updatePackage(m_pkg2));
CPPUNIT_ASSERT_EQUAL_MESSAGE("packages added to db 1", 2_st, db1->packageCount());
auto *const db2 = m_config.findOrCreateDatabase("db2"sv, std::string_view());
CPPUNIT_ASSERT_MESSAGE("ID for pkg 3 returned", m_pkgId3 = db2->updatePackage(m_pkg3));
CPPUNIT_ASSERT_EQUAL_MESSAGE("package added to db 2", 1_st, db2->packageCount());
}
void DataTests::testPackageVersionComparsion()
{
Package pkg1, pkg2;
@ -202,15 +210,21 @@ void DataTests::testDependencyMatching()
void DataTests::testPackageSearch()
{
auto pkg = m_config.findPackage(Dependency("foo"));
CPPUNIT_ASSERT_EQUAL_MESSAGE(
"find package returns the package from the first database", &m_config.databases.front(), std::get<Database *>(pkg.db));
CPPUNIT_ASSERT_EQUAL(m_pkg1, pkg.pkg);
setupPackages();
auto pkgs = m_config.findPackages("foo"sv);
CPPUNIT_ASSERT_EQUAL(2_st, pkgs.size());
CPPUNIT_ASSERT_EQUAL(m_pkg1, pkgs.front().pkg);
CPPUNIT_ASSERT_EQUAL(m_pkg3, pkgs.back().pkg);
CPPUNIT_ASSERT_EQUAL_MESSAGE("package from first db returned first, cached object returned", m_pkg1, pkgs.front().pkg);
CPPUNIT_ASSERT_EQUAL_MESSAGE("package from first db returned second, cached object returned", m_pkg3, pkgs.back().pkg);
CPPUNIT_ASSERT_EQUAL_MESSAGE("package id set as expected (1)", m_pkgId1, pkgs.front().id);
CPPUNIT_ASSERT_EQUAL_MESSAGE("package id set as expected (2)", m_pkgId3, pkgs.back().id);
CPPUNIT_ASSERT_EQUAL_MESSAGE("db set as expected (1)", &m_config.databases.front(), std::get<Database *>(pkgs.front().db));
CPPUNIT_ASSERT_EQUAL_MESSAGE("db set as expected (2)", &m_config.databases.back(), std::get<Database *>(pkgs.back().db));
auto [db, pkg, packageID] = m_config.findPackage(Dependency("foo"));
CPPUNIT_ASSERT_EQUAL_MESSAGE("expected package for dependency returned", m_pkgId1, packageID);
CPPUNIT_ASSERT_EQUAL_MESSAGE("find package returns the package from the first database", &m_config.databases.front(), std::get<Database *>(db));
// FIXME: check whether the package is actually (value) equivalent
pkgs = m_config.findPackages("bar"sv);
CPPUNIT_ASSERT_EQUAL(1_st, pkgs.size());
@ -228,11 +242,13 @@ void DataTests::testPackageSearch()
pkgs = m_config.findPackages(Dependency("foo", "5.8-1", DependencyMode::GreatherEqual));
CPPUNIT_ASSERT_EQUAL(1_st, pkgs.size());
CPPUNIT_ASSERT_EQUAL(m_pkg2, pkgs.front().pkg);
CPPUNIT_ASSERT_EQUAL(m_pkgId2, pkgs.front().id);
// FIXME: check whether the package is actually (value) equivalent
pkgs = m_config.findPackages(Dependency("bar", "5.5-1", DependencyMode::Equal));
CPPUNIT_ASSERT_EQUAL(1_st, pkgs.size());
CPPUNIT_ASSERT_EQUAL(m_pkg2, pkgs.front().pkg);
CPPUNIT_ASSERT_EQUAL(m_pkgId2, pkgs.front().id);
// FIXME: check whether the package is actually (value) equivalent
pkgs = m_config.findPackages(Dependency("bar", "5.8-1", DependencyMode::Equal));
CPPUNIT_ASSERT_EQUAL(0_st, pkgs.size());
@ -240,7 +256,7 @@ void DataTests::testPackageSearch()
void DataTests::testComputingFileName()
{
Package pkg;
auto pkg = Package();
pkg.name = "test";
pkg.version = "1.2-3";
CPPUNIT_ASSERT_EQUAL_MESSAGE("packageInfo required for computing filename", string(), pkg.computeFileName());
@ -253,6 +269,7 @@ void DataTests::testComputingFileName()
void DataTests::testDetectingUnresolved()
{
setupPackages();
auto &db1 = m_config.databases[0];
CPPUNIT_ASSERT_EQUAL(0_st, db1.detectUnresolvedPackages(m_config, {}, {}).size());
@ -263,11 +280,13 @@ void DataTests::testDetectingUnresolved()
removedPackages.add(Dependency("bar", "5.5"), m_pkg2);
const auto failures = db1.detectUnresolvedPackages(m_config, { m_pkg2 }, removedPackages);
CPPUNIT_ASSERT_EQUAL(1_st, failures.size());
CPPUNIT_ASSERT_EQUAL(m_pkg1, failures.begin()->first);
CPPUNIT_ASSERT_EQUAL(m_pkgId1, failures.begin()->first.id);
}
void DataTests::testComputingBuildOrder()
{
setupPackages();
// order correctly changed according to dependencies
auto res = m_config.computeBuildOrder({ "foo", "bar" }, BuildOrderOptions::None);
CPPUNIT_ASSERT_EQUAL(true, res.success);
@ -277,7 +296,7 @@ void DataTests::testComputingBuildOrder()
CPPUNIT_ASSERT_EQUAL(0_st, res.ignored.size());
// unknown package ignored
const vector<string> ignored = { "baz" };
const auto ignored = std::vector<std::string>{ "baz" };
res = m_config.computeBuildOrder({ "foo", "bar", ignored[0] }, BuildOrderOptions::None);
CPPUNIT_ASSERT_EQUAL(false, res.success);
CPPUNIT_ASSERT_EQUAL(2_st, res.order.size());
@ -286,13 +305,14 @@ void DataTests::testComputingBuildOrder()
CPPUNIT_ASSERT_EQUAL(ignored, res.ignored);
// add cycle
auto &db(m_config.databases[0]);
auto tar = make_shared<Package>();
auto &db = m_config.databases[0];
auto tar = std::make_shared<Package>();
tar->name = "tar";
tar->version = "5.6-6";
tar->dependencies.emplace_back("foo");
m_pkg2->dependencies.emplace_back("tar"); // let bar depend on tar
db.updatePackage(tar);
db.forceUpdatePackage(tar);
db.forceUpdatePackage(m_pkg2);
// fail due to cycle
res = m_config.computeBuildOrder({ "foo", "bar", "tar" }, BuildOrderOptions::None);
@ -315,10 +335,12 @@ void DataTests::testComputingBuildOrder()
CPPUNIT_ASSERT_EQUAL(0_st, res.ignored.size());
// ignore cycle if not interested in that particular package
m_pkg2->packageInfo = make_unique<PackageInfo>();
tar->packageInfo = make_unique<PackageInfo>();
m_pkg2->packageInfo = std::make_unique<PackageInfo>();
tar->packageInfo = std::make_unique<PackageInfo>();
tar->dependencies.clear();
tar->dependencies.emplace_back("bar");
db.forceUpdatePackage(tar);
db.forceUpdatePackage(m_pkg2);
res = m_config.computeBuildOrder({ "foo" }, BuildOrderOptions::None);
CPPUNIT_ASSERT_EQUAL(true, res.success);
CPPUNIT_ASSERT_EQUAL(0_st, res.cycle.size());
@ -329,6 +351,7 @@ void DataTests::testComputingBuildOrder()
void DataTests::setupTestDbs(std::size_t dbCount)
{
setupPackages();
m_config.databases.reserve(m_config.databases.size() + dbCount);
for (std::size_t i = 1; i <= dbCount; ++i) {
m_config.findOrCreateDatabase(argsToString("db", i), "x86_64");
@ -387,16 +410,17 @@ void DataTests::testComputingDatabasesRequiringDatabase()
void DataTests::testUpdateCheck()
{
setupPackages();
auto &db1 = m_config.databases.front();
auto &db2 = m_config.databases.back();
const auto result = db1.checkForUpdates({ &db2 });
CPPUNIT_ASSERT_EQUAL(1_st, result.versionUpdates.size());
CPPUNIT_ASSERT_EQUAL(m_pkg1, result.versionUpdates.front().oldVersion.pkg);
CPPUNIT_ASSERT_EQUAL(m_pkg3, result.versionUpdates.front().newVersion.pkg);
CPPUNIT_ASSERT_EQUAL(m_pkgId1, result.versionUpdates.front().oldVersion.id);
CPPUNIT_ASSERT_EQUAL(m_pkgId3, result.versionUpdates.front().newVersion.id);
CPPUNIT_ASSERT_EQUAL(0_st, result.packageUpdates.size());
CPPUNIT_ASSERT_EQUAL(0_st, result.downgrades.size());
CPPUNIT_ASSERT_EQUAL(1_st, result.orphans.size());
CPPUNIT_ASSERT_EQUAL(m_pkg2, result.orphans.front().pkg);
CPPUNIT_ASSERT_EQUAL(m_pkgId2, result.orphans.front().id);
}
void DataTests::testLocatePackage()
@ -405,6 +429,7 @@ void DataTests::testLocatePackage()
const auto syncthingTrayPkgPath = testFilePath("repo/foo/syncthingtray-0.6.2-1-x86_64.pkg.tar.xz");
const auto syncthingTrayStorageLocation = std::filesystem::canonical(testFilePath("syncthingtray/syncthingtray-0.6.2-1-x86_64.pkg.tar.xz"));
setupPackages();
auto &db = m_config.databases.front();
db.localPkgDir = std::filesystem::path(fakePkgPath).parent_path();
@ -440,6 +465,7 @@ void DataTests::testAddingDepsAndProvidesFromOtherPackage()
Dependency{ "python", "3.5", DependencyMode::LessThan },
Dependency{ "perl", "5.32", DependencyMode::GreatherEqual },
};
setupPackages();
m_pkg1->origin = PackageOrigin::PackageContents;
m_pkg1->dependencies.insert(m_pkg1->dependencies.end(), dependenciesToTakeOver.begin(), dependenciesToTakeOver.end());
m_pkg1->libdepends.emplace("foo");
@ -465,6 +491,7 @@ void DataTests::testAddingDepsAndProvidesFromOtherPackage()
void DataTests::testDependencyExport()
{
setupPackages();
m_pkg2->provides.emplace_back("yet-another-dependency");
m_pkg2->libprovides.emplace("libfoo");
m_pkg2->libprovides.emplace("libbar");

View File

@ -21,6 +21,7 @@ using CppUtilities::operator<<; // must be visible prior to the call site
using namespace std;
using namespace CPPUNIT_NS;
using namespace CppUtilities;
using namespace CppUtilities::Literals;
using namespace LibPkg;
using namespace TestHelper;
@ -141,32 +142,33 @@ void ParserTests::testParsingPackageName()
void ParserTests::testParsingConfig()
{
// prepare pacman.conf
const string pacmanConfigWorkingCopyPath = workingCopyPath("pacman.conf"s, WorkingCopyMode::NoCopy);
const auto pacmanConfigWorkingCopyPath = workingCopyPath("pacman.conf"s, WorkingCopyMode::NoCopy);
{
const string mirrorListPath = testFilePath("mirrorlist"s);
string defaultPacmanConfig = readFile(testFilePath("pacman.conf"s), 5 * 1024);
const auto mirrorListPath = testFilePath("mirrorlist"s);
auto defaultPacmanConfig = readFile(testFilePath("pacman.conf"s), 5 * 1024);
findAndReplace(defaultPacmanConfig, "/etc/pacman.d/mirrorlist"s, mirrorListPath);
ofstream pacmanConfigWorkingCopy;
auto pacmanConfigWorkingCopy = std::ofstream();
pacmanConfigWorkingCopy.exceptions(ios_base::failbit | ios_base::badbit);
pacmanConfigWorkingCopy.open(pacmanConfigWorkingCopyPath, ios_base::out | ios_base::trunc | ios_base::binary);
pacmanConfigWorkingCopy.write(defaultPacmanConfig.data(), static_cast<streamsize>(defaultPacmanConfig.size()));
}
Config config;
auto config = Config();
config.initStorage(workingCopyPath("test-parsing-pacman-config.db", WorkingCopyMode::Cleanup).data());
config.loadPacmanConfig(pacmanConfigWorkingCopyPath.data());
for (auto &db : config.databases) {
db.deducePathsFromLocalDirs();
}
CPPUNIT_ASSERT_EQUAL_MESSAGE("cache dir"s, vector<string>{ "/cache/path/"s }, config.packageCacheDirs);
CPPUNIT_ASSERT_EQUAL_MESSAGE("cache dir"s, std::vector<std::string>{ "/cache/path/"s }, config.packageCacheDirs);
CPPUNIT_ASSERT_EQUAL_MESSAGE("pacman database path"s, "/db/path/"s, config.pacmanDatabasePath);
CPPUNIT_ASSERT_EQUAL_MESSAGE("3 databases found"s, 3ul, config.databases.size());
CPPUNIT_ASSERT_EQUAL("core"s, config.databases[0].name);
CPPUNIT_ASSERT_EQUAL("extra"s, config.databases[1].name);
CPPUNIT_ASSERT_EQUAL("community"s, config.databases[2].name);
const vector<string> mirrorsCore = { "http://ftp.fau.de/archlinux/core/os/i686"s, "https://ftp.fau.de/archlinux/core/os/i686"s };
const auto mirrorsCore = std::vector<std::string>{ "http://ftp.fau.de/archlinux/core/os/i686"s, "https://ftp.fau.de/archlinux/core/os/i686"s };
CPPUNIT_ASSERT_EQUAL_MESSAGE("mirrors read correctly in first place"s, mirrorsCore, config.databases[0].mirrors);
const vector<string> mirrorsExtra = { "http://ftp.fau.de/archlinux/extra/os/i686"s, "https://ftp.fau.de/archlinux/extra/os/i686"s };
const auto mirrorsExtra = std::vector<std::string>{ "http://ftp.fau.de/archlinux/extra/os/i686"s, "https://ftp.fau.de/archlinux/extra/os/i686"s };
CPPUNIT_ASSERT_EQUAL_MESSAGE("reusing already parsed mirror list"s, mirrorsExtra, config.databases[1].mirrors);
CPPUNIT_ASSERT_EQUAL_MESSAGE("regular database file"s, "/db/path/sync/extra.db"s, config.databases[1].path);
CPPUNIT_ASSERT_EQUAL_MESSAGE("database file containing files"s, "/db/path/sync/extra.files"s, config.databases[1].filesPath);
@ -181,7 +183,7 @@ void ParserTests::testParsingPlainSrcInfo()
const auto packages = Package::fromInfo(srcInfo, false);
CPPUNIT_ASSERT_EQUAL_MESSAGE("1 package present"s, 1ul, packages.size());
const Package &pkg1 = *packages.front();
const Package &pkg1 = *packages.front().pkg;
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin", PackageOrigin::SourceInfo, pkg1.origin);
CPPUNIT_ASSERT_EQUAL_MESSAGE("name"s, "c++utilities"s, pkg1.name);
CPPUNIT_ASSERT_EQUAL_MESSAGE("version"s, "4.5.0-1"s, pkg1.version);
@ -201,7 +203,7 @@ void ParserTests::testParsingSplitPackageSrcInfo()
const auto packages = Package::fromInfo(srcInfo, false);
CPPUNIT_ASSERT_EQUAL_MESSAGE("2 (split) packages present"s, 2ul, packages.size());
const Package &pkg1 = *packages.front(), &pkg2 = *packages.back();
const Package &pkg1 = *packages.front().pkg, &pkg2 = *packages.back().pkg;
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin (1)", PackageOrigin::SourceInfo, pkg1.origin);
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin (2)", PackageOrigin::SourceInfo, pkg2.origin);
CPPUNIT_ASSERT_EQUAL_MESSAGE("name (1)"s, "mingw-w64-harfbuzz"s, pkg1.name);
@ -236,7 +238,7 @@ void ParserTests::testParsingSplitPackageSrcInfoWithDifferentArchs()
const auto packages = Package::fromInfo(srcInfo, false);
CPPUNIT_ASSERT_EQUAL_MESSAGE("3 (split) packages present"s, 3ul, packages.size());
const auto &jre = packages[0], &jdk = packages[1], &doc = packages[2];
const auto &jre = packages[0].pkg, &jdk = packages[1].pkg, &doc = packages[2].pkg;
CPPUNIT_ASSERT_MESSAGE("source info present", jdk->sourceInfo);
CPPUNIT_ASSERT_EQUAL_MESSAGE("jre has same source info as base", jdk->sourceInfo, jre->sourceInfo);
CPPUNIT_ASSERT_EQUAL_MESSAGE("jdk-doc has same source info as base", jdk->sourceInfo, doc->sourceInfo);
@ -254,8 +256,8 @@ void ParserTests::testParsingPkgInfo()
const auto pkgInfo = readFile(testFilePath("mingw-w64-harfbuzz/PKGINFO"));
const auto packages = Package::fromInfo(pkgInfo, true);
CPPUNIT_ASSERT_EQUAL_MESSAGE("1 package present"s, 1ul, packages.size());
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin", PackageOrigin::PackageInfo, packages.front()->origin);
checkHarfbuzzPackage(*packages.front());
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin", PackageOrigin::PackageInfo, packages.front().pkg->origin);
checkHarfbuzzPackage(*packages.front().pkg);
}
void ParserTests::testParsingPkgName()
@ -290,21 +292,34 @@ void ParserTests::testParsingDescriptions()
void ParserTests::testParsingDatabase()
{
// init config
Config config;
config.databases.emplace_back();
auto dbFile = workingCopyPath("test-parsing-database.db", WorkingCopyMode::Cleanup);
// init db object
Database &db = config.databases.back();
db.path = testFilePath("core.db");
db.filesPath = testFilePath("core.files");
{
// init config
auto config = Config();
config.initStorage(dbFile.data());
// load packages
config.loadAllPackages(true);
CPPUNIT_ASSERT_EQUAL_MESSAGE("all 215 packages present"s, 215ul, db.packages.size());
const auto &autoreconf(db.packages.at("autoconf"));
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin", PackageOrigin::Database, autoreconf->origin);
checkAutoconfPackage(*autoreconf);
// init db object
auto *const db = config.findOrCreateDatabase("test"sv, "x86_64"sv);
db->path = testFilePath("core.db");
db->filesPath = testFilePath("core.files");
// load packages
config.loadAllPackages(true);
CPPUNIT_ASSERT_EQUAL_MESSAGE("all 215 packages present"s, 215_st, db->packageCount());
const auto autoreconf = db->findPackage("autoconf");
CPPUNIT_ASSERT_MESSAGE("autoreconf exists", autoreconf != nullptr);
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin", PackageOrigin::Database, autoreconf->origin);
checkAutoconfPackage(*autoreconf);
}
{
// load config again to test persistency of database/storage
auto config = Config();
config.initStorage(dbFile.data());
auto *const db = config.findOrCreateDatabase("test"sv, "x86_64"sv);
CPPUNIT_ASSERT_EQUAL_MESSAGE("config persistent; all 215 packages still present"s, 215_st, db->packageCount());
}
}
void ParserTests::testParsingSignatureLevel()

View File

@ -56,7 +56,7 @@ struct LIBREPOMGR_EXPORT PackageBuildData : public ReflectiveRapidJSON::JsonSeri
std::string sourceDirectory;
std::string originalSourceDirectory;
std::shared_ptr<LibPkg::SourceInfo> sourceInfo;
std::vector<std::shared_ptr<LibPkg::Package>> packages;
std::vector<LibPkg::PackageSpec> packages;
std::vector<std::string> warnings;
std::string error;
std::size_t specifiedIndex = std::numeric_limits<std::size_t>::max();

View File

@ -271,7 +271,7 @@ void ConductBuild::run()
m_pacmanStagingConfigPath = m_workingDirectory + "/pacman-staging.conf";
// parse build preparation
ReflectiveRapidJSON::JsonDeserializationErrors errors;
auto errors = ReflectiveRapidJSON::JsonDeserializationErrors();
try {
m_buildPreparationFilePath = restoreJsonObject(
m_buildPreparation, m_workingDirectory, buildPreparationFileName, RestoreJsonExistingFileHandling::RequireExistingFile);
@ -293,9 +293,7 @@ void ConductBuild::run()
reportError("The destination database and target architecture specified in build-preparation.json must not be empty.");
return;
}
for (const auto &buildDataForPackage : m_buildPreparation.buildData) {
const auto &packageName = buildDataForPackage.first;
const auto &buildData = buildDataForPackage.second;
for (const auto &[packageName, buildData] : m_buildPreparation.buildData) {
if (packageName.empty()) {
reportError("The build data contains an empty package name.");
return;
@ -308,7 +306,7 @@ void ConductBuild::run()
reportError(argsToString("The build data for \"" % packageName % "\" has no packages."));
return;
}
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
if (!package) {
reportError(argsToString("The package of build data for \"" % packageName % "\" is null."));
return;
@ -742,7 +740,7 @@ bool ConductBuild::checkForFailedDependency(
if (buildProgress != m_buildProgress.progressByPackage.end() && buildProgress->second.addedToRepo) {
continue;
}
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
for (const auto &deps : dependencies) {
for (const auto &dependency : *deps) {
if (package->providesDependency(dependency)) {
@ -862,11 +860,11 @@ InvocationResult ConductBuild::invokeMakechrootpkg(
// previous batch have been built and that the batch order computation is correct)
if ((m_buildAsFarAsPossible || m_buildPreparation.manuallyOrdered) && hasFailuresInPreviousBatches) {
const auto &buildData = m_buildPreparation.buildData[packageName];
std::vector<const std::vector<LibPkg::Dependency> *> dependencies;
auto dependencies = std::vector<const std::vector<LibPkg::Dependency> *>();
dependencies.reserve(buildData.packages.size() + 2);
dependencies.emplace_back(&buildData.sourceInfo->makeDependencies);
dependencies.emplace_back(&buildData.sourceInfo->checkDependencies);
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
dependencies.emplace_back(&package->dependencies);
}
if (checkForFailedDependency(packageName, dependencies)) {
@ -985,13 +983,13 @@ void ConductBuild::addPackageToRepo(
auto buildResult = BuildResult{};
auto readLock = lockToRead();
const auto &buildData = m_buildPreparation.buildData[packageName];
const auto &firstPackage = buildData.packages.front();
const auto &firstPackage = buildData.packages.front().pkg;
auto sourcePackageName = packageName % '-' % firstPackage->version + m_sourcePackageExtension;
// determine names of binary packages to be copied
binaryPackages.reserve(buildData.packages.size());
buildResult.binaryPackageNames.reserve(buildData.packages.size());
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
const auto isAny = package->isArchAny();
const auto &arch = isAny ? "any" : m_buildPreparation.targetArch;
const auto &packageFileName = buildResult.binaryPackageNames.emplace_back(
@ -1467,13 +1465,14 @@ PackageStagingNeeded ConductBuild::checkWhetherStagingIsNeededAndPopulateRebuild
if (!db) {
throw std::runtime_error("Configured database \"" % dbName + "\" has been removed.");
}
const auto &packages = db->packages;
for (const auto &builtPackage : builtPackages) {
if (const auto i = packages.find(builtPackage.name); i != packages.end()) {
LibPkg::Package::exportProvides(i->second, removedProvides, removedLibProvides);
if (affectedDbName.empty()) {
affectedDbName = dbName;
}
auto existingPackage = db->findPackage(builtPackage.name);
if (!existingPackage) {
continue;
}
LibPkg::Package::exportProvides(existingPackage, removedProvides, removedLibProvides);
if (affectedDbName.empty()) {
affectedDbName = dbName;
}
}
}
@ -1516,48 +1515,38 @@ PackageStagingNeeded ConductBuild::checkWhetherStagingIsNeededAndPopulateRebuild
};
for (const auto &db : relevantDbs) {
const auto isDestinationDb = db->name == m_buildPreparation.targetDb && db->arch == m_buildPreparation.targetArch;
const auto &requiredDeps = db->requiredDeps;
RebuildInfoByPackage *rebuildInfoForDb = nullptr;
for (const auto &[removedDependencyName, removedDependencyDetail] : removedProvides) {
for (auto affectedDependencies = requiredDeps.equal_range(removedDependencyName);
affectedDependencies.first != affectedDependencies.second; ++affectedDependencies.first) {
if (!LibPkg::Dependency::matches(
removedDependencyDetail.mode, removedDependencyDetail.version, affectedDependencies.first->second.version)) {
continue;
}
if (!rebuildInfoForDb) {
rebuildInfoForDb = &m_buildProgress.rebuildList[db->name];
}
const auto &affectedPackages = affectedDependencies.first->second.relevantPackages;
for (const auto &affectedPackage : affectedPackages) {
if (isDestinationDb && isPackageWeWantToUpdateItself(*affectedPackage)) {
continue; // skip if that's just the package we want to update itself
}
needsStaging = true;
(*rebuildInfoForDb)[affectedPackage->name].provides.emplace_back(
removedDependencyName, removedDependencyDetail.version, removedDependencyDetail.mode);
listOfAffectedPackages.emplace_back(db->name % '/' + affectedPackage->name);
}
}
}
for (const auto &removedLibProvide : removedLibProvides) {
if (const auto affectedLibRequires = db->requiredLibs.find(removedLibProvide); affectedLibRequires != db->requiredLibs.end()) {
const auto &affectedPackages = affectedLibRequires->second;
if (affectedPackages.empty()) {
continue;
}
for (const auto &affectedPackage : affectedPackages) {
if (isDestinationDb && isPackageWeWantToUpdateItself(*affectedPackage)) {
continue; // skip if that's just the package we want to update itself
for (const auto &removedDependency : removedProvides) {
const auto &removedDependencyName = removedDependency.first;
const auto &removedDependencyDetail = removedDependency.second;
db->providingPackages(LibPkg::Dependency(removedDependencyName, removedDependencyDetail.version, removedDependencyDetail.mode), true,
[&](LibPkg::StorageID, LibPkg::Package &&affectedPackage) {
if (isDestinationDb && isPackageWeWantToUpdateItself(affectedPackage)) {
return false; // skip if that's just the package we want to update itself
}
if (!rebuildInfoForDb) {
rebuildInfoForDb = &m_buildProgress.rebuildList[db->name];
}
needsStaging = true;
(*rebuildInfoForDb)[affectedPackage->name].libprovides.emplace_back(removedLibProvide);
listOfAffectedPackages.emplace_back(db->name % '/' + affectedPackage->name);
(*rebuildInfoForDb)[affectedPackage.name].provides.emplace_back(
removedDependencyName, removedDependencyDetail.version, removedDependencyDetail.mode);
listOfAffectedPackages.emplace_back(db->name % '/' + affectedPackage.name);
return false;
});
}
for (const auto &removedLibProvide : removedLibProvides) {
db->providingPackages(removedLibProvide, true, [&](LibPkg::StorageID, LibPkg::Package &&affectedPackage) {
if (isDestinationDb && isPackageWeWantToUpdateItself(affectedPackage)) {
return false; // skip if that's just the package we want to update itself
}
}
if (!rebuildInfoForDb) {
rebuildInfoForDb = &m_buildProgress.rebuildList[db->name];
}
needsStaging = true;
(*rebuildInfoForDb)[affectedPackage.name].libprovides.emplace_back(removedLibProvide);
listOfAffectedPackages.emplace_back(db->name % '/' + affectedPackage.name);
return false;
});
}
}
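
The staging check above no longer walks the in-memory requiredDeps/requiredLibs indices; it asks the database for the packages affected by a removed (lib)provide through a visitor callback. Below is a minimal sketch of that lookup pattern, not code from this commit: the providingPackages() signature and the meaning of its boolean argument (apparently selecting the "required by" side rather than the "provided by" side, judging from the code it replaces) are inferred from the hunk above, and the LibPkg headers declaring Database, Dependency, Package and StorageID are assumed to be available.

#include <string>
#include <vector>

// Collect the names of packages affected by a removed dependency (illustrative only).
std::vector<std::string> collectAffectedPackages(LibPkg::Database &db, const LibPkg::Dependency &removedProvide)
{
    auto affected = std::vector<std::string>();
    db.providingPackages(removedProvide, true, [&affected](LibPkg::StorageID, LibPkg::Package &&package) {
        affected.emplace_back(package.name); // the visitor receives a deserialized copy of each match
        return false;                        // false continues the enumeration, true would stop it
    });
    return affected;
}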

View File

@ -348,9 +348,9 @@ void PrepareBuild::processSrcInfo(WebClient::AurSnapshotQuerySession &multiSessi
void PrepareBuild::addResultFromSrcInfo(WebClient::AurSnapshotQuerySession &multiSession, const std::string &packageName, const std::string &srcInfo)
{
auto snapshotResult = WebClient::AurSnapshotResult{ .packageName = packageName, .packages = LibPkg::Package::fromInfo(srcInfo, false) };
if (snapshotResult.packages.empty() || snapshotResult.packages.front()->name.empty()) {
if (snapshotResult.packages.empty() || snapshotResult.packages.front().pkg->name.empty()) {
snapshotResult.error = "Unable to parse .SRCINFO: no package name present";
} else if (!(snapshotResult.sourceInfo = snapshotResult.packages.front()->sourceInfo)) {
} else if (!(snapshotResult.sourceInfo = snapshotResult.packages.front().pkg->sourceInfo)) {
snapshotResult.error = "Unable to parse .SRCINFO: no source info present";
}
multiSession.addResponse(std::move(snapshotResult));
@ -507,7 +507,7 @@ bool PrepareBuild::pullFurtherDependencies(const std::vector<LibPkg::Dependency>
continue;
}
for (const auto &[packageName, buildData] : m_buildDataByPackage) {
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
if (package->providesDependency(dependency)) {
dependencyExists = true;
break;
@ -744,7 +744,7 @@ void PrepareBuild::bumpVersions()
auto existingVersion = existingVersionStr.empty() ? LibPkg::PackageVersion{} : LibPkg::PackageVersion::fromString(existingVersionStr);
LibPkg::PackageAmendment amendment;
LibPkg::PackageVersion newVersion;
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
newVersion = LibPkg::PackageVersion::fromString(package->version);
if (existingVersionStr.empty()) {
existingVersion = newVersion;
@ -787,7 +787,7 @@ void PrepareBuild::bumpVersions()
newVersion.package = std::move(amendedVersions.newPkgRel);
}
const auto newVersionStr = newVersion.toString();
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
package->version = newVersionStr;
}
m_warnings.emplace_back("New version of " % packageName % " (and its split packages) is " + newVersionStr);
@ -847,7 +847,7 @@ void PrepareBuild::computeDependencies(WebClient::AurSnapshotQuerySession::Conta
}
furtherDependenciesNeeded = pullFurtherDependencies(buildData.sourceInfo->makeDependencies) || furtherDependenciesNeeded;
furtherDependenciesNeeded = pullFurtherDependencies(buildData.sourceInfo->checkDependencies) || furtherDependenciesNeeded;
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
furtherDependenciesNeeded = pullFurtherDependencies(package->dependencies) || furtherDependenciesNeeded;
}
}
@ -909,7 +909,7 @@ std::unordered_map<std::string, BatchItem> PrepareBuild::prepareBatches()
auto [i, newItem] = batchItems.try_emplace(packageName, BatchItem{ .name = &packageName, .buildData = &buildData });
auto &batchItem = i->second;
const auto &sourceInfo = buildData.sourceInfo;
for (const auto &package : buildData.packages) {
for (const auto &[packageID, package] : buildData.packages) {
for (const auto &deps : { sourceInfo->makeDependencies, sourceInfo->checkDependencies, package->dependencies }) {
for (const auto &dep : deps) {
batchItem.requiredDependencies.add(dep, package);

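The loops above change because buildData.packages now holds ID/package pairs instead of plain std::shared_ptr<Package> elements, so every iteration decomposes the entries with structured bindings. A small sketch of that access pattern, assuming PackageSpec binds to a StorageID and a shared package pointer as the loops above suggest; LibPkg headers are assumed to be available.

#include <cstddef>
#include <vector>

// Count how many packages of a build-data entry provide a given dependency (illustrative only).
std::size_t countProviders(const std::vector<LibPkg::PackageSpec> &packages, const LibPkg::Dependency &dependency)
{
    auto count = std::size_t();
    for (const auto &[packageID, package] : packages) {
        if (package->providesDependency(dependency)) { // "package" behaves like a std::shared_ptr<Package>
            ++count;
        }
    }
    return count;
}
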
View File

@ -48,7 +48,7 @@ void ReloadLibraryDependencies::run()
// initialize
m_remainingPackages = 0;
auto configReadLock = init(BuildActionAccess::ReadConfig, RequiredDatabases::MaybeDestination, RequiredParameters::None);
if (holds_alternative<monostate>(configReadLock)) {
if (std::holds_alternative<std::monostate>(configReadLock)) {
return;
}
@ -61,9 +61,10 @@ void ReloadLibraryDependencies::run()
// find relevant databases and packages
m_buildAction->appendOutput(Phrases::SuccessMessage, "Finding relevant databases/packages ...\n");
m_relevantPackagesByDatabase.reserve(m_destinationDbs.empty() ? m_setup.config.databases.size() : m_destinationDbs.size());
std::unordered_set<LibPkg::Database *> relevantDbs;
std::unordered_set<LibPkg::Package *> relevantPkgs;
LibPkg::DependencySet missingDeps;
auto relevantDbs = std::unordered_set<LibPkg::Database *>();
auto relevantPkgs = std::unordered_map<LibPkg::StorageID, std::shared_ptr<LibPkg::Package>>();
auto missingDeps = LibPkg::DependencySet();
auto visitedPackages = std::unordered_set<LibPkg::StorageID>();
if (m_destinationDbs.empty()) {
for (auto &db : m_setup.config.databases) {
relevantDbs.emplace(&db);
@ -74,7 +75,7 @@ void ReloadLibraryDependencies::run()
continue;
}
const auto databaseDependencyOrderRes = m_setup.config.computeDatabaseDependencyOrder(*destinationDb);
if (holds_alternative<string>(databaseDependencyOrderRes)) {
if (std::holds_alternative<string>(databaseDependencyOrderRes)) {
m_messages.errors.emplace_back(
destinationDb->name % ": unable to consider dependencies: " + std::get<std::string>(databaseDependencyOrderRes));
}
@ -84,13 +85,15 @@ void ReloadLibraryDependencies::run()
}
}
for (auto *const destinationDb : m_destinationDbs) {
for (const auto &[packageName, package] : destinationDb->packages) {
m_setup.config.pullDependentPackages(package, relevantDbs, relevantPkgs, missingDeps);
}
destinationDb->allPackages([&, this](LibPkg::StorageID, LibPkg::Package &&package) {
m_setup.config.pullDependentPackages(
std::make_shared<LibPkg::Package>(std::move(package)), relevantDbs, relevantPkgs, missingDeps, visitedPackages);
return false;
});
}
}
for (const auto &[dependencyName, dependencyDetail] : missingDeps) {
std::vector<std::string_view> packageNames;
auto packageNames = std::vector<std::string_view>();
packageNames.reserve(dependencyDetail.relevantPackages.size());
for (const auto &package : dependencyDetail.relevantPackages) {
packageNames.emplace_back(package->name);
@ -101,30 +104,29 @@ void ReloadLibraryDependencies::run()
for (auto *const db : relevantDbs) {
const auto isDestinationDb = m_destinationDbs.empty() || m_destinationDbs.find(db) != m_destinationDbs.end();
auto &relevantDbInfo = m_relevantPackagesByDatabase.emplace_back(DatabaseToConsider{ .name = db->name, .arch = db->arch });
relevantDbInfo.packages.reserve(db->packages.size());
for (const auto &[packageName, package] : db->packages) {
db->allPackages([&](LibPkg::StorageID packageID, LibPkg::Package &&package) {
// allow aborting the build action
if (reportAbortedIfAborted()) {
return;
return true;
}
// skip if package should be excluded
if (!packageExcludeRegexValue.empty() && std::regex_match(package->name, packageExcludeRegex)) {
m_messages.notes.emplace_back(db->name % '/' % packageName + ": matches exclude regex");
continue;
if (!packageExcludeRegexValue.empty() && std::regex_match(package.name, packageExcludeRegex)) {
m_messages.notes.emplace_back(db->name % '/' % package.name + ": matches exclude regex");
return false;
}
// skip if the package info is missing (we need the binary package's file name here)
const auto &packageInfo = package->packageInfo;
const auto &packageInfo = package.packageInfo;
if (!packageInfo) {
m_messages.errors.emplace_back(db->name % '/' % packageName + ": no package info");
continue;
m_messages.errors.emplace_back(db->name % '/' % package.name + ": no package info");
return false;
}
// skip the package if it is not part of the destination DB or required by a package of the destination DB
if (!isDestinationDb && relevantPkgs.find(package.get()) == relevantPkgs.end()) {
if (!isDestinationDb && relevantPkgs.find(packageID) == relevantPkgs.end()) {
if (m_skippingNote.tellp()) {
m_skippingNote << ", ";
}
m_skippingNote << db->name << '/' << packageName;
continue;
m_skippingNote << db->name << '/' << package.name;
return false;
}
// find the package on disk; otherwise add an URL to download it from the configured mirror
std::string path, url, cachePath;
@ -160,32 +162,36 @@ void ReloadLibraryDependencies::run()
}
}
if (path.empty()) {
m_messages.errors.emplace_back(db->name % '/' % packageName + ": binary package not found and no mirror configured");
continue;
m_messages.errors.emplace_back(db->name % '/' % package.name + ": binary package not found and no mirror configured");
return false;
}
// skip if the package info has already been loaded from package contents and the present binary package is not newer
auto lastModified = DateTime();
if (url.empty()) {
lastModified = LibPkg::lastModified(path);
if (!force && package->origin == LibPkg::PackageOrigin::PackageContents && package->timestamp >= lastModified) {
m_messages.notes.emplace_back(db->name % '/' % packageName % ": skipping because \"" % path % "\" is newer ("
% package->timestamp.toString() % " >= " % lastModified.toString()
if (!force && package.origin == LibPkg::PackageOrigin::PackageContents && package.timestamp >= lastModified) {
m_messages.notes.emplace_back(db->name % '/' % package.name % ": skipping because \"" % path % "\" is newer ("
% package.timestamp.toString() % " >= " % lastModified.toString()
+ ")\n");
continue;
return false;
}
}
// add the full path to the binary package to relevant packages
auto &relevantPkg = relevantDbInfo.packages.emplace_back(
PackageToConsider{ .path = std::move(path), .url = std::move(url), .lastModified = lastModified });
// create a temporary package object to hold the info parsed from the .PKGINFO file
relevantPkg.info.name = package->name;
relevantPkg.info.name = package.name;
// -> assign certain fields which are used by addDepsAndProvidesFromOtherPackage() to check whether the packages are matching
relevantPkg.info.version = package->version;
relevantPkg.info.version = package.version;
relevantPkg.info.packageInfo = std::make_unique<LibPkg::PackageInfo>();
relevantPkg.info.packageInfo->buildDate = package->packageInfo->buildDate;
relevantPkg.info.packageInfo->buildDate = package.packageInfo->buildDate;
// -> gather source info such as make and check dependencies as well
relevantPkg.info.sourceInfo = std::make_shared<LibPkg::SourceInfo>();
++m_remainingPackages;
return false;
});
if (reportAbortedIfAborted()) {
return;
}
}
configReadLock = std::monostate{};
@ -344,14 +350,11 @@ void ReloadLibraryDependencies::loadPackageInfoFromContents()
continue;
}
// find the package in the database again
const auto packageIterator = db->packages.find(package.info.name);
if (packageIterator == db->packages.end()) {
const auto [packageID, existingPackage] = db->findPackageWithID(package.info.name);
if (!existingPackage) {
continue; // the package has been removed while we were loading package contents
}
// remove the current dependencies on database level
db->removePackageDependencies(packageIterator);
// add the dependencies/provides to the existing package
const auto &existingPackage = packageIterator->second;
if (!existingPackage->addDepsAndProvidesFromOtherPackage(package.info)) {
continue; // the package does no longer match what's in the database
}
@ -360,7 +363,7 @@ void ReloadLibraryDependencies::loadPackageInfoFromContents()
existingPackage->timestamp = package.lastModified;
}
// add the new dependencies on database-level
db->addPackageDependencies(existingPackage);
db->forceUpdatePackage(existingPackage);
++counter;
}
}
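
Package enumeration and the subsequent write-back now go through the database rather than a packages map: allPackages() visits deserialized copies, findPackageWithID() re-fetches a single package by name, and forceUpdatePackage() persists it even when name and version are unchanged. A sketch of that round trip, with the call signatures taken from the hunks above and the LibPkg headers assumed to be available.

#include <cstddef>
#include <string>

// Enumerate a database and re-store one package by name (illustrative only).
std::size_t revisitPackages(LibPkg::Database &db, const std::string &packageName)
{
    auto count = std::size_t();
    db.allPackages([&count](LibPkg::StorageID, LibPkg::Package &&) {
        ++count;      // each visited package is a deserialized copy
        return false; // returning true would abort the enumeration early
    });
    const auto [packageID, existingPackage] = db.findPackageWithID(packageName);
    if (existingPackage) {
        db.forceUpdatePackage(existingPackage); // write back even though name/version did not change
    }
    return count;
}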

View File

@ -105,20 +105,14 @@ void PackageMovementAction::initWorkingDirectory()
void PackageMovementAction::locatePackages()
{
// determine repo path and package paths
LibPkg::Database *db;
if (m_sourceDbs.empty()) {
db = *m_destinationDbs.begin();
} else {
db = *m_sourceDbs.begin();
}
const auto &packages = db->packages;
auto *const db = m_sourceDbs.empty() ? *m_destinationDbs.begin() : *m_sourceDbs.begin();
for (const auto &packageName : m_buildAction->packageNames) {
const auto &package = packages.find(packageName);
if (package == packages.end()) {
const auto package = db->findPackage(packageName);
if (!package) {
m_result.failedPackages.emplace_back(packageName, "package not listed in database file");
continue;
}
auto packageLocation = db->locatePackage(package->second->computeFileName());
auto packageLocation = db->locatePackage(package->computeFileName());
if (packageLocation.error.has_value()) {
m_result.failedPackages.emplace_back(
packageName, argsToString("unable to locate package within repo directory: ", packageLocation.error.value().what()));
@ -450,24 +444,25 @@ void CheckForProblems::run()
problems.emplace_back(
RepositoryProblem{ .desc = "configured local package directory \"" % db->localPkgDir + "\" is not a directory" });
}
for (const auto &[pkgName, pkg] : db->packages) {
if (!pkg->packageInfo) {
problems.emplace_back(RepositoryProblem{ .desc = "no package info present", .pkg = pkgName });
continue;
db->allPackages([&](LibPkg::StorageID, LibPkg::Package &&package) {
if (!package.packageInfo) {
problems.emplace_back(RepositoryProblem{ .desc = "no package info present", .pkg = package.name });
return false;
}
const auto packageLocation = db->locatePackage(pkg->packageInfo->fileName);
const auto packageLocation = db->locatePackage(package.packageInfo->fileName);
if (!packageLocation.exists) {
problems.emplace_back(
RepositoryProblem{ .desc = "binary package \"" % pkg->packageInfo->fileName + "\" not present", .pkg = pkgName });
RepositoryProblem{ .desc = "binary package \"" % package.packageInfo->fileName + "\" not present", .pkg = package.name });
}
if (m_requirePackageSignatures) {
const auto signatureLocation = db->locatePackage(pkg->packageInfo->fileName + ".sig");
const auto signatureLocation = db->locatePackage(package.packageInfo->fileName + ".sig");
if (!signatureLocation.exists) {
problems.emplace_back(RepositoryProblem{
.desc = "signature file for package \"" % pkg->packageInfo->fileName + "\" not present", .pkg = pkgName });
.desc = "signature file for package \"" % package.packageInfo->fileName + "\" not present", .pkg = package.name });
}
}
}
return false;
});
} catch (const std::filesystem::filesystem_error &e) {
problems.emplace_back(RepositoryProblem{ .desc = argsToString("unable to check presence of files: ", e.what()) });
}
@ -476,8 +471,8 @@ void CheckForProblems::run()
checkForUnresolvedPackages:
auto unresolvedPackages = db->detectUnresolvedPackages(
m_setup.config, std::vector<std::shared_ptr<LibPkg::Package>>(), LibPkg::DependencySet(), ignoreDeps, ignoreLibDeps);
for (auto &[package, unresolvedDeps] : unresolvedPackages) {
problems.emplace_back(RepositoryProblem{ .desc = std::move(unresolvedDeps), .pkg = package->name });
for (auto &[packageSpec, unresolvedDeps] : unresolvedPackages) {
problems.emplace_back(RepositoryProblem{ .desc = std::move(unresolvedDeps), .pkg = packageSpec.pkg->name });
}
}
@ -528,8 +523,8 @@ void CleanRepository::run()
std::variant<std::monostate, SharedLoggingLock, UniqueLoggingLock> lock;
RepoDirType type = RepoDirType::New;
};
std::unordered_map<std::string, RepoDir> repoDirs;
bool fatalError = false;
auto repoDirs = std::unordered_map<std::string, RepoDir>();
auto fatalError = false;
const auto addAnyAndSrcDir = [this, &repoDirs](LibPkg::Database &db) {
// find the "any" directory which contains arch neutral packages which are possibly shared between databases
try {
@ -654,8 +649,10 @@ void CleanRepository::run()
"multiple/ambiguous *.db files present: " + joinStrings<decltype(dbFileNames), std::string>(dbFileNames, ", "));
}
// initialize temporary database object for the repository
auto &db = otherDbs.emplace_back(std::make_unique<LibPkg::Database>(dbFilePaths.front().stem(), dbFilePaths.front()));
auto &db = otherDbs.emplace_back(
std::make_unique<LibPkg::Database>(argsToString("clean-repository-", dbFilePaths.front().stem()), dbFilePaths.front()));
db->arch = dirInfo.canonicalPath.stem();
db->initStorage(*m_setup.config.storage());
db->loadPackages();
dirInfo.relevantDbs.emplace(db.get());
// acquire lock for db directory
@ -735,12 +732,11 @@ void CleanRepository::run()
// check whether the file is still referenced by any relevant database and move it to the archive if not
auto fileStillReferenced = false;
auto actuallyReferencedFileNames = std::vector<std::string_view>();
for (const auto *const db : dirInfo.relevantDbs) {
const auto i = db->packages.find(packageName);
if (i == db->packages.end()) {
for (auto *const db : dirInfo.relevantDbs) {
const auto pkg = db->findPackage(packageName);
if (!pkg) {
continue;
}
const auto &pkg = i->second;
const auto &pkgInfo = pkg->packageInfo;
if (!pkgInfo || pkgInfo->fileName.empty()) {
m_messages.warnings.emplace_back(

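Since package data is stored in lmdb now, even the throwaway database objects used while cleaning a repository have to be attached to the config's storage before loadPackages() can run, as the CleanRepository hunk above shows. A condensed sketch of that setup; the database name is a placeholder and the constructor/initStorage signatures are inferred from the hunk.

#include <memory>
#include <string>

// Create a temporary database object for a repository's *.db file (illustrative only; LibPkg headers assumed).
std::unique_ptr<LibPkg::Database> makeTemporaryDb(LibPkg::Config &config, const std::string &dbFilePath)
{
    auto db = std::make_unique<LibPkg::Database>("temporary-db", dbFilePath); // placeholder name
    db->initStorage(*config.storage()); // bind the lmdb-backed storage owned by the config
    db->loadPackages();                 // parse the *.db file into that storage
    return db;
}
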
View File

@ -27,7 +27,7 @@ void UpdateCheck::run()
if (m_fromAur && !m_packageLookupDone
&& WebClient::queryAurPackagesForDatabase(m_buildAction->log(), m_setup, m_setup.building.ioContext,
&get<shared_lock<shared_mutex>>(configReadLock), **m_destinationDbs.begin(), [this](std::vector<std::shared_ptr<LibPkg::Package>> &&) {
&std::get<std::shared_lock<std::shared_mutex>>(configReadLock), **m_destinationDbs.begin(), [this](std::vector<LibPkg::PackageSpec> &&) {
m_packageLookupDone = true;
run();
})) {
@ -35,7 +35,7 @@ void UpdateCheck::run()
}
auto result = checkForUpdates();
get<shared_lock<shared_mutex>>(configReadLock).unlock();
std::get<std::shared_lock<std::shared_mutex>>(configReadLock).unlock();
auto buildActionWriteLock = m_setup.building.lockToWrite();
m_buildAction->resultData = move(result);

View File

@ -222,18 +222,19 @@ void ServiceSetup::loadConfigFiles(bool restoreStateAndDiscardDatabases)
{
// read config file
cout << Phrases::InfoMessage << "Reading config file: " << configFilePath << Phrases::EndFlush;
IniFile configIni;
auto configIni = IniFile();
try {
// parse ini
ifstream configFile;
configFile.exceptions(fstream::badbit | fstream::failbit);
configFile.open(configFilePath, fstream::in);
auto configFile = std::ifstream();
configFile.exceptions(std::fstream::badbit | std::fstream::failbit);
configFile.open(configFilePath, std::fstream::in);
configIni.parse(configFile);
// read basic configuration values (not cached)
for (const auto &iniEntry : configIni.data()) {
if (iniEntry.first.empty()) {
convertValue(iniEntry.second, "pacman_config_file_path", pacmanConfigFilePath);
convertValue(iniEntry.second, "working_directory", workingDirectory);
convertValue(iniEntry.second, "max_dbs", maxDbs);
}
}
// apply working directory
@ -265,6 +266,7 @@ void ServiceSetup::loadConfigFiles(bool restoreStateAndDiscardDatabases)
// restore state/cache and discard databases
if (restoreStateAndDiscardDatabases) {
restoreState();
config.initStorage(dbPath.data(), maxDbs);
config.markAllDatabasesToBeDiscarded();
restoreStateAndDiscardDatabases = false;
}
@ -378,11 +380,11 @@ void ServiceSetup::printDatabases()
{
cerr << Phrases::SuccessMessage << "Found " << config.databases.size() << " databases:" << Phrases::End;
for (const auto &db : config.databases) {
cerr << Phrases::SubMessage << db.name << "@" << db.arch << ": " << db.packages.size() << " packages, last updated on "
cerr << Phrases::SubMessage << db.name << "@" << db.arch << ": " << db.packageCount() << " packages, last updated on "
<< db.lastUpdate.toString(DateTimeOutputFormat::DateAndTime) << Phrases::End << " - path: " << db.path
<< "\n - local db dir: " << db.localDbDir << "\n - local package dir: " << db.localPkgDir << '\n';
}
cerr << Phrases::SubMessage << "AUR (" << config.aur.packages.size() << " packages cached)" << Phrases::End;
cerr << Phrases::SubMessage << "AUR (" << config.aur.packageCount() << " packages cached)" << Phrases::End;
}
std::string_view ServiceSetup::cacheFilePath() const
@ -390,40 +392,41 @@ std::string_view ServiceSetup::cacheFilePath() const
return "cache-v" LIBREPOMGR_CACHE_VERSION ".bin";
}
RAPIDJSON_NAMESPACE::Document ServiceSetup::libraryDependenciesToJson() const
RAPIDJSON_NAMESPACE::Document ServiceSetup::libraryDependenciesToJson()
{
namespace JR = ReflectiveRapidJSON::JsonReflector;
auto document = RAPIDJSON_NAMESPACE::Document(RAPIDJSON_NAMESPACE::kObjectType);
auto &alloc = document.GetAllocator();
for (const auto &db : config.databases) {
for (auto &db : config.databases) {
auto dbValue = RAPIDJSON_NAMESPACE::Value(RAPIDJSON_NAMESPACE::Type::kObjectType);
for (const auto &[pkgName, pkg] : db.packages) {
if (!pkg->packageInfo) {
continue;
db.allPackages([&](StorageID, Package &&package) {
if (!package.packageInfo) {
return false;
}
if (pkg->libdepends.empty() && pkg->libprovides.empty()) {
if (package.libdepends.empty() && package.libprovides.empty()) {
auto hasVersionedPythonOrPerlDep = false;
for (const auto &dependency : pkg->dependencies) {
for (const auto &dependency : package.dependencies) {
if (dependency.mode == DependencyMode::Any || dependency.version.empty()
|| (dependency.name != "python" && dependency.name != "python2" && dependency.name != "perl")) {
continue;
return false;
}
hasVersionedPythonOrPerlDep = true;
break;
}
if (!hasVersionedPythonOrPerlDep) {
continue;
return false;
}
}
auto pkgValue = RAPIDJSON_NAMESPACE::Value(RAPIDJSON_NAMESPACE::Type::kObjectType);
auto pkgObj = pkgValue.GetObject();
JR::push(pkg->version, "v", pkgObj, alloc);
JR::push(pkg->packageInfo->buildDate, "t", pkgObj, alloc);
JR::push(pkg->dependencies, "d", pkgObj, alloc); // for versioned Python/Perl deps
JR::push(pkg->libdepends, "ld", pkgObj, alloc);
JR::push(pkg->libprovides, "lp", pkgObj, alloc);
dbValue.AddMember(RAPIDJSON_NAMESPACE::StringRef(pkgName.data(), JR::rapidJsonSize(pkgName.size())), pkgValue, alloc);
}
JR::push(package.version, "v", pkgObj, alloc);
JR::push(package.packageInfo->buildDate, "t", pkgObj, alloc);
JR::push(package.dependencies, "d", pkgObj, alloc); // for versioned Python/Perl deps
JR::push(package.libdepends, "ld", pkgObj, alloc);
JR::push(package.libprovides, "lp", pkgObj, alloc);
dbValue.AddMember(RAPIDJSON_NAMESPACE::StringRef(package.name.data(), JR::rapidJsonSize(package.name.size())), pkgValue, alloc);
return false;
});
document.AddMember(RAPIDJSON_NAMESPACE::Value(db.name % '@' + db.arch, alloc), dbValue, alloc);
}
return document;
@ -442,7 +445,7 @@ void ServiceSetup::restoreLibraryDependenciesFromJson(const string &json, Reflec
const auto dbObj = document.GetObject();
for (const auto &dbEntry : dbObj) {
if (!dbEntry.value.IsObject()) {
errors->reportTypeMismatch<decltype(LibPkg::Database::packages)>(document.GetType());
errors->reportTypeMismatch<RAPIDJSON_NAMESPACE::kObjectType>(document.GetType());
continue;
}
auto *const db = config.findOrCreateDatabaseFromDenotation(std::string_view(dbEntry.name.GetString()));
@ -454,7 +457,7 @@ void ServiceSetup::restoreLibraryDependenciesFromJson(const string &json, Reflec
}
const auto pkgObj = pkgEntry.value.GetObject();
auto name = std::string(pkgEntry.name.GetString());
auto &pkg = db->packages[name];
auto [pkgID, pkg] = db->findPackageWithID(name);
if (pkg) {
// do not mess with already existing packages; this restoring stuff is supposed to be done before loading packages from DBs
continue;
@ -468,7 +471,7 @@ void ServiceSetup::restoreLibraryDependenciesFromJson(const string &json, Reflec
JR::pull(pkg->dependencies, "d", pkgObj, errors); // for versioned Python/Perl deps
JR::pull(pkg->libdepends, "ld", pkgObj, errors);
JR::pull(pkg->libprovides, "lp", pkgObj, errors);
db->addPackageDependencies(pkg);
db->updatePackage(pkg);
}
}
}
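
The setup now owns the storage parameters: dbPath (defaulting to "libpkg.db") and max_dbs (presumably the number of named lmdb sub-databases, defaulting to 512), and it initializes the storage once while restoring state. A minimal sketch of that call, assuming Config::initStorage(const char *, std::uint32_t) as used in the loadConfigFiles() hunk above.

#include <cstdint>
#include <string>

// Initialize the package storage from the setup's new members (illustrative only; LibPkg headers assumed).
void initStorageFromSettings(LibPkg::Config &config, const std::string &dbPath, std::uint32_t maxDbs)
{
    config.initStorage(dbPath.data(), maxDbs); // opens/creates the lmdb environment, e.g. "libpkg.db" with 512 sub-databases
    config.markAllDatabasesToBeDiscarded();    // as done right after the call in loadConfigFiles() above
}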

View File

@ -42,6 +42,8 @@ struct LIBREPOMGR_EXPORT ServiceSetup : public LibPkg::Lockable {
std::string pacmanConfigFilePath = "/etc/pacman.conf";
std::filesystem::path initialWorkingDirectory;
std::string workingDirectory = "workingdir";
std::string dbPath = "libpkg.db";
std::uint32_t maxDbs = 512;
// variables relevant for the web server; only changed when (re)loading config
struct LIBREPOMGR_EXPORT WebServerSetup {
@ -145,7 +147,7 @@ struct LIBREPOMGR_EXPORT ServiceSetup : public LibPkg::Lockable {
void loadConfigFiles(bool restoreStateAndDiscardDatabases);
void printDatabases();
std::string_view cacheFilePath() const;
RAPIDJSON_NAMESPACE::Document libraryDependenciesToJson() const;
RAPIDJSON_NAMESPACE::Document libraryDependenciesToJson();
void restoreLibraryDependenciesFromJson(const std::string &json, ReflectiveRapidJSON::JsonDeserializationErrors *errors);
std::size_t restoreState();
std::size_t saveState();

View File

@ -26,6 +26,7 @@ using CppUtilities::operator<<; // must be visible prior to the call site
#include <chrono>
using namespace std;
using namespace std::literals;
using namespace CPPUNIT_NS;
using namespace CppUtilities;
using namespace CppUtilities::Literals;
@ -60,6 +61,7 @@ public:
void testCleanup();
private:
void initStorage();
void loadBasicTestSetup();
void loadTestConfig();
void logTestSetup();
@ -68,6 +70,7 @@ private:
void runBuildAction(const char *message, TimeSpan timeout = TimeSpan::fromSeconds(5));
template <typename InternalBuildActionType> InternalBuildActionType *internalBuildAction();
std::string m_dbFile;
ServiceSetup m_setup;
std::shared_ptr<BuildAction> m_buildAction;
std::filesystem::path m_workingDir;
@ -95,6 +98,12 @@ void BuildActionsTests::tearDown()
std::filesystem::current_path(m_workingDir);
}
void BuildActionsTests::initStorage()
{
m_dbFile = workingCopyPath("test-build-actions.db", WorkingCopyMode::Cleanup);
m_setup.config.initStorage(m_dbFile.data());
}
/*!
* \brief Assigns certain build variables to use fake scripts (instead of invoking e.g. the real makepkg).
* \remarks The fake scripts are essentially no-ops which merely print the script name and the passed arguments.
@ -122,6 +131,9 @@ void BuildActionsTests::loadBasicTestSetup()
*/
void BuildActionsTests::loadTestConfig()
{
if (!m_setup.config.storage()) {
initStorage();
}
m_setup.loadConfigFiles(false);
m_setup.building.workingDirectory = m_setup.workingDirectory + "/building";
m_setup.printDatabases();
@ -135,11 +147,12 @@ void BuildActionsTests::loadTestConfig()
*/
void BuildActionsTests::logTestSetup()
{
for (const auto &db : m_setup.config.databases) {
for (auto &db : m_setup.config.databases) {
cout << EscapeCodes::Phrases::Info << "Packages of " << db.name << ':' << EscapeCodes::Phrases::End;
for (const auto &[pkgName, pkg] : db.packages) {
cout << " - " << pkgName << '\n';
}
db.allPackages([](LibPkg::StorageID, LibPkg::Package &&package) {
cout << " - " << package.name << '\n';
return false;
});
}
cout.flush();
}
@ -277,16 +290,22 @@ void BuildActionsTests::testBuildActionProcess()
void BuildActionsTests::testParsingInfoFromPkgFiles()
{
// init config
LibPkg::Config &config = m_setup.config;
config.databases = { { "foo.db" }, { "bar.db" }, { "baz.db" } };
initStorage();
auto &config = m_setup.config;
for (const auto dbName : { "foo.db"sv, "bar.db"sv, "baz.db"sv }) {
config.findOrCreateDatabase(dbName, std::string_view());
}
// init db object
LibPkg::Database &fooDb = config.databases[0];
auto harfbuzz = fooDb.packages["mingw-w64-harfbuzz"] = LibPkg::Package::fromPkgFileName("mingw-w64-harfbuzz-1.4.2-1-any.pkg.tar.xz");
auto syncthingtray = fooDb.packages["syncthingtray"] = LibPkg::Package::fromPkgFileName("syncthingtray-0.6.2-1-x86_64.pkg.tar.xz");
auto &fooDb = config.databases[0];
auto &barDb = config.databases[1];
const auto harfbuzz = LibPkg::Package::fromPkgFileName("mingw-w64-harfbuzz-1.4.2-1-any.pkg.tar.xz");
const auto harfbuzzID = fooDb.updatePackage(harfbuzz);
const auto syncthingtray = LibPkg::Package::fromPkgFileName("syncthingtray-0.6.2-1-x86_64.pkg.tar.xz");
const auto syncthingtrayID = fooDb.updatePackage(syncthingtray);
fooDb.localPkgDir = directory(testFilePath("repo/foo/mingw-w64-harfbuzz-1.4.2-1-any.pkg.tar.xz"));
LibPkg::Database &barDb = config.databases[1];
auto cmake = barDb.packages["cmake"] = LibPkg::Package::fromPkgFileName("cmake-3.8.2-1-x86_64.pkg.tar.xz");
const auto cmake = LibPkg::Package::fromPkgFileName("cmake-3.8.2-1-x86_64.pkg.tar.xz");
barDb.updatePackage(cmake);
CPPUNIT_ASSERT_EQUAL_MESSAGE("origin", LibPkg::PackageOrigin::PackageFileName, cmake->origin);
barDb.localPkgDir = directory(testFilePath("repo/bar/cmake-3.8.2-1-x86_64.pkg.tar.xz"));
@ -305,11 +324,13 @@ void BuildActionsTests::testParsingInfoFromPkgFiles()
const auto pkgsRequiringLibGCC = config.findPackagesProvidingLibrary("pe-i386::libgcc_s_sjlj-1.dll", true);
CPPUNIT_ASSERT_EQUAL(1_st, pkgsRequiringLibGCC.size());
CPPUNIT_ASSERT_EQUAL(harfbuzz, pkgsRequiringLibGCC.front().pkg);
CPPUNIT_ASSERT_EQUAL(harfbuzz->name, pkgsRequiringLibGCC.front().pkg->name);
CPPUNIT_ASSERT_EQUAL(harfbuzzID, pkgsRequiringLibGCC.front().id);
const auto pkgsProvidingLibSyncthingConnector = config.findPackagesProvidingLibrary("elf-x86_64::libsyncthingconnector.so.0.6.2", false);
CPPUNIT_ASSERT_EQUAL(1_st, pkgsProvidingLibSyncthingConnector.size());
CPPUNIT_ASSERT_EQUAL(syncthingtray, pkgsProvidingLibSyncthingConnector.front().pkg);
CPPUNIT_ASSERT_EQUAL(syncthingtray->name, pkgsProvidingLibSyncthingConnector.front().pkg->name);
CPPUNIT_ASSERT_EQUAL(syncthingtrayID, pkgsProvidingLibSyncthingConnector.front().id);
}
/*!
@ -392,6 +413,7 @@ void BuildActionsTests::testConductingBuild()
{
// load basic test setup and create build action
loadBasicTestSetup();
initStorage();
m_buildAction = std::make_shared<BuildAction>(0, &m_setup);
m_buildAction->type = BuildActionType::ConductBuild;
m_buildAction->directory = "conduct-build-test";
@ -533,11 +555,13 @@ void BuildActionsTests::testConductingBuild()
auto *const miscDb = m_setup.config.findDatabase("misc"sv, "x86_64"sv);
CPPUNIT_ASSERT_MESSAGE("boost database present", boostDb);
CPPUNIT_ASSERT_MESSAGE("misc database present", miscDb);
auto &boostLibsPackage = boostDb->packages["boost-libs"];
auto boostLibsPackage = boostDb->findPackage("boost-libs");
CPPUNIT_ASSERT_MESSAGE("boost-libs package present", boostLibsPackage);
boostLibsPackage->libprovides = { "elf-x86_64::libboost_regex.so.1.72.0" };
boostLibsPackage->libdepends = { "elf-x86_64::libstdc++.so.6" };
boostDb->forceUpdatePackage(boostLibsPackage);
auto &sourceHighlightPackage = miscDb->packages["source-highlight"];
auto sourceHighlightPackage = miscDb->findPackage("source-highlight");
CPPUNIT_ASSERT_MESSAGE("source-highlight package present", sourceHighlightPackage);
sourceHighlightPackage->libprovides = { "elf-x86_64::libsource-highlight.so.4" };
sourceHighlightPackage->libdepends
= { "elf-x86_64::libboost_regex.so.1.72.0", "elf-x86_64::libsource-highlight.so.4", "elf-x86_64::libstdc++.so.6" };
@ -642,9 +666,11 @@ void BuildActionsTests::testCleanup()
// parse db
// note: The db actually only contains source-highlight and mingw-w64-harfbuzz
auto &miscDb = m_setup.config.databases.emplace_back("misc", repoDir64 / "misc.db");
miscDb.localDbDir = miscDb.localPkgDir = repoDir64;
miscDb.loadPackages();
initStorage();
auto *const miscDb = m_setup.config.findOrCreateDatabase("misc"sv, std::string_view());
miscDb->path = repoDir64 / "misc.db";
miscDb->localDbDir = miscDb->localPkgDir = repoDir64;
miscDb->loadPackages();
// create and run build action
m_buildAction = std::make_shared<BuildAction>(0, &m_setup);
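
The tests create their own lmdb file before touching any database, and updatePackage() returns a StorageID that later assertions compare against query results. A sketch of that bootstrap, reusing the test helpers from c++utilities seen above; the db file name and database name are placeholders.

#include <string_view>

// Bootstrap an empty test storage and insert one package (illustrative only; LibPkg headers assumed).
LibPkg::StorageID initTestStorage(LibPkg::Config &config)
{
    const auto dbFile = workingCopyPath("example-test.db", WorkingCopyMode::Cleanup); // placeholder file name
    config.initStorage(dbFile.data());
    auto *const db = config.findOrCreateDatabase(std::string_view("foo.db"), std::string_view());
    const auto package = LibPkg::Package::fromPkgFileName("mingw-w64-harfbuzz-1.4.2-1-any.pkg.tar.xz");
    return db->updatePackage(package); // keep the ID around to compare it with lookup results later
}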

View File

@ -52,6 +52,7 @@ public:
void testPostingBuildActionsFromTask();
private:
std::string m_dbFile;
ServiceSetup m_setup;
boost::beast::error_code m_lastError;
string m_body;
@ -74,7 +75,9 @@ WebAPITests::WebAPITests()
void WebAPITests::setUp()
{
applicationInfo.version = APP_VERSION;
m_dbFile = workingCopyPath("test-webapi.db", WorkingCopyMode::Cleanup);
m_setup.webServer.port = randomPort();
m_setup.config.initStorage(m_dbFile.data());
}
void WebAPITests::tearDown()

View File

@ -136,8 +136,8 @@ void getUnresolved(const Params &params, ResponseHandler &&handler)
const auto unresolvedPackages = db.detectUnresolvedPackages(params.setup.config, newPackages, removedPackages);
auto value = RAPIDJSON_NAMESPACE::Value(RAPIDJSON_NAMESPACE::Type::kObjectType);
auto obj = value.GetObject();
for (const auto &[package, unresolvedDependencies] : unresolvedPackages) {
JR::push(unresolvedDependencies, package->name.data(), obj, document.GetAllocator());
for (const auto &[packageSpec, unresolvedDependencies] : unresolvedPackages) {
JR::push(unresolvedDependencies, packageSpec.pkg->name.data(), obj, document.GetAllocator());
}
obj.AddMember(RAPIDJSON_NAMESPACE::StringRef(db.name.data(), JR::rapidJsonSize(db.name.size())), value, document.GetAllocator());
}
@ -242,9 +242,9 @@ void getPackages(const Params &params, ResponseHandler &&handler)
const auto isDbAur = dbName == "aur";
if (fromAur && (dbName.empty() || isDbAur)) {
auto packageNameStr = std::string(packageName);
if (auto i = aurDb.packages.find(packageNameStr), end = aurDb.packages.end();
i != end && (!details || i->second->origin != PackageOrigin::AurRpcSearch)) {
aurPackages.emplace_back(PackageSearchResult{ aurDb, i->second });
if (const auto [aurPackageID, aurPackage] = aurDb.findPackageWithID(packageNameStr);
aurPackage && (!details || aurPackage->origin != PackageOrigin::AurRpcSearch)) {
aurPackages.emplace_back(aurDb, aurPackage, aurPackageID);
} else {
neededAurPackages.emplace_back(std::move(packageNameStr));
}
@ -325,9 +325,9 @@ void getPackages(const Params &params, ResponseHandler &&handler)
ReflectiveRapidJSON::JsonReflector::push(std::move(package), documentArray, document->GetAllocator());
}
} else if (!queriedAurPackages.empty()) {
for (auto &package : queriedAurPackages) {
for (auto &[packageID, package] : queriedAurPackages) {
ReflectiveRapidJSON::JsonReflector::push(
PackageSearchResult{ params.setup.config.aur, std::move(package) }, documentArray, document->GetAllocator());
PackageSearchResult{ params.setup.config.aur, std::move(package), packageID }, documentArray, document->GetAllocator());
}
}
configLock.unlock();
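
Search results now carry the storage ID alongside the database and package, and single-package lookups go through findPackageWithID(). A small sketch of building such a result; the aggregate's member order is taken from the emplace_back() call above, and PackageSearchResult is assumed to live in the LibPkg namespace like the other types here.

#include <optional>
#include <string>

// Look up one package by name and wrap it into a search result (illustrative only).
std::optional<LibPkg::PackageSearchResult> lookUpPackage(LibPkg::Database &db, const std::string &packageName)
{
    const auto [packageID, package] = db.findPackageWithID(packageName);
    if (!package) {
        return std::nullopt; // not present in this database
    }
    return LibPkg::PackageSearchResult{ db, package, packageID };
}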

View File

@ -50,15 +50,13 @@ void searchAurPackages(LogContext &log, ServiceSetup &setup, const std::string &
// parse retrieved JSON
const auto &body = get<Response>(session2.response).body();
try {
const auto packages = Package::fromAurRpcJson(body.data(), body.size(), PackageOrigin::AurRpcSearch);
// cache the AUR packages
// parse and cache the AUR packages
auto packages = Package::fromAurRpcJson(body.data(), body.size(), PackageOrigin::AurRpcSearch);
auto lock = setup.config.lockToWrite();
for (auto &package : packages) {
setup.config.aur.updatePackage(package);
for (auto &[packageID, package] : packages) {
packageID = setup.config.aur.updatePackage(package);
}
lock.unlock();
multiSession->addResponses(packages);
} catch (const RAPIDJSON_NAMESPACE::ParseResult &e) {
log(Phrases::ErrorMessage, "Unable to parse AUR search result: ", serializeParseError(e), '\n');
@ -98,15 +96,15 @@ std::shared_ptr<AurQuerySession> queryAurPackagesInternal(LogContext &log, Servi
// parse retrieved JSON
const auto &body = get<Response>(session2.response).body();
try {
const auto packagesFromAur = Package::fromAurRpcJson(body.data(), body.size());
// cache the AUR packages
// parse and cache the AUR packages
auto packagesFromAur = Package::fromAurRpcJson(body.data(), body.size());
auto lock = setup.config.lockToWrite();
for (auto &package : packagesFromAur) {
setup.config.aur.updatePackage(package);
auto updater = PackageUpdater(setup.config.aur);
for (auto &[packageID, package] : packagesFromAur) {
packageID = updater.update(package);
}
updater.commit();
lock.unlock();
multiSession->addResponses(packagesFromAur);
} catch (const RAPIDJSON_NAMESPACE::ParseResult &e) {
log(Phrases::ErrorMessage, "Unable to parse AUR package from RPC: ", serializeParseError(e), '\n');
@ -164,18 +162,19 @@ std::shared_ptr<AurQuerySession> queryAurPackages(LogContext &log, ServiceSetup
std::shared_ptr<AurQuerySession> queryAurPackagesForDatabase(LogContext &log, ServiceSetup &setup, boost::asio::io_context &ioContext,
std::shared_lock<std::shared_mutex> *configReadLock, LibPkg::Database &database, typename AurQuerySession::HandlerType &&handler)
{
vector<string> missingPackages;
std::shared_lock<std::shared_mutex> ownConfigReadLock;
auto missingPackages = std::vector<std::string>();
auto ownConfigReadLock = std::shared_lock<std::shared_mutex>();
if (!configReadLock) {
ownConfigReadLock = setup.config.lockToRead();
configReadLock = &ownConfigReadLock;
}
const auto &aurPackages = setup.config.aur.packages;
for (const auto &package : database.packages) {
if (aurPackages.find(package.first) == aurPackages.end()) {
missingPackages.emplace_back(package.first);
auto &aurDb = setup.config.aur;
database.allPackages([&aurDb, &missingPackages](StorageID, Package &&package) {
if (const auto aurPackage = aurDb.findPackage(package.name); !aurPackage) {
missingPackages.emplace_back(package.name);
}
}
return false;
});
if (missingPackages.empty()) {
return nullptr;
}
@ -279,9 +278,9 @@ void queryAurSnapshots(LogContext &log, ServiceSetup &setup, const std::vector<A
if (!havePkgbuild) {
result.error = "PKGBUILD is missing";
}
if (result.packages.empty() || result.packages.front()->name.empty()) {
if (result.packages.empty() || result.packages.front().pkg->name.empty()) {
result.error = "Unable to parse .SRCINFO: no package name present";
} else if (!(result.sourceInfo = result.packages.front()->sourceInfo)) {
} else if (!(result.sourceInfo = result.packages.front().pkg->sourceInfo)) {
result.error = "Unable to parse .SRCINFO: no source info present";
}
multiSession->addResponse(move(result));
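
Caching a batch of AUR packages now goes through a PackageUpdater that is committed once instead of calling updatePackage() per package. A sketch of that pattern using only the calls shown above; PackageUpdater is assumed to live next to the other LibPkg types, and commit() presumably flushes the whole batch within a single storage transaction.

#include <vector>

// Store freshly parsed packages as one batch and record their assigned IDs (illustrative only).
void storePackages(LibPkg::Database &db, std::vector<LibPkg::PackageSpec> &packages)
{
    auto updater = LibPkg::PackageUpdater(db);
    for (auto &[packageID, package] : packages) {
        packageID = updater.update(package); // write the assigned StorageID back into the spec
    }
    updater.commit();
}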

View File

@ -20,7 +20,7 @@ struct AurSnapshotResult {
std::string packageName;
std::string errorOutput;
std::shared_ptr<LibPkg::SourceInfo> sourceInfo;
std::vector<std::shared_ptr<LibPkg::Package>> packages;
std::vector<LibPkg::PackageSpec> packages;
std::string error;
};
struct AurSnapshotQueryParams {
@ -28,7 +28,7 @@ struct AurSnapshotQueryParams {
const std::string *targetDirectory;
};
using AurQuerySession = MultiSession<std::shared_ptr<LibPkg::Package>>;
using AurQuerySession = MultiSession<LibPkg::PackageSpec>;
using AurSnapshotQuerySession = MultiSession<AurSnapshotResult>;
void searchAurPackages(LogContext &log, ServiceSetup &setup, const std::string &searchTerms, boost::asio::io_context &ioContext,

View File

@ -12,8 +12,6 @@
#include <optional>
#include <regex>
using namespace std;
using namespace LibPkg;
using namespace CppUtilities;
int main(int argc, const char *argv[])
@ -43,38 +41,51 @@ int main(int argc, const char *argv[])
OperationArgument listArg("list", '\0', "lists the files contained within the specified package");
ConfigValueArgument packageArg("package", '\0', "the name of the package", { "name" });
packageArg.setImplicit(true);
packageArg.setRequired(true);
listArg.setSubArguments({ &packageArg, &dbFileArg, &loadPacmanConfigArg });
parser.setMainArguments({ &searchArg, &listArg, &helpArg });
parser.setDefaultArgument(&helpArg);
parser.parseArgs(argc, argv);
// init config from pacman config to get relevant dbs
Config cfg;
auto cfg = LibPkg::Config();
if (loadPacmanConfigArg.isPresent()) {
try {
cfg.loadPacmanConfig("/etc/pacman.conf");
} catch (const runtime_error &e) {
cerr << "Unable to load pacman config." << endl;
exit(1);
} catch (const std::runtime_error &e) {
std::cerr << "Unable to load pacman config: " << e.what() << std::endl;
std::exit(1);
}
}
// allow adding custom db paths
if (dbFileArg.isPresent()) {
for (const char *dbPath : dbFileArg.values(0)) {
Database &db = cfg.databases.emplace_back(string(), dbPath);
for (const char *const dbPath : dbFileArg.values()) {
auto &db = cfg.databases.emplace_back(std::string(), dbPath);
db.name = std::filesystem::path(db.path).stem().string();
db.localPkgDir = directory(db.path);
}
}
if (cfg.databases.empty()) {
cerr << "No databases configured." << endl;
exit(2);
std::cerr << "No databases configured." << std::endl;
std::exit(2);
}
// load all packages for the dbs
for (Database &db : cfg.databases) {
auto ec = std::error_code();
auto tmpDir = std::filesystem::temp_directory_path(ec);
if (ec) {
std::cerr << "Unable to locate temp directory path: " << ec.message() << std::endl;
std::exit(4);
}
try {
cfg.initStorage((tmpDir.string() + "/pacfind.db").data());
} catch (const std::runtime_error &e) {
std::cerr << "Unable to initialize temporary storage: " << e.what() << std::endl;
std::exit(1);
}
for (auto &db : cfg.databases) {
try {
if (endsWith(db.path, ".files")) {
db.filesPath = db.path;
@ -82,27 +93,27 @@ int main(int argc, const char *argv[])
db.filesPath = db.filesPathFromRegularPath();
}
db.loadPackages(true);
} catch (const runtime_error &e) {
cerr << "Unable to load database \"" << db.name << "\": " << e.what() << '\n';
} catch (const std::runtime_error &e) {
std::cerr << "Unable to load database \"" << db.name << "\": " << e.what() << '\n';
}
}
// print the file list for a certain package
if (packageArg.isPresent()) {
if (listArg.isPresent()) {
const auto pkgs = cfg.findPackages(packageArg.firstValue());
for (const auto &pkg : pkgs) {
if (const auto *const db = std::get<Database *>(pkg.db); !db->name.empty()) {
cout << db->name << '/';
if (const auto *const db = std::get<LibPkg::Database *>(pkg.db); !db->name.empty()) {
std::cout << db->name << '/';
}
cout << pkg.pkg->name;
std::cout << pkg.pkg->name;
if (!pkg.pkg->packageInfo) {
cout << '\n';
std::cout << '\n';
continue;
}
for (const auto &path : pkg.pkg->packageInfo->files) {
cout << "\n - " << path;
std::cout << "\n - " << path;
}
cout << '\n';
std::cout << '\n';
}
return 0;
}
@ -115,19 +126,19 @@ int main(int argc, const char *argv[])
try {
regex = std::regex(searchTerm, std::regex::egrep);
} catch (const std::regex_error &e) {
cerr << "Specified regex is invalid: " << e.what() << endl;
exit(3);
std::cerr << "Specified regex is invalid: " << e.what() << std::endl;
std::exit(3);
}
}
for (const Database &db : cfg.databases) {
for (const auto &pkg : db.packages) {
const auto &pkgInfo = pkg.second->packageInfo;
for (auto &db : cfg.databases) {
db.allPackages([&](LibPkg::StorageID, LibPkg::Package &&package) {
const auto &pkgInfo = package.packageInfo;
if (!pkgInfo) {
continue;
return false;
}
auto foundOne = false;
for (const string &file : pkgInfo->files) {
const auto found = regex.has_value() ? std::regex_match(file, regex.value()) : file.find(searchTerm) != string::npos;
for (const auto &file : pkgInfo->files) {
const auto found = regex.has_value() ? std::regex_match(file, regex.value()) : file.find(searchTerm) != std::string::npos;
if (negate) {
if (found) {
foundOne = true;
@ -141,20 +152,21 @@ int main(int argc, const char *argv[])
}
if (!foundOne) {
if (!db.name.empty()) {
cout << db.name << '/';
std::cout << db.name << '/';
}
cout << pkg.first << '\n';
std::cout << package.name << '\n';
foundOne = true;
}
cout << " - " << file << '\n';
std::cout << " - " << file << '\n';
}
if (negate && !foundOne) {
if (!db.name.empty()) {
cout << db.name << '/';
std::cout << db.name << '/';
}
cout << pkg.first << '\n';
std::cout << package.name << '\n';
}
}
return false;
});
}
return 0;
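
Because package data now lives in an lmdb file rather than in in-memory maps, even a one-shot query tool has to create a storage file first; pacfind uses the temp directory for that, as shown above. A condensed sketch of the resulting flow; the paths and the database name are placeholders, and the LibPkg headers declaring Config, Database, Package and StorageID are assumed to be available.

#include <iostream>
#include <string>

// List all package names of a single sync database (illustrative only).
void listPackageNames()
{
    auto cfg = LibPkg::Config();
    cfg.initStorage("/tmp/pacfind-example.db"); // an lmdb file is required even for ad-hoc queries
    auto &db = cfg.databases.emplace_back(std::string(), "/var/lib/pacman/sync/core.db"); // placeholder path
    db.name = "core";
    db.loadPackages();
    db.allPackages([](LibPkg::StorageID, LibPkg::Package &&package) {
        std::cout << package.name << '\n';
        return false; // keep iterating
    });
}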