#include <map>
#include <algorithm>
+#include <solv/solvversion.h>
+
#include "zypp/base/InputStream.h"
#include "zypp/base/LogTools.h"
#include "zypp/base/Gettext.h"
+#include "zypp/base/DefaultIntegral.h"
#include "zypp/base/Function.h"
#include "zypp/base/Regex.h"
#include "zypp/PathInfo.h"
///////////////////////////////////////////////////////////////////
namespace zypp
{
+
+ ///////////////////////////////////////////////////////////////////
+ namespace env
+ {
+ /** To trigger appdata refresh unconditionally */
+ inline bool ZYPP_PLUGIN_APPDATA_FORCE_COLLECT()
+ {
+ const char * env = getenv("ZYPP_PLUGIN_APPDATA_FORCE_COLLECT");
+ return( env && str::strToBool( env, true ) );
+ }
+ } // namespace env
+ ///////////////////////////////////////////////////////////////////
+
+ ///////////////////////////////////////////////////////////////////
+ namespace
+ {
+ ///////////////////////////////////////////////////////////////////
+ /// \class UrlCredentialExtractor
+ /// \brief Extract credentials in \ref Url authority and store them via \ref CredentialManager.
+ ///
+ /// Lazy init CredentialManager and save collected credentials when
+ /// going out of scope.
+ ///
+ /// Methods return whether a password has been collected/extracted.
+ ///
+ /// \code
+ /// UrlCredentialExtractor( "/rootdir" ).collect( oneUrlOrUrlContainer );
+ /// \endcode
+ /// \code
+ /// {
+ /// UrlCredentialExtractor extractCredentials;
+ /// extractCredentials.collect( oneUrlOrUrlContainer );
+ /// extractCredentials.extract( oneMoreUrlOrUrlContainer );
+ /// ....
+ /// }
+ /// \endcode
+ ///
+    class UrlCredentialExtractor
+    {
+    public:
+      /** \a root_r is kept by reference; it must outlive this object.
+       * Taking it \c const allows construction from a temporary as shown
+       * in the usage example above (\c UrlCredentialExtractor("/rootdir")).
+       */
+      UrlCredentialExtractor( const Pathname & root_r )
+      : _root( root_r )
+      {}
+
+      /** Save any collected credentials when going out of scope. */
+      ~UrlCredentialExtractor()
+      { if ( _cmPtr ) _cmPtr->save(); }
+
+      /** Remember credentials stored in URL authority leaving the password in \a url_r. */
+      bool collect( const Url & url_r )
+      {
+	bool ret = url_r.hasCredentialsInAuthority();
+	if ( ret )
+	{
+	  // Lazy init the CredentialManager on first hit only.
+	  if ( !_cmPtr ) _cmPtr.reset( new media::CredentialManager( _root ) );
+	  _cmPtr->addUserCred( url_r );
+	}
+	return ret;
+      }
+      /** \overload operating on Url container */
+      template<class TContainer>
+      bool collect( const TContainer & urls_r )
+      { bool ret = false; for ( const Url & url : urls_r ) { if ( collect( url ) && !ret ) ret = true; } return ret; }
+
+      /** Remember credentials stored in URL authority stripping the password from \a url_r. */
+      bool extract( Url & url_r )
+      {
+	bool ret = collect( url_r );
+	if ( ret )
+	  url_r.setPassword( std::string() );
+	return ret;
+      }
+      /** \overload operating on Url container */
+      template<class TContainer>
+      bool extract( TContainer & urls_r )
+      { bool ret = false; for ( Url & url : urls_r ) { if ( extract( url ) && !ret ) ret = true; } return ret; }
+
+    private:
+      const Pathname & _root;	// not owned; see ctor note
+      scoped_ptr<media::CredentialManager> _cmPtr;
+    };
+ } // namespace
+ ///////////////////////////////////////////////////////////////////
+
///////////////////////////////////////////////////////////////////
namespace
{
/**
* \short Simple callback to collect the results
*
- * Classes like RepoFileParser call the callback
+ * Classes like RepoFileReader call the callback
* once per each repo in a file.
*
* Passing this functor as callback, you can collect
MIL << "repo file: " << file << endl;
RepoCollector collector;
parser::RepoFileReader parser( file, bind( &RepoCollector::collect, &collector, _1 ) );
- return collector.repos;
+ return std::move(collector.repos);
}
////////////////////////////////////////////////////////////////////////////
{
MIL << "directory " << dir << endl;
std::list<RepoInfo> repos;
- std::list<Pathname> entries;
- if ( filesystem::readdir( entries, dir, false ) != 0 )
+ bool nonroot( geteuid() != 0 );
+ if ( nonroot && ! PathInfo(dir).userMayRX() )
{
- // TranslatorExplanation '%s' is a pathname
- ZYPP_THROW(Exception(str::form(_("Failed to read directory '%s'"), dir.c_str())));
+ JobReport::warning( str::FormatNAC(_("Cannot read repo directory '%1%': Permission denied")) % dir );
}
-
- str::regex allowedRepoExt("^\\.repo(_[0-9]+)?$");
- for ( std::list<Pathname>::const_iterator it = entries.begin(); it != entries.end(); ++it )
+ else
{
- if (str::regex_match(it->extension(), allowedRepoExt))
+ std::list<Pathname> entries;
+ if ( filesystem::readdir( entries, dir, false ) != 0 )
{
- std::list<RepoInfo> tmp = repositories_in_file( *it );
- repos.insert( repos.end(), tmp.begin(), tmp.end() );
+ // TranslatorExplanation '%s' is a pathname
+ ZYPP_THROW(Exception(str::form(_("Failed to read directory '%s'"), dir.c_str())));
+ }
- //std::copy( collector.repos.begin(), collector.repos.end(), std::back_inserter(repos));
- //MIL << "ok" << endl;
+ str::regex allowedRepoExt("^\\.repo(_[0-9]+)?$");
+ for ( std::list<Pathname>::const_iterator it = entries.begin(); it != entries.end(); ++it )
+ {
+ if ( str::regex_match(it->extension(), allowedRepoExt) )
+ {
+ if ( nonroot && ! PathInfo(*it).userMayR() )
+ {
+ JobReport::warning( str::FormatNAC(_("Cannot read repo file '%1%': Permission denied")) % *it );
+ }
+ else
+ {
+ const std::list<RepoInfo> & tmp( repositories_in_file( *it ) );
+ repos.insert( repos.end(), tmp.begin(), tmp.end() );
+ }
+ }
}
}
return repos;
inline void assert_alias( const RepoInfo & info )
{
if ( info.alias().empty() )
- ZYPP_THROW( RepoNoAliasException() );
+ ZYPP_THROW( RepoNoAliasException( info ) );
// bnc #473834. Maybe we can match the alias against a regex to define
// and check for valid aliases
if ( info.alias()[0] == '.')
inline void assert_alias( const ServiceInfo & info )
{
if ( info.alias().empty() )
- ZYPP_THROW( ServiceNoAliasException() );
+ ZYPP_THROW( ServiceNoAliasException( info ) );
// bnc #473834. Maybe we can match the alias against a regex to define
// and check for valid aliases
if ( info.alias()[0] == '.')
////////////////////////////////////////////////////////////////////////////
+ ///////////////////////////////////////////////////////////////////
+  namespace
+  {
+    /** Whether repo is not under RepoManager control and provides its own metadata paths
+     * (no filepath, i.e. no repo file on disk, and auto metadata paths are in use;
+     * presumably a temporary repo — see the raw/packages/solv path helpers below).
+     */
+    inline bool isTmpRepo( const RepoInfo & info_r )
+    { return( info_r.filepath().empty() && info_r.usesAutoMethadataPaths() ); }
+  } // namespace
+ ///////////////////////////////////////////////////////////////////
+
/**
* \short Calculates the raw cache path for a repository, this is usually
* /var/cache/zypp/alias
inline Pathname rawcache_path_for_repoinfo( const RepoManagerOptions &opt, const RepoInfo &info )
{
assert_alias(info);
- return opt.repoRawCachePath / info.escaped_alias();
+ return isTmpRepo( info ) ? info.metadataPath() : opt.repoRawCachePath / info.escaped_alias();
}
/**
* for example /var/cache/zypp/alias/addondir
*/
inline Pathname rawproductdata_path_for_repoinfo( const RepoManagerOptions &opt, const RepoInfo &info )
- {
- assert_alias(info);
- return opt.repoRawCachePath / info.escaped_alias() / info.path();
- }
+ { return rawcache_path_for_repoinfo( opt, info ) / info.path(); }
/**
* \short Calculates the packages cache path for a repository
inline Pathname packagescache_path_for_repoinfo( const RepoManagerOptions &opt, const RepoInfo &info )
{
assert_alias(info);
- return opt.repoPackagesCachePath / info.escaped_alias();
+ return isTmpRepo( info ) ? info.packagesPath() : opt.repoPackagesCachePath / info.escaped_alias();
}
/**
* \short Calculates the solv cache path for a repository
*/
- inline Pathname solv_path_for_repoinfo( const RepoManagerOptions &opt, const RepoInfo &info)
+ inline Pathname solv_path_for_repoinfo( const RepoManagerOptions &opt, const RepoInfo &info )
{
assert_alias(info);
- return opt.repoSolvCachePath / info.escaped_alias();
+ return isTmpRepo( info ) ? info.metadataPath().dirname() / "%SLV%" : opt.repoSolvCachePath / info.escaped_alias();
}
////////////////////////////////////////////////////////////////////////////
return ret;
}
+  std::ostream & operator<<( std::ostream & str, const RepoManagerOptions & obj )
+  {
+    // Dump one member per line: "  name\tvalue".
+#define OUTS(X) str << " " #X "\t" << obj.X << endl
+    str << "RepoManagerOptions (" << obj.rootDir << ") {" << endl;
+    OUTS( repoRawCachePath );
+    OUTS( repoSolvCachePath );
+    OUTS( repoPackagesCachePath );
+    OUTS( knownReposPath );
+    OUTS( knownServicesPath );
+    OUTS( pluginsPath );
+    str << "}" << endl;
+#undef OUTS
+    return str;
+  }
+
///////////////////////////////////////////////////////////////////
/// \class RepoManager::Impl
/// \brief RepoManager implementation.
init_knownRepositories();
}
+ ~Impl()
+ {
+ // trigger appdata refresh if some repos change
+ if ( ( _reposDirty || env::ZYPP_PLUGIN_APPDATA_FORCE_COLLECT() )
+ && geteuid() == 0 && ( _options.rootDir.empty() || _options.rootDir == "/" ) )
+ {
+ try {
+ std::list<Pathname> entries;
+ filesystem::readdir( entries, _options.pluginsPath/"appdata", false );
+ if ( ! entries.empty() )
+ {
+ ExternalProgram::Arguments cmd;
+ cmd.push_back( "<" ); // discard stdin
+ cmd.push_back( ">" ); // discard stdout
+ cmd.push_back( "PROGRAM" ); // [2] - fix index below if changing!
+ for ( const auto & rinfo : repos() )
+ {
+ if ( ! rinfo.enabled() )
+ continue;
+ cmd.push_back( "-R" );
+ cmd.push_back( rinfo.alias() );
+ cmd.push_back( "-t" );
+ cmd.push_back( rinfo.type().asString() );
+ cmd.push_back( "-p" );
+ cmd.push_back( rinfo.metadataPath().asString() );
+ }
+
+ for_( it, entries.begin(), entries.end() )
+ {
+ PathInfo pi( *it );
+ //DBG << "/tmp/xx ->" << pi << endl;
+ if ( pi.isFile() && pi.userMayRX() )
+ {
+ // trigger plugin
+ cmd[2] = pi.asString(); // [2] - PROGRAM
+ ExternalProgram prog( cmd, ExternalProgram::Stderr_To_Stdout );
+ }
+ }
+ }
+ }
+ catch (...) {} // no throw in dtor
+ }
+ }
+
public:
- bool repoEmpty() const { return _repos.empty(); }
- RepoSizeType repoSize() const { return _repos.size(); }
- RepoConstIterator repoBegin() const { return _repos.begin(); }
- RepoConstIterator repoEnd() const { return _repos.end(); }
+ bool repoEmpty() const { return repos().empty(); }
+ RepoSizeType repoSize() const { return repos().size(); }
+ RepoConstIterator repoBegin() const { return repos().begin(); }
+ RepoConstIterator repoEnd() const { return repos().end(); }
bool hasRepo( const std::string & alias ) const
- { return foundAliasIn( alias, _repos ); }
+ { return foundAliasIn( alias, repos() ); }
RepoInfo getRepo( const std::string & alias ) const
{
- RepoConstIterator it( findAlias( alias, _repos ) );
- return it == _repos.end() ? RepoInfo::noRepo : *it;
+ RepoConstIterator it( findAlias( alias, repos() ) );
+ return it == repos().end() ? RepoInfo::noRepo : *it;
}
public:
void buildCache( const RepoInfo & info, CacheBuildPolicy policy, OPT_PROGRESS );
repo::RepoType probe( const Url & url, const Pathname & path = Pathname() ) const;
+ repo::RepoType probeCache( const Pathname & path_r ) const;
void cleanCacheDirGarbage( OPT_PROGRESS );
void removeService( const ServiceInfo & service )
{ removeService( service.alias() ); }
- void refreshServices();
+ void refreshServices( const RefreshServiceOptions & options_r );
- void refreshService( const std::string & alias );
- void refreshService( const ServiceInfo & service )
- { refreshService( service.alias() ); }
+ void refreshService( const std::string & alias, const RefreshServiceOptions & options_r );
+ void refreshService( const ServiceInfo & service, const RefreshServiceOptions & options_r )
+ { refreshService( service.alias(), options_r ); }
void modifyService( const std::string & oldAlias, const ServiceInfo & newService );
void getRepositoriesInService( const std::string & alias, OutputIterator out ) const
{
MatchServiceAlias filter( alias );
- std::copy( boost::make_filter_iterator( filter, _repos.begin(), _repos.end() ),
- boost::make_filter_iterator( filter, _repos.end(), _repos.end() ),
+ std::copy( boost::make_filter_iterator( filter, repos().begin(), repos().end() ),
+ boost::make_filter_iterator( filter, repos().end(), repos().end() ),
out);
}
void init_knownServices();
void init_knownRepositories();
+ const RepoSet & repos() const { return _reposX; }
+ RepoSet & reposManip() { if ( ! _reposDirty ) _reposDirty = true; return _reposX; }
+
private:
RepoManagerOptions _options;
- RepoSet _repos;
+ RepoSet _reposX;
ServiceSet _services;
+ DefaultIntegral<bool,false> _reposDirty;
+
private:
friend Impl * rwcowClone<Impl>( const Impl * rhs );
/** clone for RWCOW_pointer */
repo::PluginServices(_options.pluginsPath/"services", ServiceCollector(_services));
}
+ ///////////////////////////////////////////////////////////////////
+ namespace {
+ /** Delete \a cachePath_r subdirs not matching known aliases in \a repoEscAliases_r (must be sorted!)
+ * \note bnc#891515: Auto-cleanup only zypp.conf default locations. Otherwise
+ * we'd need some magic file to identify zypp cache directories. Without this
+ * we may easily remove user data (zypper --pkg-cache-dir . download ...)
+ */
+ inline void cleanupNonRepoMetadtaFolders( const Pathname & cachePath_r,
+ const Pathname & defaultCachePath_r,
+ const std::list<std::string> & repoEscAliases_r )
+ {
+ if ( cachePath_r != defaultCachePath_r )
+ return;
+
+ std::list<std::string> entries;
+ if ( filesystem::readdir( entries, cachePath_r, false ) == 0 )
+ {
+ entries.sort();
+ std::set<std::string> oldfiles;
+ set_difference( entries.begin(), entries.end(), repoEscAliases_r.begin(), repoEscAliases_r.end(),
+ std::inserter( oldfiles, oldfiles.end() ) );
+ for ( const std::string & old : oldfiles )
+ {
+ if ( old == Repository::systemRepoAlias() ) // don't remove the @System solv file
+ continue;
+ filesystem::recursive_rmdir( cachePath_r / old );
+ }
+ }
+ }
+ } // namespace
+ ///////////////////////////////////////////////////////////////////
void RepoManager::Impl::init_knownRepositories()
{
MIL << "start construct known repos" << endl;
if ( PathInfo(_options.knownReposPath).isExist() )
{
std::list<std::string> repoEscAliases;
+ std::list<RepoInfo> orphanedRepos;
for ( RepoInfo & repoInfo : repositories_in_dir(_options.knownReposPath) )
{
// set the metadata path for the repo
repoInfo.setMetadataPath( rawcache_path_for_repoinfo(_options, repoInfo) );
// set the downloaded packages path for the repo
repoInfo.setPackagesPath( packagescache_path_for_repoinfo(_options, repoInfo) );
+ // remember it
+ _reposX.insert( repoInfo ); // direct access via _reposX in ctor! no reposManip.
+
+ // detect orphaned repos belonging to a deleted service
+ const std::string & serviceAlias( repoInfo.service() );
+ if ( ! ( serviceAlias.empty() || hasService( serviceAlias ) ) )
+ {
+ WAR << "Schedule orphaned service repo for deletion: " << repoInfo << endl;
+ orphanedRepos.push_back( repoInfo );
+ continue; // don't remember it in repoEscAliases
+ }
- _repos.insert( repoInfo );
repoEscAliases.push_back(repoInfo.escaped_alias());
}
- repoEscAliases.sort();
- // delete metadata folders without corresponding repo (e.g. old tmp directories)
- for ( const Pathname & cachePath : { _options.repoRawCachePath
- , _options.repoSolvCachePath } )
+      // Cleanup orphaned service repos:
+ if ( ! orphanedRepos.empty() )
{
- std::list<std::string> entries;
- if ( filesystem::readdir( entries, cachePath, false ) == 0 )
+ for ( const auto & repoInfo : orphanedRepos )
{
- entries.sort();
- std::set<std::string> oldfiles;
- set_difference( entries.begin(), entries.end(), repoEscAliases.begin(), repoEscAliases.end(),
- std::inserter( oldfiles, oldfiles.end() ) );
- for ( const std::string & old : oldfiles )
+ MIL << "Delete orphaned service repo " << repoInfo.alias() << endl;
+ // translators: Cleanup a repository previously owned by a meanwhile unknown (deleted) service.
+ // %1% = service name
+ // %2% = repository name
+ JobReport::warning( str::FormatNAC(_("Unknown service '%1%': Removing orphaned service repository '%2%'"))
+ % repoInfo.service()
+ % repoInfo.alias() );
+ try {
+ removeRepository( repoInfo );
+ }
+ catch ( const Exception & caugth )
{
- if ( old == Repository::systemRepoAlias() ) // don't remove the @System solv file
- continue;
- filesystem::recursive_rmdir( cachePath / old );
+ JobReport::error( caugth.asUserHistory() );
}
}
}
+
+ // delete metadata folders without corresponding repo (e.g. old tmp directories)
+ //
+ // bnc#891515: Auto-cleanup only zypp.conf default locations. Otherwise
+    // we'd need some magic file to identify zypp cache directories. Without this
+ // we may easily remove user data (zypper --pkg-cache-dir . download ...)
+ repoEscAliases.sort();
+ RepoManagerOptions defaultCache( _options.rootDir );
+ cleanupNonRepoMetadtaFolders( _options.repoRawCachePath, defaultCache.repoRawCachePath, repoEscAliases );
+ cleanupNonRepoMetadtaFolders( _options.repoSolvCachePath, defaultCache.repoSolvCachePath, repoEscAliases );
+ cleanupNonRepoMetadtaFolders( _options.repoPackagesCachePath, defaultCache.repoPackagesCachePath, repoEscAliases );
}
MIL << "end construct known repos" << endl;
}
RepoType repokind = info.type();
// If unknown, probe the local metadata
if ( repokind == RepoType::NONE )
- repokind = probe( productdatapath.asUrl() );
+ repokind = probeCache( productdatapath );
RepoStatus status;
switch ( repokind.toEnum() )
{
case RepoType::RPMMD_e :
- status = RepoStatus( productdatapath/"repodata/repomd.xml");
+ status = RepoStatus( productdatapath/"repodata/repomd.xml") && RepoStatus( mediarootpath/"media.1/media" );
break;
case RepoType::YAST2_e :
RepoType repokind = info.type();
if ( repokind.toEnum() == RepoType::NONE_e )
// unknown, probe the local metadata
- repokind = probe( productdatapath.asUrl() );
+ repokind = probeCache( productdatapath );
// if still unknown, just return
if (repokind == RepoType::NONE_e)
return;
assert_alias(info);
try
{
- MIL << "Going to try to check whether refresh is needed for " << url << endl;
+ MIL << "Going to try to check whether refresh is needed for " << url << " (" << info.type() << ")" << endl;
// first check old (cached) metadata
Pathname mediarootpath = rawcache_path_for_repoinfo( _options, info );
filesystem::assert_dir( mediarootpath );
RepoStatus oldstatus = metadataStatus( info );
-
if ( oldstatus.empty() )
{
MIL << "No cached metadata, going to refresh" << endl;
return REFRESH_NEEDED;
}
+ if ( url.schemeIsVolatile() )
{
- if ( url.schemeIsVolatile() )
- {
- MIL << "never refresh CD/DVD" << endl;
- return REPO_UP_TO_DATE;
- }
- if ( url.schemeIsLocal() )
- {
- policy = RefreshIfNeededIgnoreDelay;
- }
+ MIL << "Never refresh CD/DVD" << endl;
+ return REPO_UP_TO_DATE;
+ }
+
+ if ( policy == RefreshForced )
+ {
+ MIL << "Forced refresh!" << endl;
+ return REFRESH_NEEDED;
+ }
+
+ if ( url.schemeIsLocal() )
+ {
+ policy = RefreshIfNeededIgnoreDelay;
}
// now we've got the old (cached) status, we can decide repo.refresh.delay
- if (policy != RefreshForced && policy != RefreshIfNeededIgnoreDelay)
+ if ( policy != RefreshIfNeededIgnoreDelay )
{
// difference in seconds
double diff = difftime(
}
}
- // To test the new matadta create temp dir as sibling of mediarootpath
- filesystem::TmpDir tmpdir( filesystem::TmpDir::makeSibling( mediarootpath ) );
-
repo::RepoType repokind = info.type();
// if unknown: probe it
if ( repokind == RepoType::NONE )
}
// check status
- bool refresh = false;
if ( oldstatus == newstatus )
{
MIL << "repo has not changed" << endl;
- if ( policy == RefreshForced )
- {
- MIL << "refresh set to forced" << endl;
- refresh = true;
- }
+ touchIndexFile( info );
+ return REPO_UP_TO_DATE;
}
- else
+ else // includes newstatus.empty() if e.g. repo format changed
{
MIL << "repo has changed, going to refresh" << endl;
- refresh = true;
+ return REFRESH_NEEDED;
}
-
- if (!refresh)
- touchIndexFile(info);
-
- return refresh ? REFRESH_NEEDED : REPO_UP_TO_DATE;
-
}
catch ( const Exception &e )
{
assert_urls(info);
// we will throw this later if no URL checks out fine
- RepoException rexception(_PL("Valid metadata not found at specified URL",
- "Valid metadata not found at specified URLs",
- info.baseUrlsSize() ) );
+ RepoException rexception( info, PL_("Valid metadata not found at specified URL",
+ "Valid metadata not found at specified URLs",
+ info.baseUrlsSize() ) );
+      // Suppress (interactive) media::MediaChangeReport if we have multiple baseUrls (>1)
+ media::ScopedDisableMediaChangeReport guard( info.baseUrlsSize() > 1 );
// try urls one by one
for ( RepoInfo::urls_const_iterator it = info.baseUrlsBegin(); it != info.baseUrlsEnd(); ++it )
{
MIL << "Going to refresh metadata from " << url << endl;
+ // bsc#1048315: Always re-probe in case of repo format change.
+ // TODO: Would be sufficient to verify the type and re-probe
+ // if verification failed (or type is RepoType::NONE)
repo::RepoType repokind = info.type();
-
- // if the type is unknown, try probing.
- if ( repokind == RepoType::NONE )
{
- // unknown, probe it
- repokind = probe( *it, info.path() );
-
- if (repokind.toEnum() != RepoType::NONE_e)
+ repo::RepoType probed = probe( *it, info.path() );
+ if ( repokind != probed )
{
+ repokind = probed;
// Adjust the probed type in RepoInfo
info.setProbedType( repokind ); // lazy init!
//save probed type only for repos in system
}
else
{
- ZYPP_THROW(RepoUnknownTypeException());
+ ZYPP_THROW(RepoUnknownTypeException( info ));
}
// ok we have the metadata, now exchange
// the contents
filesystem::exchange( tmpdir.path(), mediarootpath );
+ if ( ! isTmpRepo( info ) )
+ reposManip(); // remember to trigger appdata refresh
// we are done.
return;
// cause of the problem of the first URL remembered
if (it == info.baseUrlsBegin())
rexception.remember(e);
+ else
+ rexception.addHistory( e.asUserString() );
+
}
} // for every url
ERR << "No more urls..." << endl;
if ( cache_status == raw_metadata_status )
{
MIL << info.alias() << " cache is up to date with metadata." << endl;
- if ( policy == BuildIfNeeded ) {
- return;
+ if ( policy == BuildIfNeeded )
+ {
+ // On the fly add missing solv.idx files for bash completion.
+ const Pathname & base = solv_path_for_repoinfo( _options, info);
+ if ( ! PathInfo(base/"solv.idx").isExist() )
+ sat::updateSolvFileIndex( base/"solv" );
+
+ return;
}
else {
MIL << info.alias() << " cache rebuild is forced" << endl;
{
case RepoType::NONE_e:
// unknown, probe the local metadata
- repokind = probe( productdatapath.asUrl() );
+ repokind = probeCache( productdatapath );
break;
default:
break;
scoped_ptr<MediaMounter> forPlainDirs;
ExternalProgram::Arguments cmd;
- cmd.push_back( "repo2solv.sh" );
+ cmd.push_back( PathInfo( "/usr/bin/repo2solv" ).isFile() ? "repo2solv" : "repo2solv.sh" );
// repo2solv expects -o as 1st arg!
cmd.push_back( "-o" );
cmd.push_back( solvfile.asString() );
if ( repokind == RepoType::RPMPLAINDIR )
{
- forPlainDirs.reset( new MediaMounter( *info.baseUrlsBegin() ) );
+ forPlainDirs.reset( new MediaMounter( info.url() ) );
// recusive for plaindir as 2nd arg!
cmd.push_back( "-R" );
// FIXME this does only work form dir: URLs
// We keep it.
guard.resetDispose();
+ sat::updateSolvFileIndex( solvfile ); // content digest for zypper bash completion
}
break;
default:
- ZYPP_THROW(RepoUnknownTypeException( _("Unhandled repository type") ));
+ ZYPP_THROW(RepoUnknownTypeException( info, _("Unhandled repository type") ));
break;
}
// update timestamp and checksum
////////////////////////////////////////////////////////////////////////////
+
+ /** Probe the metadata type of a repository located at \c url.
+ * Urls here may be rewritten by \ref MediaSetAccess to reflect the correct media number.
+ *
+ * \note Metadata in local cache directories must be probed using \ref probeCache as
+ * a cache path must not be rewritten (bnc#946129)
+ */
repo::RepoType RepoManager::Impl::probe( const Url & url, const Pathname & path ) const
{
MIL << "going to probe the repo type at " << url << " (" << path << ")" << endl;
return repo::RepoType::NONE;
}
+ /** Probe Metadata in a local cache directory
+ *
+ * \note Metadata in local cache directories must not be probed using \ref probe as
+ * a cache path must not be rewritten (bnc#946129)
+ */
+  repo::RepoType RepoManager::Impl::probeCache( const Pathname & path_r ) const
+  {
+    MIL << "going to probe the cached repo at " << path_r << endl;
+
+    // Probe order matters: look for the specific index files first;
+    // a bare directory counts as plaindir only if no index was found.
+    repo::RepoType ret = repo::RepoType::NONE;
+    if ( PathInfo(path_r/"/repodata/repomd.xml").isFile() )
+      ret = repo::RepoType::RPMMD;
+    else if ( PathInfo(path_r/"/content").isFile() )
+      ret = repo::RepoType::YAST2;
+    else if ( PathInfo(path_r).isDir() )
+      ret = repo::RepoType::RPMPLAINDIR;
+
+    MIL << "Probed cached type " << ret << " at " << path_r << endl;
+    return ret;
+  }
+
////////////////////////////////////////////////////////////////////////////
void RepoManager::Impl::cleanCacheDirGarbage( const ProgressData::ReceiverFnc & progressrcv )
{
Repository repo = sat::Pool::instance().addRepoSolv( solvfile, info );
// test toolversion in order to rebuild solv file in case
- // it was written by an old libsolv-tool parser.
- //
- // Known version strings used:
- // - <no string>
- // - "1.0"
- //
- sat::LookupRepoAttr toolversion( sat::SolvAttr::repositoryToolVersion, repo );
- if ( toolversion.begin().asString().empty() )
+ // it was written by a different libsolv-tool parser.
+ const std::string & toolversion( sat::LookupRepoAttr( sat::SolvAttr::repositoryToolVersion, repo ).begin().asString() );
+ if ( toolversion != LIBSOLV_TOOLVERSION )
{
repo.eraseFromPool();
- ZYPP_THROW(Exception("Solv-file was created by old parser."));
+ ZYPP_THROW(Exception(str::Str() << "Solv-file was created by '"<<toolversion<<"'-parser (want "<<LIBSOLV_TOOLVERSION<<")."));
}
- // else: up-to-date (or even newer).
}
catch ( const Exception & exp )
{
MIL << "Try adding repo " << info << endl;
RepoInfo tosave = info;
- if ( _repos.find(tosave) != _repos.end() )
+ if ( repos().find(tosave) != repos().end() )
ZYPP_THROW(RepoAlreadyExistsException(info));
// check the first url for now
if ( _options.probe )
{
DBG << "unknown repository type, probing" << endl;
+ assert_urls(tosave);
- RepoType probedtype;
- probedtype = probe( *tosave.baseUrlsBegin(), info.path() );
- if ( tosave.baseUrlsSize() > 0 )
- {
- if ( probedtype == RepoType::NONE )
- ZYPP_THROW(RepoUnknownTypeException());
- else
- tosave.setType(probedtype);
- }
+ RepoType probedtype( probe( tosave.url(), info.path() ) );
+ if ( probedtype == RepoType::NONE )
+ ZYPP_THROW(RepoUnknownTypeException(info));
+ else
+ tosave.setType(probedtype);
}
progress.set(50);
tosave.dumpAsIniOn(file);
tosave.setFilepath(repofile);
- tosave.setMetadataPath( metadataPath( tosave ) );
- tosave.setPackagesPath( packagesPath( tosave ) );
+ tosave.setMetadataPath( rawcache_path_for_repoinfo( _options, tosave ) );
+ tosave.setPackagesPath( packagescache_path_for_repoinfo( _options, tosave ) );
{
- // We chould fix the API as we must injet those paths
+ // We should fix the API as we must inject those paths
// into the repoinfo in order to keep it usable.
RepoInfo & oinfo( const_cast<RepoInfo &>(info) );
- oinfo.setMetadataPath( metadataPath( tosave ) );
- oinfo.setPackagesPath( packagesPath( tosave ) );
+ oinfo.setFilepath(repofile);
+ oinfo.setMetadataPath( rawcache_path_for_repoinfo( _options, tosave ) );
+ oinfo.setPackagesPath( packagescache_path_for_repoinfo( _options, tosave ) );
}
- _repos.insert(tosave);
+ reposManip().insert(tosave);
progress.set(90);
// check for credentials in Urls
- bool havePasswords = false;
- for_( urlit, tosave.baseUrlsBegin(), tosave.baseUrlsEnd() )
- if ( urlit->hasCredentialsInAuthority() )
- {
- havePasswords = true;
- break;
- }
- // save the credentials
- if ( havePasswords )
- {
- media::CredentialManager cm(
- media::CredManagerOptions(_options.rootDir) );
+ UrlCredentialExtractor( _options.rootDir ).collect( tosave.baseUrls() );
- for_(urlit, tosave.baseUrlsBegin(), tosave.baseUrlsEnd())
- if (urlit->hasCredentialsInAuthority())
- //! \todo use a method calling UI callbacks to ask where to save creds?
- cm.saveInUser(media::AuthData(*urlit));
- }
-
- HistoryLog().addRepository(tosave);
+ HistoryLog(_options.rootDir).addRepository(tosave);
progress.toMax();
MIL << "done" << endl;
++it )
{
MIL << "Saving " << (*it).alias() << endl;
- it->setFilepath(repofile.asString());
it->dumpAsIniOn(file);
- _repos.insert(*it);
+ it->setFilepath(repofile);
+ it->setMetadataPath( rawcache_path_for_repoinfo( _options, *it ) );
+ it->setPackagesPath( packagescache_path_for_repoinfo( _options, *it ) );
+ reposManip().insert(*it);
HistoryLog(_options.rootDir).addRepository(*it);
}
RepoInfo todelete = *it;
if (todelete.filepath().empty())
{
- ZYPP_THROW(RepoException( _("Can't figure out where the repo is stored.") ));
+ ZYPP_THROW(RepoException( todelete, _("Can't figure out where the repo is stored.") ));
}
else
{
// figure how many repos are there in the file:
std::list<RepoInfo> filerepos = repositories_in_file(todelete.filepath());
- if ( (filerepos.size() == 1) && ( filerepos.front().alias() == todelete.alias() ) )
+ if ( filerepos.size() == 0 // bsc#984494: file may have already been deleted
+ ||(filerepos.size() == 1 && filerepos.front().alias() == todelete.alias() ) )
{
- // easy, only this one, just delete the file
- if ( filesystem::unlink(todelete.filepath()) != 0 )
+ // easy: file does not exist, contains no or only the repo to delete: delete the file
+ int ret = filesystem::unlink( todelete.filepath() );
+ if ( ! ( ret == 0 || ret == ENOENT ) )
{
// TranslatorExplanation '%s' is a filename
- ZYPP_THROW(RepoException(str::form( _("Can't delete '%s'"), todelete.filepath().c_str() )));
+ ZYPP_THROW(RepoException( todelete, str::form( _("Can't delete '%s'"), todelete.filepath().c_str() )));
}
- MIL << todelete.alias() << " sucessfully deleted." << endl;
+ MIL << todelete.alias() << " successfully deleted." << endl;
}
else
{
// now delete metadata (#301037)
cleanMetadata( todelete, mSubprogrcv );
cleanPackages( todelete, pSubprogrcv );
- _repos.erase(todelete);
- MIL << todelete.alias() << " sucessfully deleted." << endl;
+ reposManip().erase(todelete);
+ MIL << todelete.alias() << " successfully deleted." << endl;
HistoryLog(_options.rootDir).removeRepository(todelete);
return;
} // else filepath is empty
if (toedit.filepath().empty())
{
- ZYPP_THROW(RepoException( _("Can't figure out where the repo is stored.") ));
+ ZYPP_THROW(RepoException( toedit, _("Can't figure out where the repo is stored.") ));
}
else
{
newinfo.dumpAsIniOn(file);
}
+ if ( toedit.enabled() && !newinfo.enabled() )
+ {
+ // On the fly remove solv.idx files for bash completion if a repo gets disabled.
+ const Pathname & solvidx = solv_path_for_repoinfo(_options, newinfo)/"solv.idx";
+ if ( PathInfo(solvidx).isExist() )
+ filesystem::unlink( solvidx );
+ }
+
newinfo.setFilepath(toedit.filepath());
- _repos.erase(toedit);
- _repos.insert(newinfo);
+ newinfo.setMetadataPath( rawcache_path_for_repoinfo( _options, newinfo ) );
+ newinfo.setPackagesPath( packagescache_path_for_repoinfo( _options, newinfo ) );
+ {
+ // We should fix the API as we must inject those paths
+ // into the repoinfo in order to keep it usable.
+ RepoInfo & oinfo( const_cast<RepoInfo &>(newinfo_r) );
+ oinfo.setFilepath(toedit.filepath());
+ oinfo.setMetadataPath( rawcache_path_for_repoinfo( _options, newinfo ) );
+ oinfo.setPackagesPath( packagescache_path_for_repoinfo( _options, newinfo ) );
+ }
+ reposManip().erase(toedit);
+ reposManip().insert(newinfo);
+ // check for credentials in Urls
+ UrlCredentialExtractor( _options.rootDir ).collect( newinfo.baseUrls() );
HistoryLog(_options.rootDir).modifyRepository(toedit, newinfo);
MIL << "repo " << alias << " modified" << endl;
}
RepoInfo RepoManager::Impl::getRepositoryInfo( const std::string & alias, const ProgressData::ReceiverFnc & progressrcv )
{
- RepoConstIterator it( findAlias( alias, _repos ) );
- if ( it != _repos.end() )
+ RepoConstIterator it( findAlias( alias, repos() ) );
+ if ( it != repos().end() )
return *it;
RepoInfo info;
info.setAlias( alias );
saveService( toSave );
_services.insert( toSave );
- // check for credentials in Url (username:password, not ?credentials param)
- if ( toSave.url().hasCredentialsInAuthority() )
- {
- media::CredentialManager cm(
- media::CredManagerOptions(_options.rootDir) );
-
- //! \todo use a method calling UI callbacks to ask where to save creds?
- cm.saveInUser(media::AuthData(toSave.url()));
- }
+ // check for credentials in Url
+ UrlCredentialExtractor( _options.rootDir ).collect( toSave.url() );
MIL << "added service " << toSave.alias() << endl;
}
Pathname location = service.filepath();
if( location.empty() )
{
- ZYPP_THROW(ServiceException( _("Can't figure out where the service is stored.") ));
+ ZYPP_THROW(ServiceException( service, _("Can't figure out where the service is stored.") ));
}
ServiceSet tmpSet;
if ( filesystem::unlink(location) != 0 )
{
// TranslatorExplanation '%s' is a filename
- ZYPP_THROW(ServiceException(str::form( _("Can't delete '%s'"), location.c_str() )));
+ ZYPP_THROW(ServiceException( service, str::form( _("Can't delete '%s'"), location.c_str() ) ));
}
- MIL << alias << " sucessfully deleted." << endl;
+ MIL << alias << " successfully deleted." << endl;
}
else
{
it->dumpAsIniOn(file);
}
- MIL << alias << " sucessfully deleted from file " << location << endl;
+ MIL << alias << " successfully deleted from file " << location << endl;
}
// now remove all repositories added by this service
////////////////////////////////////////////////////////////////////////////
- void RepoManager::Impl::refreshServices()
+ void RepoManager::Impl::refreshServices( const RefreshServiceOptions & options_r )
{
// copy the set of services since refreshService
// can eventually invalidate the iterator
continue;
try {
- refreshService(*it);
+ refreshService(*it, options_r);
}
catch ( const repo::ServicePluginInformalException & e )
{ ;/* ignore ServicePluginInformalException */ }
}
}
- void RepoManager::Impl::refreshService( const std::string & alias )
+ void RepoManager::Impl::refreshService( const std::string & alias, const RefreshServiceOptions & options_r )
{
ServiceInfo service( getService( alias ) );
assert_alias( service );
assert_url( service );
+ MIL << "Going to refresh service '" << service.alias() << "', url: " << service.url() << ", opts: " << options_r << endl;
+
+ if ( service.ttl() && !( options_r.testFlag( RefreshService_forceRefresh) || options_r.testFlag( RefreshService_restoreStatus ) ) )
+ {
+ // Service defines a TTL; maybe we can re-use existing data without refresh.
+ Date lrf = service.lrf();
+ if ( lrf )
+ {
+ Date now( Date::now() );
+ if ( lrf <= now )
+ {
+ if ( (lrf+=service.ttl()) > now ) // lrf+= !
+ {
+ MIL << "Skip: '" << service.alias() << "' metadata valid until " << lrf << endl;
+ return;
+ }
+ }
+ else
+ WAR << "Force: '" << service.alias() << "' metadata last refresh in the future: " << lrf << endl;
+ }
+ }
+
// NOTE: It might be necessary to modify and rewrite the service info.
// Either when probing the type, or when adjusting the repositories
// enable/disable state.:
bool serviceModified = false;
- MIL << "Going to refresh service '" << service.alias() << "', url: "<< service.url() << endl;
- //! \todo add callbacks for apps (start, end, repo removed, repo added, repo changed)
+ //! \todo add callbacks for apps (start, end, repo removed, repo added, repo changed)?
// if the type is unknown, try probing.
if ( service.type() == repo::ServiceType::NONE )
DBG << "ServicesTargetDistro: " << servicesTargetDistro << endl;
// parse it
+ Date::Duration origTtl = service.ttl(); // FIXME Ugly hack: const service.ttl modified when parsing
RepoCollector collector(servicesTargetDistro);
// FIXME Ugly hack: ServiceRepos may throw ServicePluginInformalException
// which is actually a notification. Using an exception for this
// and in zypper.
std::pair<DefaultIntegral<bool,false>, repo::ServicePluginInformalException> uglyHack;
try {
- ServiceRepos repos(service, bind( &RepoCollector::collect, &collector, _1 ));
+ ServiceRepos( service, bind( &RepoCollector::collect, &collector, _1 ) );
}
catch ( const repo::ServicePluginInformalException & e )
{
uglyHack.first = true;
uglyHack.second = e;
}
-
+ if ( service.ttl() != origTtl ) // repoindex.xml changed ttl
+ {
+ if ( !service.ttl() )
+ service.setLrf( Date() ); // don't need lrf when zero ttl
+ serviceModified = true;
+ }
////////////////////////////////////////////////////////////////////////////
// On the fly remember the new repo states as defined in the repoindex.xml.
// Move into ServiceInfo later.
{
// First of all: Prepend service alias:
it->setAlias( str::form( "%s:%s", service.alias().c_str(), it->alias().c_str() ) );
- // set refrence to the parent service
+ // set reference to the parent service
it->setService( service.alias() );
// remember the new parsed repo state
newRepoStates[it->alias()] = *it;
- // if the repo url was not set by the repoindex parser, set service's url
- Url url;
- if ( it->baseUrlsEmpty() )
- url = service.url();
- else
+ // - If the repo url was not set by the repoindex parser, set service's url.
+ // - Libzypp currently has problem with separate url + path handling so just
+ // append a path, if set, to the baseurls
+ // - Credentials in the url authority will be extracted later, either if the
+ // repository is added or if we check for changed urls.
+ Pathname path;
+ if ( !it->path().empty() )
{
- // service repo can contain only one URL now, so no need to iterate.
- url = *it->baseUrlsBegin();
+ if ( it->path() != "/" )
+ path = it->path();
+ it->setPath("");
}
- // libzypp currently has problem with separate url + path handling
- // so just append the path to the baseurl
- if ( !it->path().empty() )
+ if ( it->baseUrlsEmpty() )
{
- Pathname path(url.getPathName());
- path /= it->path();
- url.setPathName( path.asString() );
- it->setPath("");
+ Url url( service.rawUrl() );
+ if ( !path.empty() )
+ url.setPathName( url.getPathName() / path );
+ it->setBaseUrl( std::move(url) );
+ }
+ else if ( !path.empty() )
+ {
+ RepoInfo::url_set urls( it->rawBaseUrls() );
+ for ( Url & url : urls )
+ {
+ url.setPathName( url.getPathName() / path );
+ }
+ it->setBaseUrls( std::move(urls) );
}
-
- // save the url
- it->setBaseUrl( url );
}
////////////////////////////////////////////////////////////////////////////
}
////////////////////////////////////////////////////////////////////////////
- // create missing repositories and modify exising ones if needed...
+ // create missing repositories and modify existing ones if needed...
+ UrlCredentialExtractor urlCredentialExtractor( _options.rootDir ); // To collect any credentials stored in repo URLs
for_( it, collector.repos.begin(), collector.repos.end() )
{
// User explicitly requested the repo being enabled?
TriBool toBeEnabled( indeterminate ); // indeterminate - follow the service request
DBG << "Service request to " << (it->enabled()?"enable":"disable") << " service repo " << it->alias() << endl;
- if ( service.repoToEnableFind( it->alias() ) )
+ if ( options_r.testFlag( RefreshService_restoreStatus ) )
{
- DBG << "User request to enable service repo " << it->alias() << endl;
- toBeEnabled = true;
- // Remove from enable request list.
- // NOTE: repoToDisable is handled differently.
- // It gets cleared on each refresh.
- service.delRepoToEnable( it->alias() );
- serviceModified = true;
+ DBG << "Opt RefreshService_restoreStatus " << it->alias() << endl;
+ // this overrides any pending request!
+ // Remove from enable request list.
+ // NOTE: repoToDisable is handled differently.
+ // It gets cleared on each refresh.
+ service.delRepoToEnable( it->alias() );
+ // toBeEnabled stays indeterminate!
}
- else if ( service.repoToDisableFind( it->alias() ) )
+ else
{
- DBG << "User request to disable service repo " << it->alias() << endl;
- toBeEnabled = false;
+ if ( service.repoToEnableFind( it->alias() ) )
+ {
+ DBG << "User request to enable service repo " << it->alias() << endl;
+ toBeEnabled = true;
+ // Remove from enable request list.
+ // NOTE: repoToDisable is handled differently.
+ // It gets cleared on each refresh.
+ service.delRepoToEnable( it->alias() );
+ serviceModified = true;
+ }
+ else if ( service.repoToDisableFind( it->alias() ) )
+ {
+ DBG << "User request to disable service repo " << it->alias() << endl;
+ toBeEnabled = false;
+ }
}
-
RepoInfoList::iterator oldRepo( findAlias( it->alias(), oldRepos ) );
if ( oldRepo == oldRepos.end() )
{
// NOTE: Assert toBeEnabled is boolean afterwards!
if ( oldRepo->enabled() == it->enabled() )
toBeEnabled = it->enabled(); // service requests no change to the system
+ else if (options_r.testFlag( RefreshService_restoreStatus ) )
+ {
+ toBeEnabled = it->enabled(); // RefreshService_restoreStatus forced
+ DBG << "Opt RefreshService_restoreStatus " << it->alias() << " forces " << (toBeEnabled?"enabled":"disabled") << endl;
+ }
else
{
const auto & last = service.repoStates().find( oldRepo->alias() );
// all other attributes follow the service request:
+ // changed name (raw!)
+ if ( oldRepo->rawName() != it->rawName() )
+ {
+ DBG << "Service repo " << it->alias() << " gets new NAME " << it->rawName() << endl;
+ oldRepo->setName( it->rawName() );
+ oldRepoModified = true;
+ }
+
// changed autorefresh
if ( oldRepo->autorefresh() != it->autorefresh() )
{
}
// changed url?
- // service repo can contain only one URL now, so no need to iterate.
- if ( oldRepo->url() != it->url() )
{
- DBG << "Service repo " << it->alias() << " gets new URL " << it->url() << endl;
- oldRepo->setBaseUrl( it->url() );
- oldRepoModified = true;
- }
+ RepoInfo::url_set newUrls( it->rawBaseUrls() );
+ urlCredentialExtractor.extract( newUrls ); // Extract! to prevent passwds from disturbing the comparison below
+ if ( oldRepo->rawBaseUrls() != newUrls )
+ {
+ DBG << "Service repo " << it->alias() << " gets new URLs " << newUrls << endl;
+ oldRepo->setBaseUrls( std::move(newUrls) );
+ oldRepoModified = true;
+ }
+ }
+
+ // changed gpg check settings?
+ // ATM only plugin services can set GPG values.
+ if ( service.type() == ServiceType::PLUGIN )
+ {
+ TriBool ogpg[3]; // Gpg RepoGpg PkgGpg
+ TriBool ngpg[3];
+ oldRepo->getRawGpgChecks( ogpg[0], ogpg[1], ogpg[2] );
+ it-> getRawGpgChecks( ngpg[0], ngpg[1], ngpg[2] );
+#define Z_CHKGPG(I,N) \
+ if ( ! sameTriboolState( ogpg[I], ngpg[I] ) ) \
+ { \
+ DBG << "Service repo " << it->alias() << " gets new "#N"Check " << ngpg[I] << endl; \
+ oldRepo->set##N##Check( ngpg[I] ); \
+ oldRepoModified = true; \
+ }
+ Z_CHKGPG( 0, Gpg );
+ Z_CHKGPG( 1, RepoGpg );
+ Z_CHKGPG( 2, PkgGpg );
+#undef Z_CHKGPG
+ }
// save if modified:
if ( oldRepoModified )
}
////////////////////////////////////////////////////////////////////////////
- // save service if modified:
- if ( serviceModified )
+ // save service if modified: (unless a plugin service)
+ if ( service.type() != ServiceType::PLUGIN )
{
- // write out modified service file.
- modifyService( service.alias(), service );
+ if ( service.ttl() )
+ {
+ service.setLrf( Date::now() ); // remember last refresh
+ serviceModified = true; // or use a cookie file
+ }
+
+ if ( serviceModified )
+ {
+ // write out modified service file.
+ modifyService( service.alias(), service );
+ }
}
if ( uglyHack.first )
if ( service.type() == ServiceType::PLUGIN )
{
- ZYPP_THROW(ServicePluginImmutableException());
+ ZYPP_THROW(ServicePluginImmutableException( service ));
}
const ServiceInfo & oldService = getService(oldAlias);
Pathname location = oldService.filepath();
if( location.empty() )
{
- ZYPP_THROW(ServiceException( _("Can't figure out where the service is stored.") ));
+ ZYPP_THROW(ServiceException( oldService, _("Can't figure out where the service is stored.") ));
}
// remember: there may multiple services being defined in one file:
_services.erase(oldAlias);
_services.insert(service);
+ // check for credentials in Urls
+ UrlCredentialExtractor( _options.rootDir ).collect( service.url() );
+
// changed properties affecting also repositories
if ( oldAlias != service.alias() // changed alias
void RepoManager::removeService( const ServiceInfo & service )
{ return _pimpl->removeService( service ); }
- void RepoManager::refreshServices()
- { return _pimpl->refreshServices(); }
+ void RepoManager::refreshServices( const RefreshServiceOptions & options_r )
+ { return _pimpl->refreshServices( options_r ); }
- void RepoManager::refreshService( const std::string & alias )
- { return _pimpl->refreshService( alias ); }
+ void RepoManager::refreshService( const std::string & alias, const RefreshServiceOptions & options_r )
+ { return _pimpl->refreshService( alias, options_r ); }
- void RepoManager::refreshService( const ServiceInfo & service )
- { return _pimpl->refreshService( service ); }
+ void RepoManager::refreshService( const ServiceInfo & service, const RefreshServiceOptions & options_r )
+ { return _pimpl->refreshService( service, options_r ); }
void RepoManager::modifyService( const std::string & oldAlias, const ServiceInfo & service )
{ return _pimpl->modifyService( oldAlias, service ); }