///////////////////////////////////////////////////////////////////
namespace
{
+ ///////////////////////////////////////////////////////////////////
+ /// \class UrlCredentialExtractor
+ /// \brief Extract credentials in \ref Url authority and store them via \ref CredentialManager.
+ ///
+ /// The \ref CredentialManager is created lazily upon the first URL that
+ /// actually carries credentials in its authority; all collected
+ /// credentials are saved when the extractor goes out of scope (dtor).
+ ///
+ /// Methods return whether a password has been collected/extracted.
+ ///
+ /// \code
+ /// UrlCredentialExtractor( "/rootdir" ).collect( oneUrlOrUrlContainer );
+ /// \endcode
+ /// \code
+ /// {
+ /// UrlCredentialExtractor extractCredentials;
+ /// extractCredentials.collect( oneUrlOrUrlContainer );
+ /// extractCredentials.extract( oneMoreUrlOrUrlContainer );
+ /// ....
+ /// }
+ /// \endcode
+ ///
+ /// \note NOTE(review): the ctor takes a non-const \c Pathname&, so the
+ /// \c UrlCredentialExtractor("/rootdir") example above would not compile
+ /// (a temporary cannot bind to a non-const reference) — confirm whether
+ /// the parameter was meant to be \c const \c Pathname&.
+ class UrlCredentialExtractor
+ {
+ public:
+ /// \param root_r Root dir used when creating the CredentialManager.
+ /// Only a reference is stored, so \a root_r must outlive this object.
+ UrlCredentialExtractor( Pathname & root_r )
+ : _root( root_r )
+ {}
+
+ /// Save collected credentials (only if the manager was ever created).
+ ~UrlCredentialExtractor()
+ { if ( _cmPtr ) _cmPtr->save(); }
+
+ /** Remember credentials stored in URL authority leaving the password in \a url_r. */
+ bool collect( const Url & url_r )
+ {
+ bool ret = url_r.hasCredentialsInAuthority();
+ if ( ret )
+ {
+ // Lazy init: create the CredentialManager only when actually needed.
+ if ( !_cmPtr ) _cmPtr.reset( new media::CredentialManager( _root ) );
+ _cmPtr->addUserCred( url_r );
+ }
+ return ret;
+ }
+ /** \overload operating on Url container; \c true if at least one url had credentials. */
+ template<class TContainer>
+ bool collect( const TContainer & urls_r )
+ { bool ret = false; for ( const Url & url : urls_r ) { if ( collect( url ) && !ret ) ret = true; } return ret; }
+
+ /** Remember credentials stored in URL authority stripping the password from \a url_r. */
+ bool extract( Url & url_r )
+ {
+ bool ret = collect( url_r );
+ if ( ret )
+ url_r.setPassword( std::string() );
+ return ret;
+ }
+ /** \overload operating on Url container; \c true if at least one url had credentials. */
+ template<class TContainer>
+ bool extract( TContainer & urls_r )
+ { bool ret = false; for ( Url & url : urls_r ) { if ( extract( url ) && !ret ) ret = true; } return ret; }
+
+ private:
+ const Pathname & _root; // not owned; must outlive the extractor
+ scoped_ptr<media::CredentialManager> _cmPtr; // lazily created; saved in dtor
+ };
+ } // namespace
+ ///////////////////////////////////////////////////////////////////
+
+ ///////////////////////////////////////////////////////////////////
+ namespace
+ {
/** Simple media mounter to access non-downloading URLs e.g. for non-local plaindir repos.
* \ingroup g_RAII
*/
bool nonroot( geteuid() != 0 );
if ( nonroot && ! PathInfo(dir).userMayRX() )
{
- JobReport::warning( formatNAC(_("Cannot read repo directory '%1%': Permission denied")) % dir );
+ JobReport::warning( str::FormatNAC(_("Cannot read repo directory '%1%': Permission denied")) % dir );
}
else
{
{
if ( nonroot && ! PathInfo(*it).userMayR() )
{
- JobReport::warning( formatNAC(_("Cannot read repo file '%1%': Permission denied")) % *it );
+ JobReport::warning( str::FormatNAC(_("Cannot read repo file '%1%': Permission denied")) % *it );
}
else
{
// translators: Cleanup a repository previously owned by a meanwhile unknown (deleted) service.
// %1% = service name
// %2% = repository name
- JobReport::warning( formatNAC(_("Unknown service '%1%': Removing orphaned service repository '%2%'" ))
+ JobReport::warning( str::FormatNAC(_("Unknown service '%1%': Removing orphaned service repository '%2%'"))
% repoInfo.service()
% repoInfo.alias() );
try {
assert_urls(info);
// we will throw this later if no URL checks out fine
- RepoException rexception( info, PL_("Valid metadata not found at specified URL",
+ RepoException rexception( info, _PL("Valid metadata not found at specified URL",
"Valid metadata not found at specified URLs",
info.baseUrlsSize() ) );
// cause of the problem of the first URL remembered
if (it == info.baseUrlsBegin())
rexception.remember(e);
+ else
+ rexception.addHistory( e.asUserString() );
+
}
} // for every url
ERR << "No more urls..." << endl;
if ( cache_status == raw_metadata_status )
{
MIL << info.alias() << " cache is up to date with metadata." << endl;
- if ( policy == BuildIfNeeded )
- {
- // On the fly add missing solv.idx files for bash completion.
- const Pathname & base = solv_path_for_repoinfo( _options, info);
- if ( ! PathInfo(base/"solv.idx").isExist() )
- sat::updateSolvFileIndex( base/"solv" );
-
- return;
+ if ( policy == BuildIfNeeded ) {
+ return;
}
else {
MIL << info.alias() << " cache rebuild is forced" << endl;
// We keep it.
guard.resetDispose();
- sat::updateSolvFileIndex( solvfile ); // content digest for zypper bash completion
}
break;
default:
progress.set(90);
// check for credentials in Urls
- bool havePasswords = false;
- for_( urlit, tosave.baseUrlsBegin(), tosave.baseUrlsEnd() )
- if ( urlit->hasCredentialsInAuthority() )
- {
- havePasswords = true;
- break;
- }
- // save the credentials
- if ( havePasswords )
- {
- media::CredentialManager cm(
- media::CredManagerOptions(_options.rootDir) );
-
- for_(urlit, tosave.baseUrlsBegin(), tosave.baseUrlsEnd())
- if (urlit->hasCredentialsInAuthority())
- //! \todo use a method calling UI callbacks to ask where to save creds?
- cm.saveInUser(media::AuthData(*urlit));
- }
+ UrlCredentialExtractor( _options.rootDir ).collect( tosave.baseUrls() );
- HistoryLog().addRepository(tosave);
+ HistoryLog(_options.rootDir).addRepository(tosave);
progress.toMax();
MIL << "done" << endl;
{
// figure how many repos are there in the file:
std::list<RepoInfo> filerepos = repositories_in_file(todelete.filepath());
- if ( (filerepos.size() == 1) && ( filerepos.front().alias() == todelete.alias() ) )
+ if ( filerepos.size() == 0 // bsc#984494: file may have already been deleted
+ ||(filerepos.size() == 1 && filerepos.front().alias() == todelete.alias() ) )
{
- // easy, only this one, just delete the file
- if ( filesystem::unlink(todelete.filepath()) != 0 )
+ // easy: file does not exist, contains no or only the repo to delete: delete the file
+ int ret = filesystem::unlink( todelete.filepath() );
+ if ( ! ( ret == 0 || ret == ENOENT ) )
{
// TranslatorExplanation '%s' is a filename
ZYPP_THROW(RepoException( todelete, str::form( _("Can't delete '%s'"), todelete.filepath().c_str() )));
newinfo.dumpAsIniOn(file);
}
- if ( toedit.enabled() && !newinfo.enabled() )
- {
- // On the fly remove solv.idx files for bash completion if a repo gets disabled.
- const Pathname & solvidx = solv_path_for_repoinfo(_options, newinfo)/"solv.idx";
- if ( PathInfo(solvidx).isExist() )
- filesystem::unlink( solvidx );
- }
-
newinfo.setFilepath(toedit.filepath());
reposManip().erase(toedit);
reposManip().insert(newinfo);
+ // check for credentials in Urls
+ UrlCredentialExtractor( _options.rootDir ).collect( newinfo.baseUrls() );
HistoryLog(_options.rootDir).modifyRepository(toedit, newinfo);
MIL << "repo " << alias << " modified" << endl;
}
saveService( toSave );
_services.insert( toSave );
- // check for credentials in Url (username:password, not ?credentials param)
- if ( toSave.url().hasCredentialsInAuthority() )
- {
- media::CredentialManager cm(
- media::CredManagerOptions(_options.rootDir) );
-
- //! \todo use a method calling UI callbacks to ask where to save creds?
- cm.saveInUser(media::AuthData(toSave.url()));
- }
+ // check for credentials in Url
+ UrlCredentialExtractor( _options.rootDir ).collect( toSave.url() );
MIL << "added service " << toSave.alias() << endl;
}
ServiceInfo service( getService( alias ) );
assert_alias( service );
assert_url( service );
- MIL << "Going to refresh service '" << service.alias() << "', url: " << service.url() << ", opts: " << options_r << endl;
-
- if ( service.ttl() && !options_r.testFlag( RefreshService_forceRefresh ) )
- {
- // Service defines a TTL; maybe we can re-use existing data without refresh.
- Date lrf = service.lrf();
- if ( lrf )
- {
- Date now( Date::now() );
- if ( lrf <= now )
- {
- if ( (lrf+=service.ttl()) > now ) // lrf+= !
- {
- MIL << "Skip: '" << service.alias() << "' metadata valid until " << lrf << endl;
- return;
- }
- }
- else
- WAR << "Force: '" << service.alias() << "' metadata last refresh in the future: " << lrf << endl;
- }
- }
-
// NOTE: It might be necessary to modify and rewrite the service info.
// Either when probing the type, or when adjusting the repositories
// enable/disable state.:
bool serviceModified = false;
+ MIL << "Going to refresh service '" << service.alias() << "', url: "<< service.url() << ", opts: " << options_r << endl;
- //! \todo add callbacks for apps (start, end, repo removed, repo added, repo changed)?
+ //! \todo add callbacks for apps (start, end, repo removed, repo added, repo changed)
// if the type is unknown, try probing.
if ( service.type() == repo::ServiceType::NONE )
DBG << "ServicesTargetDistro: " << servicesTargetDistro << endl;
// parse it
- Date::Duration origTtl = service.ttl(); // FIXME Ugly hack: const service.ttl modified when parsing
RepoCollector collector(servicesTargetDistro);
// FIXME Ugly hack: ServiceRepos may throw ServicePluginInformalException
// which is actually a notification. Using an exception for this
// and in zypper.
std::pair<DefaultIntegral<bool,false>, repo::ServicePluginInformalException> uglyHack;
try {
- ServiceRepos( service, bind( &RepoCollector::collect, &collector, _1 ) );
+ ServiceRepos repos(service, bind( &RepoCollector::collect, &collector, _1 ));
}
catch ( const repo::ServicePluginInformalException & e )
{
uglyHack.first = true;
uglyHack.second = e;
}
- if ( service.ttl() != origTtl ) // repoindex.xml changed ttl
- {
- if ( !service.ttl() )
- service.setLrf( Date() ); // don't need lrf when zero ttl
- serviceModified = true;
- }
+
////////////////////////////////////////////////////////////////////////////
// On the fly remember the new repo states as defined by the repoindex.xml.
// Move into ServiceInfo later.
{
// First of all: Prepend service alias:
it->setAlias( str::form( "%s:%s", service.alias().c_str(), it->alias().c_str() ) );
- // set refrence to the parent service
+ // set reference to the parent service
it->setService( service.alias() );
// remember the new parsed repo state
newRepoStates[it->alias()] = *it;
- // if the repo url was not set by the repoindex parser, set service's url
- Url url;
- if ( it->baseUrlsEmpty() )
- url = service.rawUrl();
- else
+ // - If the repo url was not set by the repoindex parser, set service's url.
+ // - Libzypp currently has problem with separate url + path handling so just
+ // append a path, if set, to the baseurls
+ // - Credentials in the url authority will be extracted later, either if the
+ // repository is added or if we check for changed urls.
+ Pathname path;
+ if ( !it->path().empty() )
{
- // service repo can contain only one URL now, so no need to iterate.
- url = it->rawUrl(); // raw!
+ if ( it->path() != "/" )
+ path = it->path();
+ it->setPath("");
}
- // libzypp currently has problem with separate url + path handling
- // so just append the path to the baseurl
- if ( !it->path().empty() )
+ if ( it->baseUrlsEmpty() )
{
- Pathname path(url.getPathName());
- path /= it->path();
- url.setPathName( path.asString() );
- it->setPath("");
+ Url url( service.rawUrl() );
+ if ( !path.empty() )
+ url.setPathName( url.getPathName() / path );
+ it->setBaseUrl( std::move(url) );
+ }
+ else if ( !path.empty() )
+ {
+ RepoInfo::url_set urls( it->rawBaseUrls() );
+ for ( Url & url : urls )
+ {
+ url.setPathName( url.getPathName() / path );
+ }
+ it->setBaseUrls( std::move(urls) );
}
-
- // save the url
- it->setBaseUrl( url );
}
////////////////////////////////////////////////////////////////////////////
}
////////////////////////////////////////////////////////////////////////////
- // create missing repositories and modify exising ones if needed...
+ // create missing repositories and modify existing ones if needed...
+ UrlCredentialExtractor urlCredentialExtractor( _options.rootDir ); // To collect any credentials stored in repo URLs
for_( it, collector.repos.begin(), collector.repos.end() )
{
// User explicitly requested the repo being enabled?
}
// changed url?
- // service repo can contain only one URL now, so no need to iterate.
- if ( oldRepo->rawUrl() != it->rawUrl() )
{
- DBG << "Service repo " << it->alias() << " gets new URL " << it->rawUrl() << endl;
- oldRepo->setBaseUrl( it->rawUrl() );
- oldRepoModified = true;
- }
+ RepoInfo::url_set newUrls( it->rawBaseUrls() );
+ urlCredentialExtractor.extract( newUrls ); // Extract! to prevent passwds from disturbing the comparison below
+ if ( oldRepo->rawBaseUrls() != newUrls )
+ {
+ DBG << "Service repo " << it->alias() << " gets new URLs " << newUrls << endl;
+ oldRepo->setBaseUrls( std::move(newUrls) );
+ oldRepoModified = true;
+ }
+ }
+
+ // changed gpg check settings?
+ // ATM only plugin services can set GPG values.
+ if ( service.type() == ServiceType::PLUGIN )
+ {
+ TriBool ogpg[3]; // Gpg RepoGpg PkgGpg
+ TriBool ngpg[3];
+ oldRepo->getRawGpgChecks( ogpg[0], ogpg[1], ogpg[2] );
+ it-> getRawGpgChecks( ngpg[0], ngpg[1], ngpg[2] );
+#define Z_CHKGPG(I,N) \
+ if ( ! sameTriboolState( ogpg[I], ngpg[I] ) ) \
+ { \
+ DBG << "Service repo " << it->alias() << " gets new "#N"Check " << ngpg[I] << endl; \
+ oldRepo->set##N##Check( ngpg[I] ); \
+ oldRepoModified = true; \
+ }
+ Z_CHKGPG( 0, Gpg );
+ Z_CHKGPG( 1, RepoGpg );
+ Z_CHKGPG( 2, PkgGpg );
+#undef Z_CHKGPG
+ }
// save if modified:
if ( oldRepoModified )
////////////////////////////////////////////////////////////////////////////
// save service if modified: (unless a plugin service)
- if ( service.type() != ServiceType::PLUGIN )
+ if ( serviceModified && service.type() != ServiceType::PLUGIN )
{
- if ( service.ttl() )
- {
- service.setLrf( Date::now() ); // remember last refresh
- serviceModified = true; // or use a cookie file
- }
-
- if ( serviceModified )
- {
- // write out modified service file.
- modifyService( service.alias(), service );
- }
+ // write out modified service file.
+ modifyService( service.alias(), service );
}
if ( uglyHack.first )
_services.erase(oldAlias);
_services.insert(service);
+ // check for credentials in Urls
+ UrlCredentialExtractor( _options.rootDir ).collect( service.url() );
+
// changed properties affecting also repositories
if ( oldAlias != service.alias() // changed alias