*
*/
+#include <cstdlib>
#include <iostream>
#include <fstream>
+#include <sstream>
#include <list>
#include <algorithm>
#include "zypp/base/InputStream.h"
#include "zypp/base/Logger.h"
#include "zypp/base/Gettext.h"
#include "zypp/base/Function.h"
+#include "zypp/base/Regex.h"
#include "zypp/PathInfo.h"
#include "zypp/TmpPath.h"
#include "zypp/repo/RepoException.h"
#include "zypp/RepoManager.h"
-#include "zypp/cache/CacheStore.h"
+#include "zypp/cache/SolvStore.h"
#include "zypp/repo/cached/RepoImpl.h"
#include "zypp/media/MediaManager.h"
#include "zypp/MediaSetAccess.h"
+#include "zypp/ExternalProgram.h"
+#include "zypp/ManagedFile.h"
#include "zypp/parser/RepoFileReader.h"
#include "zypp/repo/yum/Downloader.h"
#include "zypp/parser/yum/RepoParser.h"
-#include "zypp/parser/plaindir/RepoParser.h"
+//#include "zypp/parser/plaindir/RepoParser.h"
#include "zypp/repo/susetags/Downloader.h"
#include "zypp/parser/susetags/RepoParser.h"
#include "zypp/ZYppCallbacks.h"
+#include "sat/Pool.h"
+#include "satsolver/pool.h"
+#include "satsolver/repo.h"
+#include "satsolver/repo_solv.h"
+
using namespace std;
using namespace zypp;
using namespace zypp::repo;
RepoManagerOptions::RepoManagerOptions()
{
- ZConfig globalConfig;
- repoCachePath = globalConfig.defaultRepoCachePath();
- repoRawCachePath = globalConfig.defaultRepoRawCachePath();
- knownReposPath = globalConfig.defaultKnownReposPath();
+ repoCachePath = ZConfig::instance().repoCachePath();
+ repoRawCachePath = ZConfig::instance().repoMetadataPath();
+ knownReposPath = ZConfig::instance().knownReposPath();
}
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
+ /**
+ * \short Internal version of clean cache
+ *
+ * Takes an extra SolvStore reference, so we avoid internally
+ * having 2 SolvStores writing to the same database.
+ */
+ static void cleanCacheInternal( cache::SolvStore &store,
+ const RepoInfo &info,
+ const ProgressData::ReceiverFnc & progressrcv = ProgressData::ReceiverFnc() )
+ {
+// ProgressData progress;
+// callback::SendReport<ProgressReport> report;
+// progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
+// progress.name(str::form(_("Cleaning repository '%s' cache"), info.name().c_str()));
+//
+// if ( !store.isCached(info.alias()) )
+// return;
+//
+// MIL << info.alias() << " cleaning cache..." << endl;
+//
+// CombinedProgressData subprogrcv(progress);
+//
+// store.cleanRepository(info.alias(), subprogrcv);
+ }
+
+ ////////////////////////////////////////////////////////////////////////////
+
/**
* Reads RepoInfo's from a repo file.
*
/**
* \short List of RepoInfo's from a directory
*
- * Goes trough every file in a directory and adds all
+ * Goes through every file ending with ".repo" in a directory and adds all
* RepoInfo's contained in that file.
*
* \param dir pathname of the directory to read.
if ( filesystem::readdir( entries, Pathname(dir), false ) != 0 )
ZYPP_THROW(Exception("failed to read directory"));
+ str::regex allowedRepoExt("^\\.repo(_[0-9]+)?$");
for ( list<Pathname>::const_iterator it = entries.begin(); it != entries.end(); ++it )
{
- list<RepoInfo> tmp = repositories_in_file( *it );
- repos.insert( repos.end(), tmp.begin(), tmp.end() );
+ if (str::regex_match(it->extension(), allowedRepoExt))
+ {
+ list<RepoInfo> tmp = repositories_in_file( *it );
+ repos.insert( repos.end(), tmp.begin(), tmp.end() );
- //std::copy( collector.repos.begin(), collector.repos.end(), std::back_inserter(repos));
- //MIL << "ok" << endl;
+ //std::copy( collector.repos.begin(), collector.repos.end(), std::back_inserter(repos));
+ //MIL << "ok" << endl;
+ }
}
return repos;
}
static void assert_urls( const RepoInfo &info )
{
- if (info.baseUrls().empty())
+ if (info.baseUrlsEmpty())
ZYPP_THROW(RepoNoUrlException());
}
Impl * clone() const
{ return new Impl( *this ); }
};
+
///////////////////////////////////////////////////////////////////
/** \relates RepoManager::Impl Stream output */
std::list<RepoInfo> RepoManager::knownRepositories() const
{
MIL << endl;
-
+
if ( PathInfo(_pimpl->options.knownReposPath).isExist() )
- return repositories_in_dir(_pimpl->options.knownReposPath);
+ {
+ RepoInfoList repos = repositories_in_dir(_pimpl->options.knownReposPath);
+ for ( RepoInfoList::iterator it = repos.begin();
+ it != repos.end();
+ ++it )
+ {
+ // set the metadata path for the repo
+ Pathname metadata_path = rawcache_path_for_repoinfo(_pimpl->options, (*it));
+ (*it).setMetadataPath(metadata_path);
+ }
+ return repos;
+ }
else
return std::list<RepoInfo>();
////////////////////////////////////////////////////////////////////////////
- RepoStatus RepoManager::rawMetadataStatus( const RepoInfo &info )
+ Pathname RepoManager::metadataPath( const RepoInfo &info ) const
+ {
+ return rawcache_path_for_repoinfo(_pimpl->options, info );
+ }
+
+ ////////////////////////////////////////////////////////////////////////////
+
+ RepoStatus RepoManager::metadataStatus( const RepoInfo &info ) const
{
Pathname rawpath = rawcache_path_for_repoinfo( _pimpl->options, info );
RepoType repokind = info.type();
case RepoType::YAST2_e :
{
- status = RepoStatus( rawpath + "/content");
+ // the order of RepoStatus && RepoStatus matters! (#304310)
+ status = RepoStatus( rawpath + "/content") && (RepoStatus( rawpath + "/media.1/media"));
}
break;
-
+
case RepoType::RPMPLAINDIR_e :
{
if ( PathInfo(Pathname(rawpath + "/cookie")).isExist() )
}
return status;
}
-
+
+ void RepoManager::touchIndexFile(const RepoInfo & info)
+ {
+ Pathname rawpath = rawcache_path_for_repoinfo( _pimpl->options, info );
+
+ RepoType repokind = info.type();
+ if ( repokind.toEnum() == RepoType::NONE_e )
+ // unknown, probe the local metadata
+ repokind = probe(rawpath.asUrl());
+ // if still unknown, just return
+ if (repokind == RepoType::NONE_e)
+ return;
+
+ Pathname p;
+ switch ( repokind.toEnum() )
+ {
+ case RepoType::RPMMD_e :
+ p = Pathname(rawpath + "/repodata/repomd.xml");
+ break;
+
+ case RepoType::YAST2_e :
+ p = Pathname(rawpath + "/content");
+ break;
+
+ case RepoType::RPMPLAINDIR_e :
+ p = Pathname(rawpath + "/cookie");
+ break;
+
+ case RepoType::NONE_e :
+ default:
+ break;
+ }
+
+ // touch the file, ignore error (they are logged anyway)
+ filesystem::touch(p);
+ }
+
+ bool RepoManager::checkIfToRefreshMetadata( const RepoInfo &info,
+ const Url &url,
+ RawMetadataRefreshPolicy policy )
+ {
+ assert_alias(info);
+
+ RepoStatus oldstatus;
+ RepoStatus newstatus;
+
+ try
+ {
+ MIL << "Going to try to check whether refresh is needed for " << url << endl;
+
+ repo::RepoType repokind = info.type();
+
+ // if the type is unknown, try probing.
+ switch ( repokind.toEnum() )
+ {
+ case RepoType::NONE_e:
+ // unknown, probe it
+ repokind = probe(url);
+ break;
+ default:
+ break;
+ }
+
+ Pathname rawpath = rawcache_path_for_repoinfo( _pimpl->options, info );
+ filesystem::assert_dir(rawpath);
+ oldstatus = metadataStatus(info);
+
+ // now we've got the old (cached) status, we can decide repo.refresh.delay
+ if (policy != RefreshForced)
+ {
+ // difference in seconds
+ double diff = difftime(
+ (Date::ValueType)Date::now(),
+ (Date::ValueType)oldstatus.timestamp()) / 60;
+
+ DBG << "oldstatus: " << (Date::ValueType)oldstatus.timestamp() << endl;
+ DBG << "current time: " << (Date::ValueType)Date::now() << endl;
+ DBG << "last refresh = " << diff << " minutes ago" << endl;
+
+ if (diff < ZConfig::instance().repo_refresh_delay())
+ {
+ MIL << "Repository '" << info.alias()
+ << "' has been refreshed less than repo.refresh.delay ("
+ << ZConfig::instance().repo_refresh_delay()
+ << ") minutes ago. Advising to skip refresh" << endl;
+ return false;
+ }
+ }
+
+ // create temp dir as sibling of rawpath
+ filesystem::TmpDir tmpdir( filesystem::TmpDir::makeSibling( rawpath ) );
+
+ if ( ( repokind.toEnum() == RepoType::RPMMD_e ) ||
+ ( repokind.toEnum() == RepoType::YAST2_e ) )
+ {
+ MediaSetAccess media(url);
+ shared_ptr<repo::Downloader> downloader_ptr;
+
+ if ( repokind.toEnum() == RepoType::RPMMD_e )
+ downloader_ptr.reset(new yum::Downloader(info.path()));
+ else
+ downloader_ptr.reset( new susetags::Downloader(info.path()));
+
+ RepoStatus newstatus = downloader_ptr->status(media);
+ bool refresh = false;
+ if ( oldstatus.checksum() == newstatus.checksum() )
+ {
+ MIL << "repo has not changed" << endl;
+ if ( policy == RefreshForced )
+ {
+ MIL << "refresh set to forced" << endl;
+ refresh = true;
+ }
+ }
+ else
+ {
+ MIL << "repo has changed, going to refresh" << endl;
+ refresh = true;
+ }
+
+ if (!refresh)
+ touchIndexFile(info);
+
+ return refresh;
+ }
+#if 0
+ else if ( repokind.toEnum() == RepoType::RPMPLAINDIR_e )
+ {
+ RepoStatus newstatus = parser::plaindir::dirStatus(url.getPathName());
+ bool refresh = false;
+ if ( oldstatus.checksum() == newstatus.checksum() )
+ {
+ MIL << "repo has not changed" << endl;
+ if ( policy == RefreshForced )
+ {
+ MIL << "refresh set to forced" << endl;
+ refresh = true;
+ }
+ }
+ else
+ {
+ MIL << "repo has changed, going to refresh" << endl;
+ refresh = true;
+ }
+
+ if (!refresh)
+ touchIndexFile(info);
+
+ return refresh;
+ }
+#endif
+ else
+ {
+ ZYPP_THROW(RepoUnknownTypeException());
+ }
+ }
+ catch ( const Exception &e )
+ {
+ ZYPP_CAUGHT(e);
+ ERR << "refresh check failed for " << url << endl;
+ ZYPP_RETHROW(e);
+ }
+
+ return true; // default
+ }
+
void RepoManager::refreshMetadata( const RepoInfo &info,
RawMetadataRefreshPolicy policy,
const ProgressData::ReceiverFnc & progress )
assert_alias(info);
assert_urls(info);
- RepoStatus oldstatus;
- RepoStatus newstatus;
+ // we will throw this later if no URL checks out fine
+ RepoException rexception(_("Valid metadata not found at specified URL(s)"));
+
// try urls one by one
for ( RepoInfo::urls_const_iterator it = info.baseUrlsBegin(); it != info.baseUrlsEnd(); ++it )
{
try
{
Url url(*it);
- filesystem::TmpDir tmpdir;
+
+ // check whether to refresh metadata
+ // if the check fails for this url, it throws, so another url will be checked
+ if (!checkIfToRefreshMetadata(info, url, policy))
+ return;
+
+ MIL << "Going to refresh metadata from " << url << endl;
repo::RepoType repokind = info.type();
}
Pathname rawpath = rawcache_path_for_repoinfo( _pimpl->options, info );
- oldstatus = rawMetadataStatus(info);
+ filesystem::assert_dir(rawpath);
+
+ // create temp dir as sibling of rawpath
+ filesystem::TmpDir tmpdir( filesystem::TmpDir::makeSibling( rawpath ) );
if ( ( repokind.toEnum() == RepoType::RPMMD_e ) ||
( repokind.toEnum() == RepoType::YAST2_e ) )
{
MediaSetAccess media(url);
shared_ptr<repo::Downloader> downloader_ptr;
-
+
if ( repokind.toEnum() == RepoType::RPMMD_e )
downloader_ptr.reset(new yum::Downloader(info.path()));
else
downloader_ptr.reset( new susetags::Downloader(info.path()));
-
+
/**
* Given a downloader, sets the other repos raw metadata
* path as cache paths for the fetcher, so if another
{
downloader_ptr->addCachePath(rawcache_path_for_repoinfo( _pimpl->options, *it ));
}
-
- RepoStatus newstatus = downloader_ptr->status(media);
- bool refresh = false;
- if ( oldstatus.checksum() == newstatus.checksum() )
- {
- MIL << "repo has not changed" << endl;
- if ( policy == RefreshForced )
- {
- MIL << "refresh set to forced" << endl;
- refresh = true;
- }
- }
- else
- {
- refresh = true;
- }
- if ( refresh )
- downloader_ptr->download( media, tmpdir.path());
- else
- return;
- // no error
+
+ downloader_ptr->download( media, tmpdir.path());
}
+#if 0
else if ( repokind.toEnum() == RepoType::RPMPLAINDIR_e )
{
RepoStatus newstatus = parser::plaindir::dirStatus(url.getPathName());
- bool refresh = false;
- if ( oldstatus.checksum() == newstatus.checksum() )
- {
- MIL << "repo has not changed" << endl;
- if ( policy == RefreshForced )
- {
- MIL << "refresh set to forced" << endl;
- refresh = true;
- }
- }
- else
- {
- refresh = true;
- }
- if ( refresh )
- {
- std::ofstream file(( tmpdir.path() + "/cookie").c_str());
- if (!file) {
- ZYPP_THROW (Exception( "Can't open " + tmpdir.path().asString() + "/cookie" ) );
- }
- file << url << endl;
- file << newstatus.checksum() << endl;
-
- file.close();
+ std::ofstream file(( tmpdir.path() + "/cookie").c_str());
+ if (!file) {
+ ZYPP_THROW (Exception( "Can't open " + tmpdir.path().asString() + "/cookie" ) );
}
- else
- return;
- // no error
+ file << url << endl;
+ file << newstatus.checksum() << endl;
+
+ file.close();
}
+#endif
else
{
ZYPP_THROW(RepoUnknownTypeException());
}
-
+
// ok we have the metadata, now exchange
// the contents
- TmpDir oldmetadata;
- filesystem::assert_dir(rawpath);
+ TmpDir oldmetadata( TmpDir::makeSibling( rawpath ) );
filesystem::rename( rawpath, oldmetadata.path() );
// move the just downloaded there
filesystem::rename( tmpdir.path(), rawpath );
{
ZYPP_CAUGHT(e);
ERR << "Trying another url..." << endl;
+
+ // remember the exception caught for the *first URL*
+ // if all other URLs fail, the rexception will be thrown with the
+ // cause of the problem of the first URL remembered
+ if (it == info.baseUrlsBegin())
+ rexception.remember(e);
}
} // for every url
ERR << "No more urls..." << endl;
- ZYPP_THROW(RepoException("Cant refresh metadata"));
+ ZYPP_THROW(rexception);
}
////////////////////////////////////////////////////////////////////////////
void RepoManager::cleanMetadata( const RepoInfo &info,
- const ProgressData::ReceiverFnc & progress )
+ const ProgressData::ReceiverFnc & progressfnc )
{
+ ProgressData progress(100);
+ progress.sendTo(progressfnc);
+
filesystem::recursive_rmdir(rawcache_path_for_repoinfo(_pimpl->options, info));
+ progress.toMax();
}
void RepoManager::buildCache( const RepoInfo &info,
CacheBuildPolicy policy,
const ProgressData::ReceiverFnc & progressrcv )
{
- ProgressData progress(100);
- callback::SendReport<ProgressReport> report;
- progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
- progress.name(str::form(_("Building repository '%s' cache"), info.alias().c_str()));
- progress.toMin();
-
assert_alias(info);
Pathname rawpath = rawcache_path_for_repoinfo(_pimpl->options, info);
- cache::CacheStore store(_pimpl->options.repoCachePath);
+ Pathname base = _pimpl->options.repoCachePath + info.alias();
+ Pathname solvfile = base.extend(".solv");
+
+ //cache::SolvStore store(_pimpl->options.repoCachePath);
- RepoStatus raw_metadata_status = rawMetadataStatus(info);
+ RepoStatus raw_metadata_status = metadataStatus(info);
if ( raw_metadata_status.empty() )
{
ZYPP_THROW(RepoMetadataException(info));
}
- if ( store.isCached( info.alias() ) )
+ bool needs_cleaning = false;
+ if ( isCached( info ) )
{
MIL << info.alias() << " is already cached." << endl;
- data::RecordId id = store.lookupRepository(info.alias());
- RepoStatus cache_status = store.repositoryStatus(id);
+ //data::RecordId id = store.lookupRepository(info.alias());
+ RepoStatus cache_status = cacheStatus(info);
if ( cache_status.checksum() == raw_metadata_status.checksum() )
{
MIL << info.alias() << " cache is up to date with metadata." << endl;
if ( policy == BuildIfNeeded ) {
- progress.toMax();
return;
}
else {
MIL << info.alias() << " cache rebuild is forced" << endl;
}
}
- MIL << info.alias() << " cleaning cache..." << endl;
- store.cleanRepository(id);
+
+ needs_cleaning = true;
+ }
+
+ ProgressData progress(100);
+ callback::SendReport<ProgressReport> report;
+ progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
+ progress.name(str::form(_("Building repository '%s' cache"), info.name().c_str()));
+ progress.toMin();
+
+ if (needs_cleaning)
+ {
+// Pathname name = _pimpl->options.repoCachePath;
+// //data::RecordId id = store.lookupRepository(info.alias());
+// ostringstream os;
+// os << id.get();
+// name += os.str() + ".solv";
+// unlink (name);
+// cleanCacheInternal( store, info);
+ cleanCache(info);
}
MIL << info.alias() << " building cache..." << endl;
- data::RecordId id = store.lookupOrAppendRepository(info.alias());
+ //data::RecordId id = store.lookupOrAppendRepository(info.alias());
// do we have type?
repo::RepoType repokind = info.type();
break;
}
- CombinedProgressData subprogrcv( progress, 100);
-
+ MIL << "repo type is " << repokind << endl;
+
+ switch ( repokind.toEnum() )
+ {
+ case RepoType::RPMMD_e :
+ case RepoType::YAST2_e :
+ {
+ MIL << "Executing solv converter" << endl;
+ // Take care we unlink the solvfile on exception
+ ManagedFile guard( solvfile, filesystem::unlink );
+
+ string cmd( str::form( "repo2solv.sh \"%s\" > '%s'", rawpath.c_str(), solvfile.c_str() ) );
+ ExternalProgram prog( cmd, ExternalProgram::Stderr_To_Stdout );
+ for ( string output( prog.receiveLine() ); output.length(); output = prog.receiveLine() ) {
+ MIL << " " << output;
+ }
+ int ret = prog.close();
+ if ( ret != 0 )
+ ZYPP_THROW(RepoUnknownTypeException());
+
+ // We keep it.
+ guard.resetDispose();
+ }
+ break;
+ default:
+ ZYPP_THROW(Exception("Unhandled repository type"));
+ break;
+ }
+#if 0
switch ( repokind.toEnum() )
{
case RepoType::RPMMD_e :
+ if (0)
{
+ CombinedProgressData subprogrcv( progress, 100);
parser::yum::RepoParser parser(id, store, parser::yum::RepoParserOpts(), subprogrcv);
parser.parse(rawpath);
// no error
}
break;
case RepoType::YAST2_e :
+ if (0)
{
+ CombinedProgressData subprogrcv( progress, 100);
parser::susetags::RepoParser parser(id, store, subprogrcv);
parser.parse(rawpath);
// no error
}
break;
+#endif
+#if 0
case RepoType::RPMPLAINDIR_e :
{
+ CombinedProgressData subprogrcv( progress, 100);
InputStream is(rawpath + "cookie");
string buffer;
getline( is.stream(), buffer);
parser.parse(url.getPathName());
}
break;
+
default:
ZYPP_THROW(RepoUnknownTypeException());
}
-
+#endif
// update timestamp and checksum
- store.updateRepositoryStatus(id, raw_metadata_status);
-
+ //store.updateRepositoryStatus(id, raw_metadata_status);
+ setCacheStatus(info.alias(), raw_metadata_status);
MIL << "Commit cache.." << endl;
- store.commit();
- progress.toMax();
+ //store.commit();
+ //progress.toMax();
}
////////////////////////////////////////////////////////////////////////////
- repo::RepoType RepoManager::probe( const Url &url )
+ repo::RepoType RepoManager::probe( const Url &url ) const
{
if ( url.getScheme() == "dir" && ! PathInfo( url.getPathName() ).isDir() )
{
return repo::RepoType::NONE;
}
- MediaSetAccess access(url);
- if ( access.doesFileExist("/repodata/repomd.xml") )
- return repo::RepoType::RPMMD;
- if ( access.doesFileExist("/content") )
- return repo::RepoType::YAST2;
-
- // if it is a local url of type dir
- if ( (! media::MediaManager::downloads(url)) && ( url.getScheme() == "dir" ) )
+ try
{
- Pathname path = Pathname(url.getPathName());
- if ( PathInfo(path).isDir() )
+ MediaSetAccess access(url);
+ if ( access.doesFileExist("/repodata/repomd.xml") )
+ return repo::RepoType::RPMMD;
+ if ( access.doesFileExist("/content") )
+ return repo::RepoType::YAST2;
+
+ // if it is a local url of type dir
+ if ( (! media::MediaManager::downloads(url)) && ( url.getScheme() == "dir" ) )
{
- // allow empty dirs for now
- return repo::RepoType::RPMPLAINDIR;
+ Pathname path = Pathname(url.getPathName());
+ if ( PathInfo(path).isDir() )
+ {
+ // allow empty dirs for now
+ return repo::RepoType::RPMPLAINDIR;
+ }
}
}
+ catch ( const media::MediaException &e )
+ {
+ ZYPP_CAUGHT(e);
+ RepoException enew("Error trying to read from " + url.asString());
+ enew.remember(e);
+ ZYPP_THROW(enew);
+ }
+ catch ( const Exception &e )
+ {
+ ZYPP_CAUGHT(e);
+ Exception enew("Unknown error reading from " + url.asString());
+ enew.remember(e);
+ ZYPP_THROW(enew);
+ }
return repo::RepoType::NONE;
}
void RepoManager::cleanCache( const RepoInfo &info,
const ProgressData::ReceiverFnc & progressrcv )
{
- ProgressData progress(100);
- callback::SendReport<ProgressReport> report;
- progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
- progress.name(str::form(_("Cleaning repository '%s' cache"), info.alias().c_str()));
- progress.toMin();
-
- cache::CacheStore store(_pimpl->options.repoCachePath);
-
- data::RecordId id = store.lookupRepository(info.alias());
- store.cleanRepository(id);
- store.commit();
+ Pathname name = _pimpl->options.repoCachePath;
+ name += info.alias() + ".solv";
+ unlink (name);
}
////////////////////////////////////////////////////////////////////////////
bool RepoManager::isCached( const RepoInfo &info ) const
{
- cache::CacheStore store(_pimpl->options.repoCachePath);
- return store.isCached(info.alias());
+ Pathname name = _pimpl->options.repoCachePath;
+ return PathInfo(name + Pathname(info.alias()).extend(".solv")).isExist();
}
- Repository RepoManager::createFromCache( const RepoInfo &info,
- const ProgressData::ReceiverFnc & progressrcv )
+ RepoStatus RepoManager::cacheStatus( const RepoInfo &info ) const
{
- callback::SendReport<ProgressReport> report;
- ProgressData progress;
- progress.sendTo(ProgressReportAdaptor( progressrcv, report ));
- progress.sendTo( progressrcv );
- progress.name(str::form(_("Reading repository '%s' cache"), info.alias().c_str()));
- progress.toMin();
-
- cache::CacheStore store(_pimpl->options.repoCachePath);
- if ( ! store.isCached( info.alias() ) )
- ZYPP_THROW(RepoNotCachedException());
+ Pathname base = _pimpl->options.repoCachePath + info.alias();
+ Pathname cookiefile = base.extend(".cookie");
+
+ return RepoStatus::fromCookieFile(cookiefile);
+ }
+
+ void RepoManager::setCacheStatus( const string &alias, const RepoStatus &status )
+ {
+ Pathname base = _pimpl->options.repoCachePath + alias;
+ Pathname cookiefile = base.extend(".cookie");
+
+ status.saveToCookieFile(cookiefile);
+ }
+
+ map<data::RecordId, Repo *> repo2solv;
- MIL << "Repository " << info.alias() << " is cached" << endl;
+ void RepoManager::loadFromCache( const std::string &alias,
+ const ProgressData::ReceiverFnc & progressrcv )
+ {
+ sat::Pool satpool( sat::Pool::instance() );
- data::RecordId id = store.lookupRepository(info.alias());
+ Pathname solvfile = (_pimpl->options.repoCachePath + alias).extend(".solv");
- repo::cached::RepoOptions opts( info, _pimpl->options.repoCachePath, id );
- opts.readingResolvablesProgress = progressrcv;
- repo::cached::RepoImpl::Ptr repoimpl =
- new repo::cached::RepoImpl( opts );
+ if ( ! PathInfo(solvfile).isExist() )
+ ZYPP_THROW(RepoNotCachedException());
- repoimpl->resolvables();
- // read the resolvables from cache
- return Repository(repoimpl);
+ sat::Repo repo = satpool.addRepoSolv(solvfile, alias );
}
-
+
+
////////////////////////////////////////////////////////////////////////////
/**
ProgressData progress(100);
callback::SendReport<ProgressReport> report;
progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
- progress.name(str::form(_("Adding repository '%s'"), info.alias().c_str()));
+ progress.name(str::form(_("Adding repository '%s'"), info.name().c_str()));
progress.toMin();
-
+
std::list<RepoInfo> repos = knownRepositories();
for ( std::list<RepoInfo>::const_iterator it = repos.begin();
it != repos.end();
ZYPP_THROW(RepoAlreadyExistsException(info.alias()));
}
+ RepoInfo tosave = info;
+
+ // check the first url for now
+ if ( ZConfig::instance().repo_add_probe()
+ || ( tosave.type() == RepoType::NONE && tosave.enabled()) )
+ {
+ DBG << "unknown repository type, probing" << endl;
+
+ RepoType probedtype;
+ probedtype = probe(*tosave.baseUrlsBegin());
+ if ( tosave.baseUrlsSize() > 0 )
+ {
+ if ( probedtype == RepoType::NONE )
+ ZYPP_THROW(RepoUnknownTypeException());
+ else
+ tosave.setType(probedtype);
+ }
+ }
+
progress.set(50);
-
+
// assert the directory exists
filesystem::assert_dir(_pimpl->options.knownReposPath);
-
+
Pathname repofile = generate_non_existing_name(_pimpl->options.knownReposPath,
- generate_filename(info));
+ generate_filename(tosave));
// now we have a filename that does not exists
MIL << "Saving repo in " << repofile << endl;
ZYPP_THROW (Exception( "Can't open " + repofile.asString() ) );
}
- info.dumpRepoOn(file);
+ tosave.dumpRepoOn(file);
progress.toMax();
MIL << "done" << endl;
}
// assert the directory exists
filesystem::assert_dir(_pimpl->options.knownReposPath);
-
+
Pathname repofile = generate_non_existing_name(_pimpl->options.knownReposPath, filename);
// now we have a filename that does not exists
MIL << "Saving " << repos.size() << " repo" << ( repos.size() ? "s" : "" ) << " in " << repofile << endl;
void RepoManager::removeRepository( const RepoInfo & info,
const ProgressData::ReceiverFnc & progressrcv)
{
+ ProgressData progress;
+ callback::SendReport<ProgressReport> report;
+ progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
+ progress.name(str::form(_("Removing repository '%s'"), info.name().c_str()));
+
MIL << "Going to delete repo " << info.alias() << endl;
std::list<RepoInfo> repos = knownRepositories();
{
ZYPP_THROW(RepoException("Can't delete " + todelete.filepath().asString()));
}
- MIL << todelete.alias() << " sucessfully deleted." << endl;
- return;
+ MIL << todelete.alias() << " successfully deleted." << endl;
}
else
{
// write them back except the deleted one.
//TmpFile tmp;
//std::ofstream file(tmp.path().c_str());
-
+
// assert the directory exists
filesystem::assert_dir(todelete.filepath().dirname());
-
+
std::ofstream file(todelete.filepath().c_str());
if (!file) {
//ZYPP_THROW (Exception( "Can't open " + tmp.path().asString() ) );
if ( (*fit).alias() != todelete.alias() )
(*fit).dumpRepoOn(file);
}
-
- cache::CacheStore store(_pimpl->options.repoCachePath);
-
- if ( store.isCached( todelete.alias() ) ) {
- MIL << "repository was cached. cleaning cache" << endl;
- store.cleanRepository(todelete.alias());
- }
-
- MIL << todelete.alias() << " sucessfully deleted." << endl;
- return;
}
+
+ CombinedProgressData subprogrcv(progress, 70);
+ CombinedProgressData cleansubprogrcv(progress, 30);
+ // now delete it from cache
+ if ( isCached(todelete) )
+ cleanCache( todelete, subprogrcv);
+ // now delete metadata (#301037)
+ cleanMetadata( todelete, cleansubprogrcv);
+ MIL << todelete.alias() << " successfully deleted." << endl;
+ return;
} // else filepath is empty
}
// write them back except the deleted one.
//TmpFile tmp;
//std::ofstream file(tmp.path().c_str());
-
+
// assert the directory exists
filesystem::assert_dir(toedit.filepath().dirname());
-
+
std::ofstream file(toedit.filepath().c_str());
if (!file) {
//ZYPP_THROW (Exception( "Can't open " + tmp.path().asString() ) );