*
*/
+#include <cstdlib>
#include <iostream>
#include <fstream>
+#include <sstream>
#include <list>
#include <algorithm>
#include "zypp/base/InputStream.h"
#include "zypp/base/Logger.h"
#include "zypp/base/Gettext.h"
#include "zypp/base/Function.h"
+#include "zypp/base/Regex.h"
#include "zypp/PathInfo.h"
#include "zypp/TmpPath.h"
#include "zypp/repo/RepoException.h"
#include "zypp/RepoManager.h"
-#include "zypp/cache/CacheStore.h"
+#include "zypp/cache/SolvStore.h"
#include "zypp/repo/cached/RepoImpl.h"
#include "zypp/media/MediaManager.h"
#include "zypp/MediaSetAccess.h"
+#include "zypp/ExternalProgram.h"
+#include "zypp/ManagedFile.h"
#include "zypp/parser/RepoFileReader.h"
#include "zypp/repo/yum/Downloader.h"
#include "zypp/parser/yum/RepoParser.h"
-#include "zypp/parser/plaindir/RepoParser.h"
+//#include "zypp/parser/plaindir/RepoParser.h"
#include "zypp/repo/susetags/Downloader.h"
#include "zypp/parser/susetags/RepoParser.h"
#include "zypp/ZYppCallbacks.h"
+#include "sat/Pool.h"
+#include "satsolver/pool.h"
+#include "satsolver/repo.h"
+#include "satsolver/repo_solv.h"
+
using namespace std;
using namespace zypp;
using namespace zypp::repo;
/**
* \short Internal version of clean cache
*
- * Takes an extra CacheStore reference, so we avoid internally
- * having 2 CacheStores writing to the same database.
+ * Takes an extra SolvStore reference, so we avoid internally
+ * having 2 SolvStores writing to the same database.
*/
- static void cleanCacheInternal( cache::CacheStore &store,
+ static void cleanCacheInternal( cache::SolvStore &store,
const RepoInfo &info,
const ProgressData::ReceiverFnc & progressrcv = ProgressData::ReceiverFnc() )
{
- ProgressData progress;
- callback::SendReport<ProgressReport> report;
- progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
- progress.name(str::form(_("Cleaning repository '%s' cache"), info.name().c_str()));
-
- if ( !store.isCached(info.alias()) )
- return;
-
- MIL << info.alias() << " cleaning cache..." << endl;
- data::RecordId id = store.lookupRepository(info.alias());
-
- CombinedProgressData subprogrcv(progress);
-
- store.cleanRepository(id, subprogrcv);
+// ProgressData progress;
+// callback::SendReport<ProgressReport> report;
+// progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
+// progress.name(str::form(_("Cleaning repository '%s' cache"), info.name().c_str()));
+//
+// if ( !store.isCached(info.alias()) )
+// return;
+//
+// MIL << info.alias() << " cleaning cache..." << endl;
+//
+// CombinedProgressData subprogrcv(progress);
+//
+// store.cleanRepository(info.alias(), subprogrcv);
}
-
+
////////////////////////////////////////////////////////////////////////////
-
+
/**
* Reads RepoInfo's from a repo file.
*
/**
* \short List of RepoInfo's from a directory
*
- * Goes trough every file in a directory and adds all
+ * Goes through every file ending with ".repo" in a directory and adds all
* RepoInfo's contained in that file.
*
* \param dir pathname of the directory to read.
if ( filesystem::readdir( entries, Pathname(dir), false ) != 0 )
ZYPP_THROW(Exception("failed to read directory"));
+ str::regex allowedRepoExt("^\\.repo(_[0-9]+)?$");
for ( list<Pathname>::const_iterator it = entries.begin(); it != entries.end(); ++it )
{
- list<RepoInfo> tmp = repositories_in_file( *it );
- repos.insert( repos.end(), tmp.begin(), tmp.end() );
+ if (str::regex_match(it->extension(), allowedRepoExt))
+ {
+ list<RepoInfo> tmp = repositories_in_file( *it );
+ repos.insert( repos.end(), tmp.begin(), tmp.end() );
- //std::copy( collector.repos.begin(), collector.repos.end(), std::back_inserter(repos));
- //MIL << "ok" << endl;
+ //std::copy( collector.repos.begin(), collector.repos.end(), std::back_inserter(repos));
+ //MIL << "ok" << endl;
+ }
}
return repos;
}
static shared_ptr<Impl> _nullimpl( new Impl );
return _nullimpl;
}
-
+
private:
friend Impl * rwcowClone<Impl>( const Impl * rhs );
/** clone for RWCOW_pointer */
Impl * clone() const
{ return new Impl( *this ); }
};
+
///////////////////////////////////////////////////////////////////
/** \relates RepoManager::Impl Stream output */
}
////////////////////////////////////////////////////////////////////////////
-
+
RepoStatus RepoManager::metadataStatus( const RepoInfo &info ) const
{
Pathname rawpath = rawcache_path_for_repoinfo( _pimpl->options, info );
return refresh;
}
+#if 0
else if ( repokind.toEnum() == RepoType::RPMPLAINDIR_e )
{
RepoStatus newstatus = parser::plaindir::dirStatus(url.getPathName());
return refresh;
}
+#endif
else
{
ZYPP_THROW(RepoUnknownTypeException());
ERR << "refresh check failed for " << url << endl;
ZYPP_RETHROW(e);
}
-
+
return true; // default
}
downloader_ptr->download( media, tmpdir.path());
}
+#if 0
else if ( repokind.toEnum() == RepoType::RPMPLAINDIR_e )
{
RepoStatus newstatus = parser::plaindir::dirStatus(url.getPathName());
file.close();
}
+#endif
else
{
ZYPP_THROW(RepoUnknownTypeException());
{
ZYPP_CAUGHT(e);
ERR << "Trying another url..." << endl;
-
+
// remember the exception caught for the *first URL*
// if all other URLs fail, the rexception will be thrown with the
// cause of the problem of the first URL remembered
assert_alias(info);
Pathname rawpath = rawcache_path_for_repoinfo(_pimpl->options, info);
- cache::CacheStore store(_pimpl->options.repoCachePath);
+ Pathname base = _pimpl->options.repoCachePath + info.alias();
+ Pathname solvfile = base.extend(".solv");
+
+ //cache::SolvStore store(_pimpl->options.repoCachePath);
RepoStatus raw_metadata_status = metadataStatus(info);
if ( raw_metadata_status.empty() )
}
bool needs_cleaning = false;
- if ( store.isCached( info.alias() ) )
+ if ( isCached( info ) )
{
MIL << info.alias() << " is already cached." << endl;
- data::RecordId id = store.lookupRepository(info.alias());
- RepoStatus cache_status = store.repositoryStatus(id);
+ //data::RecordId id = store.lookupRepository(info.alias());
+ RepoStatus cache_status = cacheStatus(info);
if ( cache_status.checksum() == raw_metadata_status.checksum() )
{
MIL << info.alias() << " cache rebuild is forced" << endl;
}
}
-
+
needs_cleaning = true;
}
progress.toMin();
if (needs_cleaning)
- cleanCacheInternal( store, info);
+ {
+// Pathname name = _pimpl->options.repoCachePath;
+// //data::RecordId id = store.lookupRepository(info.alias());
+// ostringstream os;
+// os << id.get();
+// name += os.str() + ".solv";
+// unlink (name);
+// cleanCacheInternal( store, info);
+ cleanCache(info);
+ }
MIL << info.alias() << " building cache..." << endl;
- data::RecordId id = store.lookupOrAppendRepository(info.alias());
+ //data::RecordId id = store.lookupOrAppendRepository(info.alias());
// do we have type?
repo::RepoType repokind = info.type();
break;
}
-
+ MIL << "repo type is " << repokind << endl;
+
+ switch ( repokind.toEnum() )
+ {
+ case RepoType::RPMMD_e :
+ case RepoType::YAST2_e :
+ {
+ MIL << "Executing solv converter" << endl;
+ // Take care we unlink the solvfile on exception
+ ManagedFile guard( solvfile, filesystem::unlink );
+
+ string cmd( str::form( "repo2solv.sh \"%s\" > '%s'", rawpath.c_str(), solvfile.c_str() ) );
+ ExternalProgram prog( cmd, ExternalProgram::Stderr_To_Stdout );
+ for ( string output( prog.receiveLine() ); output.length(); output = prog.receiveLine() ) {
+ MIL << " " << output;
+ }
+ int ret = prog.close();
+ if ( ret != 0 )
+ ZYPP_THROW(RepoUnknownTypeException());
+
+ // We keep it.
+ guard.resetDispose();
+ }
+ break;
+ default:
+ ZYPP_THROW(Exception("Unhandled repostory type"));
+ break;
+ }
+#if 0
switch ( repokind.toEnum() )
{
case RepoType::RPMMD_e :
+ if (0)
{
CombinedProgressData subprogrcv( progress, 100);
parser::yum::RepoParser parser(id, store, parser::yum::RepoParserOpts(), subprogrcv);
}
break;
case RepoType::YAST2_e :
+ if (0)
{
CombinedProgressData subprogrcv( progress, 100);
parser::susetags::RepoParser parser(id, store, subprogrcv);
// no error
}
break;
+#endif
+#if 0
case RepoType::RPMPLAINDIR_e :
{
CombinedProgressData subprogrcv( progress, 100);
parser.parse(url.getPathName());
}
break;
+
default:
ZYPP_THROW(RepoUnknownTypeException());
}
-
+#endif
// update timestamp and checksum
- store.updateRepositoryStatus(id, raw_metadata_status);
-
+ //store.updateRepositoryStatus(id, raw_metadata_status);
+ setCacheStatus(info.alias(), raw_metadata_status);
MIL << "Commit cache.." << endl;
- store.commit();
+ //store.commit();
//progress.toMax();
}
return repo::RepoType::RPMMD;
if ( access.doesFileExist("/content") )
return repo::RepoType::YAST2;
-
+
// if it is a local url of type dir
if ( (! media::MediaManager::downloads(url)) && ( url.getScheme() == "dir" ) )
{
return repo::RepoType::NONE;
}
-
+
////////////////////////////////////////////////////////////////////////////
-
+
void RepoManager::cleanCache( const RepoInfo &info,
const ProgressData::ReceiverFnc & progressrcv )
{
- cache::CacheStore store(_pimpl->options.repoCachePath);
- cleanCacheInternal( store, info, progressrcv );
- store.commit();
+ Pathname name = _pimpl->options.repoCachePath;
+ name += info.alias() + ".solv";
+ unlink (name);
}
////////////////////////////////////////////////////////////////////////////
bool RepoManager::isCached( const RepoInfo &info ) const
{
- cache::CacheStore store(_pimpl->options.repoCachePath);
- return store.isCached(info.alias());
+ Pathname name = _pimpl->options.repoCachePath;
+ return PathInfo(name + Pathname(info.alias()).extend(".solv")).isExist();
}
RepoStatus RepoManager::cacheStatus( const RepoInfo &info ) const
{
- cache::CacheStore store(_pimpl->options.repoCachePath);
- data::RecordId id = store.lookupRepository(info.alias());
- RepoStatus cache_status = store.repositoryStatus(id);
- return cache_status;
+
+ Pathname base = _pimpl->options.repoCachePath + info.alias();
+ Pathname cookiefile = base.extend(".cookie");
+
+ return RepoStatus::fromCookieFile(cookiefile);
}
- Repository RepoManager::createFromCache( const RepoInfo &info,
- const ProgressData::ReceiverFnc & progressrcv )
+ void RepoManager::setCacheStatus( const string &alias, const RepoStatus &status )
{
- callback::SendReport<ProgressReport> report;
- ProgressData progress;
- progress.sendTo(ProgressReportAdaptor( progressrcv, report ));
- //progress.sendTo( progressrcv );
- progress.name(str::form(_("Reading repository '%s' cache"), info.name().c_str()));
-
- cache::CacheStore store(_pimpl->options.repoCachePath);
+ Pathname base = _pimpl->options.repoCachePath + alias;
+ Pathname cookiefile = base.extend(".cookie");
- if ( ! store.isCached( info.alias() ) )
- ZYPP_THROW(RepoNotCachedException());
+ status.saveToCookieFile(cookiefile);
+ }
- MIL << "Repository " << info.alias() << " is cached" << endl;
+ map<data::RecordId, Repo *> repo2solv;
- data::RecordId id = store.lookupRepository(info.alias());
+ void RepoManager::loadFromCache( const std::string &alias,
+ const ProgressData::ReceiverFnc & progressrcv )
+ {
+ sat::Pool satpool( sat::Pool::instance() );
+
+ Pathname solvfile = (_pimpl->options.repoCachePath + alias).extend(".solv");
- CombinedProgressData subprogrcv(progress);
+ if ( ! PathInfo(solvfile).isExist() )
+ ZYPP_THROW(RepoNotCachedException());
- repo::cached::RepoOptions opts( info, _pimpl->options.repoCachePath, id );
- opts.readingResolvablesProgress = subprogrcv;
- repo::cached::RepoImpl::Ptr repoimpl =
- new repo::cached::RepoImpl( opts );
-
- repoimpl->resolvables();
- // read the resolvables from cache
- return Repository(repoimpl);
+ sat::Repo repo = satpool.addRepoSolv(solvfile, alias );
}
-
+
+
////////////////////////////////////////////////////////////////////////////
/**
callback::SendReport<ProgressReport> report;
progress.sendTo( ProgressReportAdaptor( progressrcv, report ) );
progress.name(str::form(_("Removing repository '%s'"), info.name().c_str()));
-
+
MIL << "Going to delete repo " << info.alias() << endl;
std::list<RepoInfo> repos = knownRepositories();
CombinedProgressData subprogrcv(progress, 70);
CombinedProgressData cleansubprogrcv(progress, 30);
// now delete it from cache
- cleanCache( todelete, subprogrcv);
+ if ( isCached(todelete) )
+ cleanCache( todelete, subprogrcv);
// now delete metadata (#301037)
cleanMetadata( todelete, cleansubprogrcv);
MIL << todelete.alias() << " sucessfully deleted." << endl;