From: Tomas Mlcoch
Date: Tue, 25 Feb 2014 17:05:20 +0000 (+0100)
Subject: deltarepo: Removed from createrepo_c
X-Git-Tag: upstream/0.10.0~264
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=b3592f57075924d25ae30ce614ee962f01b924ee;p=services%2Fcreaterepo_c.git

deltarepo: Removed from createrepo_c

DeltaRepo is now a standalone project.

DeltaRepo's homepage: https://github.com/Tojaj/DeltaRepo
---
diff --git a/deltarepo/CMakeLists.txt b/deltarepo/CMakeLists.txt
deleted file mode 100644
index f86a74f..0000000
--- a/deltarepo/CMakeLists.txt
+++ /dev/null
@@ -1,18 +0,0 @@
-EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR)
-
-IF (${CMAKE_VERSION} VERSION_LESS "2.8.0")
-    CONFIGURE_FILE( "deltarepo.py" COPYONLY)
-    CONFIGURE_FILE( "managedeltarepos.py" COPYONLY)
-    CONFIGURE_FILE( "repoupdater.py" COPYONLY)
-ELSE()
-    FILE(COPY deltarepo.py DESTINATION ./)
-    FILE(COPY managedeltarepos.py DESTINATION ./)
-    FILE(COPY repoupdater.py DESTINATION ./)
-ENDIF()
-
-#INSTALL(PROGRAMS deltarepo.py DESTINATION bin/ RENAME deltarepo)
-#INSTALL(PROGRAMS managedeltarepos.py DESTINATION bin/ RENAME managedeltarepos)
-#INSTALL(PROGRAMS repoupdater.py DESTINATION bin/ RENAME repoupdater)
-#INSTALL(DIRECTORY deltarepo/
-#        DESTINATION ${PYTHON_INSTALL_DIR}/deltarepo
-#        FILES_MATCHING PATTERN "*.py")
diff --git a/deltarepo/README.md b/deltarepo/README.md
deleted file mode 100644
index b62f639..0000000
--- a/deltarepo/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Examples of usage
-
-    ./deltarepo.py repo1 repo2 -v
-
-    ./deltarepo.py --apply repo1 delta/
diff --git a/deltarepo/acceptance_tests/TODO.md b/deltarepo/acceptance_tests/TODO.md
deleted file mode 100644
index 02a6b47..0000000
--- a/deltarepo/acceptance_tests/TODO.md
+++ /dev/null
@@ -1,5 +0,0 @@
-TODO
-====
-
-* Replace the real packages (iok, ...) with the fake (and smaller) ones
-
diff --git a/deltarepo/acceptance_tests/repos/note b/deltarepo/acceptance_tests/repos/note
deleted file mode 100644
index 61d2f48..0000000
--- a/deltarepo/acceptance_tests/repos/note
+++ /dev/null
@@ -1,17 +0,0 @@
-regenrepos.sh
-=============
-
-Script for regeneration of all testing repositories.
-
-Options
--------
-This script can be configured by a few environment variables.
-
-* CREATEREPO - Command for createrepo (createrepo_c is default)
-* MODIFYREPO - Command for modifyrepo (modifyrepo_c is default)
-* EXTRAARGS - Extra arguments for the createrepo command (empty by default)
-
-Example of usage:
------------------
-
-$ CREATEREPO=../../../../createrepo_c MODIFYREPO="../../../../modifyrepo_c" ./regenrepos.sh
diff --git a/deltarepo/acceptance_tests/repos/packages/Archer-3.4.5-6.x86_64.rpm b/deltarepo/acceptance_tests/repos/packages/Archer-3.4.5-6.x86_64.rpm
deleted file mode 100644
index 6067c88..0000000
Binary files a/deltarepo/acceptance_tests/repos/packages/Archer-3.4.5-6.x86_64.rpm and /dev/null differ
diff --git a/deltarepo/acceptance_tests/repos/packages/balicek-utf8-1.1.1-1.x86_64.rpm b/deltarepo/acceptance_tests/repos/packages/balicek-utf8-1.1.1-1.x86_64.rpm
deleted file mode 100644
index 68c4d0e..0000000
Binary files a/deltarepo/acceptance_tests/repos/packages/balicek-utf8-1.1.1-1.x86_64.rpm and /dev/null differ
diff --git a/deltarepo/acceptance_tests/repos/packages/empty-0-0.x86_64.rpm b/deltarepo/acceptance_tests/repos/packages/empty-0-0.x86_64.rpm
deleted file mode 100644
index cb324ae..0000000
Binary files a/deltarepo/acceptance_tests/repos/packages/empty-0-0.x86_64.rpm and /dev/null differ
diff --git a/deltarepo/acceptance_tests/repos/packages/fake_bash-1.1.1-1.x86_64.rpm b/deltarepo/acceptance_tests/repos/packages/fake_bash-1.1.1-1.x86_64.rpm
deleted file mode 100644
index ddab045..0000000
Binary files a/deltarepo/acceptance_tests/repos/packages/fake_bash-1.1.1-1.x86_64.rpm and /dev/null differ
diff --git a/deltarepo/acceptance_tests/repos/packages/super_kernel-6.0.1-2.x86_64.rpm b/deltarepo/acceptance_tests/repos/packages/super_kernel-6.0.1-2.x86_64.rpm
deleted file mode 100644
index b606e16..0000000
Binary files a/deltarepo/acceptance_tests/repos/packages/super_kernel-6.0.1-2.x86_64.rpm and /dev/null differ
diff --git a/deltarepo/acceptance_tests/repos/regenrepos.sh b/deltarepo/acceptance_tests/repos/regenrepos.sh
deleted file mode 100755
index 1511a0d..0000000
--- a/deltarepo/acceptance_tests/repos/regenrepos.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-MY_DIR=`dirname $0`
-MY_DIR="$MY_DIR/"
-
-# Note:
-# If you want to use a different createrepo tool, set the CREATEREPO
-# environment variable (same for MODIFYREPO)
-# E.g.:
-# $ CREATEREPO="createrepo" ./regenrepos.sh
-
-
-for dir in $MY_DIR/repo*/
-do
-    echo "### Regeneration of $dir"
-    $dir/gen.sh
-    echo
-done
diff --git a/deltarepo/acceptance_tests/repos/repo1/foobar-1 b/deltarepo/acceptance_tests/repos/repo1/foobar-1
deleted file mode 100644
index 76fc659..0000000
--- a/deltarepo/acceptance_tests/repos/repo1/foobar-1
+++ /dev/null
@@ -1 +0,0 @@
-a content
\ No newline at end of file
diff --git a/deltarepo/acceptance_tests/repos/repo1/gen.sh b/deltarepo/acceptance_tests/repos/repo1/gen.sh
deleted file mode 100755
index e5dbbab..0000000
--- a/deltarepo/acceptance_tests/repos/repo1/gen.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-MY_DIR=`dirname $0`
-
-if [[ -z "$CREATEREPO" ]]
-then
-    CREATEREPO="createrepo_c"
-fi
-
-pushd "$MY_DIR"
-$CREATEREPO $EXTRAARGS --pkglist pkglist --groupfile group.xml --revision "1st repo" --content "A content tag" .
-$MODIFYREPO --mdtype="foobar" foobar-1 repodata/ -popd diff --git a/deltarepo/acceptance_tests/repos/repo1/group.xml b/deltarepo/acceptance_tests/repos/repo1/group.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/acceptance_tests/repos/repo1/group.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/acceptance_tests/repos/repo1/pkglist b/deltarepo/acceptance_tests/repos/repo1/pkglist deleted file mode 100644 index 8f68735..0000000 --- a/deltarepo/acceptance_tests/repos/repo1/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/fake_bash-1.1.1-1.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo1_only_pri/gen.sh b/deltarepo/acceptance_tests/repos/repo1_only_pri/gen.sh deleted file mode 100755 index 04d0807..0000000 --- a/deltarepo/acceptance_tests/repos/repo1_only_pri/gen.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --groupfile group.xml --revision "1st repo" --content "A content tag" . -rm repodata/*filelists.sqlite* -rm repodata/*other.sqlite* -rm repodata/*filelists.xml* -rm repodata/*other.xml* -rm repodata/*group.xml* -rm repodata/*primary.sqlite* -popd diff --git a/deltarepo/acceptance_tests/repos/repo1_only_pri/group.xml b/deltarepo/acceptance_tests/repos/repo1_only_pri/group.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/acceptance_tests/repos/repo1_only_pri/group.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/acceptance_tests/repos/repo1_only_pri/pkglist b/deltarepo/acceptance_tests/repos/repo1_only_pri/pkglist deleted file mode 100644 index 8f68735..0000000 --- a/deltarepo/acceptance_tests/repos/repo1_only_pri/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/fake_bash-1.1.1-1.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/gen.sh b/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/gen.sh deleted file mode 100755 index f8ad77f..0000000 --- a/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/gen.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --groupfile group.xml --revision "1st repo" --content "A content tag" . 
-rm repodata/*other.sqlite* -rm repodata/*filelists.sqlite* -rm repodata/*other.xml* -rm repodata/*group.xml* -rm repodata/*primary.sqlite* -popd diff --git a/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/group.xml b/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/group.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/group.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/pkglist b/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/pkglist deleted file mode 100644 index 8f68735..0000000 --- a/deltarepo/acceptance_tests/repos/repo1_only_pri_fil/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/fake_bash-1.1.1-1.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo2/gen.sh b/deltarepo/acceptance_tests/repos/repo2/gen.sh deleted file mode 100755 index 941805f..0000000 --- a/deltarepo/acceptance_tests/repos/repo2/gen.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --revision foorevisionbar --distro cpe:/o:fedoraproject:fedora:17,foo --repo abc --content plm . -popd diff --git a/deltarepo/acceptance_tests/repos/repo2/pkglist b/deltarepo/acceptance_tests/repos/repo2/pkglist deleted file mode 100644 index 7a9c7d1..0000000 --- a/deltarepo/acceptance_tests/repos/repo2/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo2_incomplete/gen.sh b/deltarepo/acceptance_tests/repos/repo2_incomplete/gen.sh deleted file mode 100755 index 3749c07..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_incomplete/gen.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -if [[ -z "$MODIFYREPO" ]] -then - MODIFYREPO="modifyrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --revision foorevisionbar --distro cpe:/o:fedoraproject:fedora:17,foo --repo abc --content plm . -$MODIFYREPO --remove primary_db repodata/ -$MODIFYREPO --remove filelists_db repodata/ -$MODIFYREPO --remove other repodata/ -$MODIFYREPO --remove other_db repodata/ -popd diff --git a/deltarepo/acceptance_tests/repos/repo2_incomplete/pkglist b/deltarepo/acceptance_tests/repos/repo2_incomplete/pkglist deleted file mode 100644 index 7a9c7d1..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_incomplete/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo2_incomplete_2/gen.sh b/deltarepo/acceptance_tests/repos/repo2_incomplete_2/gen.sh deleted file mode 100755 index 7283f92..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_incomplete_2/gen.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -if [[ -z "$MODIFYREPO" ]] -then - MODIFYREPO="modifyrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --revision foorevisionbar --distro cpe:/o:fedoraproject:fedora:17,foo --repo abc --content plm . 
-$MODIFYREPO --remove primary_db repodata/ -$MODIFYREPO --remove filelists repodata/ -$MODIFYREPO --remove filelists_db repodata/ -$MODIFYREPO --remove other repodata/ -$MODIFYREPO --remove other_db repodata/ -popd diff --git a/deltarepo/acceptance_tests/repos/repo2_incomplete_2/pkglist b/deltarepo/acceptance_tests/repos/repo2_incomplete_2/pkglist deleted file mode 100644 index 7a9c7d1..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_incomplete_2/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo2_nodatabase/gen.sh b/deltarepo/acceptance_tests/repos/repo2_nodatabase/gen.sh deleted file mode 100755 index 4dcb48f..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_nodatabase/gen.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --no-database --revision foorevisionbar --distro cpe:/o:fedoraproject:fedora:17,foo --repo abc --content plm . -popd diff --git a/deltarepo/acceptance_tests/repos/repo2_nodatabase/pkglist b/deltarepo/acceptance_tests/repos/repo2_nodatabase/pkglist deleted file mode 100644 index 7a9c7d1..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_nodatabase/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo2_only_pri/gen.sh b/deltarepo/acceptance_tests/repos/repo2_only_pri/gen.sh deleted file mode 100755 index 04e4b0a..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_only_pri/gen.sh +++ /dev/null @@ -1,17 +0,0 @@ -!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --revision foorevisionbar --distro cpe:/o:fedoraproject:fedora:17,foo --repo abc --content plm . 
-rm repodata/*filelists.sqlite* -rm repodata/*other.sqlite* -rm repodata/*filelists.xml* -rm repodata/*other.xml* -rm repodata/*primary.sqlite* -popd diff --git a/deltarepo/acceptance_tests/repos/repo2_only_pri/pkglist b/deltarepo/acceptance_tests/repos/repo2_only_pri/pkglist deleted file mode 100644 index 7a9c7d1..0000000 --- a/deltarepo/acceptance_tests/repos/repo2_only_pri/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo3/comps.xml b/deltarepo/acceptance_tests/repos/repo3/comps.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/acceptance_tests/repos/repo3/comps.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/acceptance_tests/repos/repo3/foobar b/deltarepo/acceptance_tests/repos/repo3/foobar deleted file mode 100644 index 76fc659..0000000 --- a/deltarepo/acceptance_tests/repos/repo3/foobar +++ /dev/null @@ -1 +0,0 @@ -a content \ No newline at end of file diff --git a/deltarepo/acceptance_tests/repos/repo3/gen.sh b/deltarepo/acceptance_tests/repos/repo3/gen.sh deleted file mode 100755 index 7cf5482..0000000 --- a/deltarepo/acceptance_tests/repos/repo3/gen.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -if [[ -z "$MODIFYREPO" ]] -then - MODIFYREPO="modifyrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --groupfile comps.xml --revision "3th repo" --content "Content tag 123456" . -$MODIFYREPO foobar repodata/ -popd diff --git a/deltarepo/acceptance_tests/repos/repo3/pkglist b/deltarepo/acceptance_tests/repos/repo3/pkglist deleted file mode 100644 index b0436ea..0000000 --- a/deltarepo/acceptance_tests/repos/repo3/pkglist +++ /dev/null @@ -1,5 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/balicek-utf8-1.1.1-1.x86_64.rpm -../packages/empty-0-0.x86_64.rpm -../packages/fake_bash-1.1.1-1.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/acceptance_tests/repos/repo3_md5/comps.xml b/deltarepo/acceptance_tests/repos/repo3_md5/comps.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/acceptance_tests/repos/repo3_md5/comps.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/acceptance_tests/repos/repo3_md5/gen.sh b/deltarepo/acceptance_tests/repos/repo3_md5/gen.sh deleted file mode 100755 index acfd237..0000000 --- a/deltarepo/acceptance_tests/repos/repo3_md5/gen.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -if [[ -z "$MODIFYREPO" ]] -then - MODIFYREPO="modifyrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --checksum "md5" --groupfile comps.xml --revision "3th repo - md5" --content "111" --content "222" --repo "aaa" --repo "bbb" --repo "ccc" --distro="one,foo" --distro="two:bar" . 
-popd diff --git a/deltarepo/acceptance_tests/repos/repo3_md5/pkglist b/deltarepo/acceptance_tests/repos/repo3_md5/pkglist deleted file mode 100644 index b0436ea..0000000 --- a/deltarepo/acceptance_tests/repos/repo3_md5/pkglist +++ /dev/null @@ -1,5 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/balicek-utf8-1.1.1-1.x86_64.rpm -../packages/empty-0-0.x86_64.rpm -../packages/fake_bash-1.1.1-1.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/acceptance_tests/test.sh b/deltarepo/acceptance_tests/test.sh deleted file mode 100755 index daa0611..0000000 --- a/deltarepo/acceptance_tests/test.sh +++ /dev/null @@ -1,542 +0,0 @@ -#!/bin/bash - -DELTAREPO="../deltarepo.py" - -MY_DIR=`dirname $0` - -pushd $MY_DIR > /dev/null - - -# Prepare outdir - -DATETIME=`date +"%Y%m%d-%H%M%S"` -OUTDIR_TEMPLATE="deltarepo-test-$DATETIME-XXX" -TEST_OUTDIR=`mktemp -d "$OUTDIR_TEMPLATE"` - - -# Repos paths - -REPO1="repos/repo1" -REPO1_ONLY_PRI="repos/repo1_only_pri" -REPO1_ONLY_PRI_FIL="repos/repo1_only_pri_fil" -REPO2="repos/repo2" -REPO2_INCOMPLETE="repos/repo2_incomplete" -REPO2_INCOMPLETE_2="repos/repo2_incomplete_2" -REPO2_ONLY_PRI="repos/repo2_only_pri" -REPO2_NODATABASE="repos/repo2_nodatabase" -REPO3="repos/repo3" -REPO3_MD5="repos/repo3_md5" - - -# Check if repos are available (if not regenrepos.sh must be run) - -if [ ! -d "$REPO1/repodata" ]; then - echo "It seems that the test repositories doesn't have metadata created." - echo "Running metadata regeneration script.." - echo - repos/regenrepos.sh - echo - echo "Metadata regenerated" -fi - - -# Some ugly global variables - -DELTAREPO_QUIET=0 -COMPAREREPOS_IGNORE_REPOMD_CONTENT=0 - - -# Helper functions - -function testcase_outdir { - mktemp -d "$TEST_OUTDIR/testcase-$1-XXX" -} - -function compare_repos { - # Arguments are: repo1 repo2 - - echo "Comparing: $1/repodata $2/repodata" - echo - - OTHERARGUMENTS="" - - if [ "$COMPAREREPOS_IGNORE_REPOMD_CONTENT" = 1 ]; then - OTHERARGUMENTS=" --exclude repomd.xml " - fi - - diff -r $1/repodata $2/repodata \ - -I "[0-9]*" \ - -I ".*" \ - $OTHERARGUMENTS - echo - - return $? 
-} - -# Test cases - -TESTCASEID=0 - -function testcase01 { - # Arguments are: REPO_old REPO_new - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: $FUNCNAME $1 -> $2 applied on: $1" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --quiet -o $DELTADIR $1 $2 - $DELTAREPO --quiet -a -o $FINALDIR $1 $DELTADIR - else - $DELTAREPO -o $DELTADIR $1 $2 - $DELTAREPO -a -o $FINALDIR $1 $DELTADIR - fi - - compare_repos $2 $FINALDIR -} - - -# Misuse cases - -testcase01 $REPO1 $REPO1 -testcase01 $REPO2 $REPO2 -testcase01 $REPO2_INCOMPLETE $REPO2_INCOMPLETE -testcase01 $REPO2_INCOMPLETE_2 $REPO2_INCOMPLETE_2 -testcase01 $REPO2_NODATABASE $REPO2_NODATABASE -testcase01 $REPO3 $REPO3 -testcase01 $REPO3_MD5 $REPO3_MD5 - - -# Regular cases - -testcase01 $REPO1 $REPO2 -testcase01 $REPO1 $REPO2_INCOMPLETE -testcase01 $REPO1 $REPO2_INCOMPLETE_2 -testcase01 $REPO1 $REPO2_NODATABASE -testcase01 $REPO1 $REPO3 -testcase01 $REPO1 $REPO3_MD5 - -testcase01 $REPO2 $REPO1 -testcase01 $REPO2 $REPO2_INCOMPLETE -testcase01 $REPO2 $REPO2_INCOMPLETE_2 -testcase01 $REPO2 $REPO2_NODATABASE -testcase01 $REPO2 $REPO3 -testcase01 $REPO2 $REPO3_MD5 - -testcase01 $REPO2_INCOMPLETE $REPO1 -testcase01 $REPO2_INCOMPLETE $REPO2 -testcase01 $REPO2_INCOMPLETE $REPO2_INCOMPLETE_2 -testcase01 $REPO2_INCOMPLETE $REPO3 -testcase01 $REPO2_INCOMPLETE $REPO3_MD5 - -testcase01 $REPO2_INCOMPLETE_2 $REPO1 -testcase01 $REPO2_INCOMPLETE_2 $REPO2 -testcase01 $REPO2_INCOMPLETE_2 $REPO2_INCOMPLETE -testcase01 $REPO2_INCOMPLETE_2 $REPO3 -testcase01 $REPO2_INCOMPLETE_2 $REPO3_MD5 - -testcase01 $REPO2_NODATABASE $REPO1 -testcase01 $REPO2_NODATABASE $REPO2 -testcase01 $REPO2_NODATABASE $REPO2_INCOMPLETE -testcase01 $REPO2_NODATABASE $REPO2_INCOMPLETE_2 -testcase01 $REPO2_NODATABASE $REPO3 -testcase01 $REPO2_NODATABASE $REPO3_MD5 - -testcase01 $REPO3 $REPO1 -testcase01 $REPO3 $REPO2 -testcase01 $REPO3 $REPO2_INCOMPLETE -testcase01 $REPO3 $REPO2_INCOMPLETE_2 -testcase01 $REPO3 $REPO3_MD5 - -testcase01 $REPO3_MD5 $REPO1 -testcase01 $REPO3_MD5 $REPO2 -testcase01 $REPO3_MD5 $REPO2_INCOMPLETE -testcase01 $REPO3_MD5 $REPO2_INCOMPLETE_2 -testcase01 $REPO3_MD5 $REPO3 - - -# 1nd test case that shoud failed -# -# Scenario: -# We have a repo where some metadata files are missing. -# We want to do delta to the new repo version. -# In this situation, delta generation should failed as long -# as no --ignore-missing param is passed -# Expected result: -# No deltarepo should be generated and bad return code should -# be returned - -function testcase01_that_should_fail { - # Arguments are: REPO_old REPO_new - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: $FUNCNAME $1 -> $2 applied on: $1" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - $DELTAREPO --quiet -o $DELTADIR $1 $2 &> $TCDIR/output - - if [ "$?" 
= 0 ]; then - echo "FAILED" - cat $TCDIR/output - fi - - echo -} - -testcase01_that_should_fail $REPO1_ONLY_PRI_FIL $REPO2 -testcase01_that_should_fail $REPO1_ONLY_PRI_FIL $REPO2_NODATABASE -testcase01_that_should_fail $REPO1_ONLY_PRI_FIL $REPO2_INCOMPLETE -testcase01_that_should_fail $REPO1_ONLY_PRI_FIL $REPO2_INCOMPLETE_2 -testcase01_that_should_fail $REPO1_ONLY_PRI_FIL $REPO3 -testcase01_that_should_fail $REPO1_ONLY_PRI_FIL $REPO3_MD5 -testcase01_that_should_fail $REPO1_ONLY_PRI_FIL $REPO1_ONLY_PRI_FIL -testcase01_that_should_fail $REPO1 $REPO1_ONLY_PRI_FIL -testcase01_that_should_fail $REPO2 $REPO1_ONLY_PRI_FIL -testcase01_that_should_fail $REPO2_INCOMPLETE $REPO1_ONLY_PRI_FIL -testcase01_that_should_fail $REPO2_INCOMPLETE_2 $REPO1_ONLY_PRI_FIL -testcase01_that_should_fail $REPO3 $REPO1_ONLY_PRI_FIL -testcase01_that_should_fail $REPO3_MD5 $REPO1_ONLY_PRI_FIL - -# 1nd test case that shoud succeed -# -# Scenario: -# We have a repo where some metadata files are missing. -# We want to do delta to the new repo version. -# In this situation, delta generation should pass because -# --ignore-missing param is used -# Expected result: -# Deltarepo should be generated and 0 return code should -# be returned - -function testcase01_that_should_succeed { - # Arguments are: REPO_old REPO_new - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: $FUNCNAME $1 -> $2 applied on: $1" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --ignore-missing --quiet -o $DELTADIR $1 $2 - $DELTAREPO --ignore-missing --quiet -a -o $FINALDIR $1 $DELTADIR - else - $DELTAREPO --ignore-missing -o $DELTADIR $1 $2 - $DELTAREPO --ignore-missing -a -o $FINALDIR $1 $DELTADIR - fi - - compare_repos $2 $FINALDIR -} - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=1 -DELTAREPO_QUIET=1 -testcase01_that_should_succeed $REPO1_ONLY_PRI_FIL $REPO2 -testcase01_that_should_succeed $REPO1_ONLY_PRI_FIL $REPO2_NODATABASE -testcase01_that_should_succeed $REPO1_ONLY_PRI_FIL $REPO3 -testcase01_that_should_succeed $REPO1_ONLY_PRI_FIL $REPO3_MD5 -testcase01_that_should_succeed $REPO1_ONLY_PRI_FIL $REPO1_ONLY_PRI_FIL -testcase01_that_should_succeed $REPO1_ONLY_PRI_FIL $REPO2_INCOMPLETE -testcase01_that_should_succeed $REPO1_ONLY_PRI_FIL $REPO2_INCOMPLETE_2 -testcase01_that_should_succeed $REPO1 $REPO1_ONLY_PRI_FIL -testcase01_that_should_succeed $REPO2 $REPO1_ONLY_PRI_FIL -testcase01_that_should_succeed $REPO2_INCOMPLETE $REPO1_ONLY_PRI_FIL -testcase01_that_should_succeed $REPO2_INCOMPLETE_2 $REPO1_ONLY_PRI_FIL -testcase01_that_should_succeed $REPO3 $REPO1_ONLY_PRI_FIL -testcase01_that_should_succeed $REPO3_MD5 $REPO1_ONLY_PRI_FIL - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=0 -DELTAREPO_QUIET=0 - -# 2nd test case -# -# Scenario: -# We have a regular delta from R1 -> R2 but our R1 is incomplete -# and some metadata files are missing. --ignore-missing option is used -# Expected result: -# Deltarepo should update the available files and print warning -# about the missing ones. 
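
Outside the test harness, the scenario just described boils down to the following sketch (the repository paths are hypothetical and deltarepo.py is assumed to be invocable as in the README above); the testcase02 function below automates it:

    # Build a full delta between two complete repositories (old repo first)
    ./deltarepo.py -o delta/ repo_v1/ repo_v2/
    # Apply it to an incomplete copy of the old repo: --ignore-missing
    # updates the metadata that are present and only warns about the rest
    ./deltarepo.py --ignore-missing -a -o final/ repo_v1_incomplete/ delta/
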
- -function testcase02 { - # Arguments are: REPO_old REPO_new - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: $FUNCNAME $1 -> $2 applied on: $3" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --quiet -o $DELTADIR $1 $2 - $DELTAREPO --ignore-missing --quiet -a -o $FINALDIR $3 $DELTADIR - else - $DELTAREPO -o $DELTADIR $1 $2 - $DELTAREPO --ignore-missing -a -o $FINALDIR $3 $DELTADIR - fi - - compare_repos $2 $FINALDIR -} - - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=1 -DELTAREPO_QUIET=1 - -testcase02 $REPO1 $REPO2 $REPO1_ONLY_PRI_FIL -testcase02 $REPO1 $REPO3 $REPO1_ONLY_PRI_FIL -testcase02 $REPO1 $REPO2 $REPO1_ONLY_PRI -testcase02 $REPO1 $REPO3 $REPO1_ONLY_PRI -testcase02 $REPO2 $REPO1 $REPO2_ONLY_PRI -testcase02 $REPO2 $REPO3 $REPO2_ONLY_PRI - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=0 -DELTAREPO_QUIET=0 - - -# 3th test case -# -# Scenario: -# We have incomplete delta for R1 -> R2. And incomplete R1. -# The delta contains only deltas for the files contained by our R1. -# --ignore-missing option is used -# Expected result: -# Available deltas are applicated on available metadata. - -function testcase03 { - # Arguments are: REPO_old REPO_new - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: incomplete delta $FUNCNAME $1 -> $2 applied on: $3" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - # Gen delta - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --quiet -o $DELTADIR $1 $2 - else - $DELTAREPO -o $DELTADIR $1 $2 - fi - - # Remove some metadata from delta - - rm -f $DELTADIR/repodata/*filelists.sqlite* - rm -f $DELTADIR/repodata/*other* - rm -f $DELTADIR/repodata/*comps* - rm -f $DELTADIR/repodata/*foobar* - - # Apply this delta to incomplete repo - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --ignore-missing --quiet -a -o $FINALDIR $3 $DELTADIR - else - $DELTAREPO --ignore-missing -a -o $FINALDIR $3 $DELTADIR - fi - - compare_repos $2 $FINALDIR -} - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=1 -DELTAREPO_QUIET=1 - -testcase03 $REPO2 $REPO3 $REPO2_ONLY_PRI -testcase03 $REPO1 $REPO3 $REPO1_ONLY_PRI_FIL - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=0 -DELTAREPO_QUIET=0 - - -# 4th test case -# -# Scenario: -# We have delta where databases should not be generated. -# We want the databases. 
-# Expected result: -# deltarepo --apply with --database argument should generate repo -# with databases - -function testcase04 { - # Arguments are: REPO_old REPO_new_nodbs REPO_new_dbs - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: $FUNCNAME $1 -> $2 applied on: $1" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --quiet -o $DELTADIR $1 $2 - $DELTAREPO --quiet --database -a -o $FINALDIR $1 $DELTADIR - else - $DELTAREPO -o $DELTADIR $1 $2 - $DELTAREPO --database -a -o $FINALDIR $1 $DELTADIR - fi - - compare_repos $3 $FINALDIR -} - -testcase04 $REPO1 $REPO2_NODATABASE $REPO2 - - -# 5th test case -# -# Scenario: -# We want create delta where destination repo doesn't have a databases -# But we want the databases. We use deltarepo with --database argument -# during delta repo generation -# Expected result: -# deltarepo --apply even WITHOUT --database argument should generate repo -# with databases. - -function testcase05 { - # Arguments are: REPO_old REPO_new_nodbs REPO_new_dbs - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: $FUNCNAME $1 -> $2 applied on: $1" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --quiet --database -o $DELTADIR $1 $2 - $DELTAREPO --quiet -a -o $FINALDIR $1 $DELTADIR - else - $DELTAREPO --database -o $DELTADIR $1 $2 - $DELTAREPO -a -o $FINALDIR $1 $DELTADIR - fi - - compare_repos $3 $FINALDIR -} - -testcase05 $REPO1 $REPO2_NODATABASE $REPO2 - - -# 6th test case -# -# Scenario: -# We have incomplete delta for R1 -> R2. And complete R1. -# --ignore-missing option is used -# Expected result: -# Available deltas are applicated on available metadata. -# Other (no updated) metadata are no logner available. 
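
As a standalone sketch (again with hypothetical paths), the same scenario reads as follows; the testcase06 function below implements it inside the harness:

    # Build a delta, then delete parts of it so the delta is incomplete
    ./deltarepo.py -o delta/ repo_v1/ repo_v2/
    rm -f delta/repodata/*other* delta/repodata/*comps*
    # Apply the incomplete delta to a complete old repo: with
    # --ignore-missing only metadata that still have a delta are updated;
    # the non-updated metadata are not carried over into the result
    ./deltarepo.py --ignore-missing -a -o final/ repo_v1/ delta/
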
- -function testcase06 { - # Arguments are: REPO_old REPO_new REPO_for_apply - - IDSTR=$(printf "%02d\n" $TESTCASEID) - TESTCASEID=$[$TESTCASEID+1] - - TCNAME="$IDSTR: incomplete delta $FUNCNAME $1 -> $2 applied on: $3" - TCDIR=$(testcase_outdir "$IDSTR-$FUNCNAME") - - echo "===============================================" - echo "$TCNAME ($TCDIR)"; - echo "===============================================" - - DELTADIR="$TCDIR/delta" - FINALDIR="$TCDIR/final" - mkdir $DELTADIR - mkdir $FINALDIR - - # Gen delta - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --quiet -o $DELTADIR $1 $2 - else - $DELTAREPO -o $DELTADIR $1 $2 - fi - - # Remove some metadata from delta - - rm -f $DELTADIR/repodata/*filelists.sqlite* - rm -f $DELTADIR/repodata/*other* - rm -f $DELTADIR/repodata/*comps* - rm -f $DELTADIR/repodata/*foobar* - - # Apply this delta to incomplete repo - - if [ "$DELTAREPO_QUIET" = 1 ]; then - $DELTAREPO --ignore-missing --quiet -a -o $FINALDIR $3 $DELTADIR - else - $DELTAREPO --ignore-missing -a -o $FINALDIR $3 $DELTADIR - fi - - compare_repos $2 $FINALDIR -} - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=1 -DELTAREPO_QUIET=1 - -testcase06 $REPO2 $REPO3 $REPO2 -testcase06 $REPO1 $REPO3 $REPO1 - -COMPAREREPOS_IGNORE_REPOMD_CONTENT=0 -DELTAREPO_QUIET=0 - -popd > /dev/null diff --git a/deltarepo/deltarepo.py b/deltarepo/deltarepo.py deleted file mode 100755 index d80cedb..0000000 --- a/deltarepo/deltarepo.py +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import sys -import os.path -import hashlib -import logging -import argparse -import deltarepo - -LOG_FORMAT = "%(message)s" - -# TODO: -# - Support for type of compression (?) -# - Multiple verbose levels - - -def parse_options(): - parser = argparse.ArgumentParser(description="Gen/Apply delta on yum repository.", - usage="%(prog)s [options] \n" \ - " %(prog)s --apply ") - parser.add_argument('path1', help="First repository") - parser.add_argument('path2', help="Second repository or delta repository") - parser.add_argument('--debug', action="store_true", help=argparse.SUPPRESS) - parser.add_argument("--version", action="store_true", - help="Show version number and quit.") - parser.add_argument("-q", "--quiet", action="store_true", - help="Run in quiet mode.") - parser.add_argument("-v", "--verbose", action="store_true", - help="Run in verbose mode.") - #parser.add_option("-l", "--list-datatypes", action="store_true", - # help="List datatypes for which delta is supported.") - parser.add_argument("-o", "--outputdir", action="store", metavar="DIR", - help="Output directory.", default=None) - parser.add_argument("-d", "--database", action="store_true", - help="Force database generation") - parser.add_argument("--ignore-missing", action="store_true", - help="Ignore missing metadata files. (The files that " - "are listed in repomd.xml but physically doesn't " - "exists)") - - group = parser.add_argument_group("Delta generation") - #group.add_argument("--skip", action="append", metavar="DATATYPE", - # help="Skip delta on the DATATYPE. Could be specified "\ - # "multiple times. (E.g., --skip=comps)") - #group.add_argument("--do-only", action="append", metavar="DATATYPE", - # help="Do delta only for the DATATYPE. Could be specified "\ - # "multiple times. (E.g., --do-only=primary)") - group.add_argument("-t", "--id-type", action="store", metavar="HASHTYPE", - help="Hash function for the ids (Contenthash). 
" \ - "Default is sha256.", default="sha256") - - group = parser.add_argument_group("Delta application") - group.add_argument("-a", "--apply", action="store_true", - help="Enable delta application mode.") - - args = parser.parse_args() - - # Error checks - - if args.version: - return args - - #if len(args) != 2: - # parser.error("Two repository paths have to be specified!") - - if args.id_type not in hashlib.algorithms: - parser.error("Unsupported hash algorithm %s" % args.id_type) - - if args.quiet and args.verbose: - parser.error("Cannot use quiet and verbose simultaneously!") - - if not os.path.isdir(args.path1) or \ - not os.path.isdir(os.path.join(args.path1, "repodata")) or \ - not os.path.isfile(os.path.join(args.path1, "repodata", "repomd.xml")): - parser.error("Not a repository: %s" % args.path1) - - if not os.path.isdir(args.path2) or \ - not os.path.isdir(os.path.join(args.path2, "repodata")) or \ - not os.path.isfile(os.path.join(args.path2, "repodata", "repomd.xml")): - parser.error("Not a repository: %s" % args.path2) - - if not os.path.isdir(args.outputdir): - parser.error("Not a directory: %s" % args.outputdir) - - if args.debug: - args.verbose = True - - return args - -def print_version(): - print("DeltaRepo: {0}".format(deltarepo.VERBOSE_VERSION)) - -def setup_logging(quiet, verbose): - logger = logging.getLogger("deltarepo_logger") - formatter = logging.Formatter(LOG_FORMAT) - logging.basicConfig(format=LOG_FORMAT) - if quiet: - logger.setLevel(logging.ERROR) - elif verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - return logger - -def main(args, logger): - if args.apply: - # Applying delta - da = deltarepo.DeltaRepoApplicator(args.path1, - args.path2, - out_path=args.outputdir, - logger=logger, - force_database=args.database, - ignore_missing=args.ignore_missing) - da.apply() - else: - # Do delta - dg = deltarepo.DeltaRepoGenerator(args.path1, - args.path2, - out_path=args.outputdir, - logger=logger, - contenthash_type=args.id_type, - force_database=args.database, - ignore_missing=args.ignore_missing) - dg.gen() - -if __name__ == "__main__": - args = parse_options() - - if args.version: - print_version() - sys.exit(0) - - logger = setup_logging(args.quiet, args.verbose) - - try: - main(args, logger) - except Exception as err: - if args.debug: - raise - print("Error: {0}".format(err), file=sys.stderr) - sys.exit(1) - - sys.exit(0) diff --git a/deltarepo/deltarepo/__init__.py b/deltarepo/deltarepo/__init__.py deleted file mode 100644 index 238e597..0000000 --- a/deltarepo/deltarepo/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -DeltaRepo package for Python. -This is the library for generation, application and handling of -DeltaRepositories. -The library is builded on the Createrepo_c library and its a part of it. 
- -Copyright (C) 2013 Tomas Mlcoch - -""" - -import createrepo_c as cr -from .common import LoggingInterface, calculate_contenthash -from .plugins_common import Metadata -from .deltarepos import DeltaRepos, DeltaReposRecord -from .deltametadata import DeltaMetadata, PluginBundle -from .applicator import DeltaRepoApplicator -from .generator import DeltaRepoGenerator -from .plugins import PLUGINS -from .plugins import needed_delta_metadata -from .errors import DeltaRepoError, DeltaRepoPluginError - -__all__ = ['VERSION', 'VERBOSE_VERSION', - 'LoggingInterface', 'calculate_contenthash', - 'Metadata', - 'DeltaRepos', 'DeltaReposRecord', - 'DeltaMetadata', 'PluginBundle', - 'DeltaRepoApplicator', - 'DeltaRepoGenerator', - 'needed_delta_metadata', - 'DeltaRepoError', 'DeltaRepoPluginError'] - -VERSION = "0.0.1" -VERBOSE_VERSION = "%s (createrepo_c: %s)" % (VERSION, cr.VERSION) diff --git a/deltarepo/deltarepo/applicator.py b/deltarepo/deltarepo/applicator.py deleted file mode 100644 index 081a02b..0000000 --- a/deltarepo/deltarepo/applicator.py +++ /dev/null @@ -1,365 +0,0 @@ -""" -DeltaRepo package for Python. -This is the library for generation, application and handling of -DeltaRepositories. -The library is builded on the Createrepo_c library and its a part of it. - -Copyright (C) 2013 Tomas Mlcoch - -""" - -import os -import shutil -import createrepo_c as cr -from .common import LoggingInterface -from .plugins_common import GlobalBundle, Metadata -from .deltametadata import DeltaMetadata -from .common import DEFAULT_CHECKSUM_TYPE, DEFAULT_COMPRESSION_TYPE -from .plugins import GlobalBundle, PLUGINS, GENERAL_PLUGIN -from .util import calculate_content_hash, pkg_id_str -from .errors import DeltaRepoError - -__all__ = ['DeltaRepoApplicator'] - -class DeltaRepoApplicator(LoggingInterface): - - def __init__(self, - old_repo_path, - delta_repo_path, - out_path=None, - logger=None, - force_database=False, - ignore_missing=False): - - # Initialization - - LoggingInterface.__init__(self, logger) - - self.contenthash_type = None - self.unique_md_filenames = False - self.force_database = force_database - self.ignore_missing = ignore_missing - self.deltametadata = DeltaMetadata() - - self.out_path = out_path or "./" - - self.final_path = os.path.join(self.out_path, "repodata") - - self.new_repo_path = out_path - self.new_repodata_path = os.path.join(self.new_repo_path, ".repodata/") - self.new_repomd_path = os.path.join(self.new_repodata_path, "repomd.xml") - - self.old_repo_path = old_repo_path - self.old_repodata_path = os.path.join(self.old_repo_path, "repodata/") - self.old_repomd_path = os.path.join(self.old_repodata_path, "repomd.xml") - - self.delta_repo_path = delta_repo_path - self.delta_repodata_path = os.path.join(self.delta_repo_path, "repodata/") - self.delta_repomd_path = os.path.join(self.delta_repodata_path, "repomd.xml") - - # Prepare repomd objects - self.old_repomd = cr.Repomd(self.old_repomd_path) - self.delta_repomd = cr.Repomd(self.delta_repomd_path) - self.new_repomd = cr.Repomd() - - # Check if delta repo id correspond with the old repo id - if not self.delta_repomd.contenthash or \ - len(self.delta_repomd.contenthash.split('-')) != 2: - raise DeltaRepoError("Bad content hash") - - self.contenthash_type_str = self.delta_repomd.contenthash_type - res = self.delta_repomd.contenthash.split('-') - self.old_contenthash, self.new_contenthash = res - self._debug("Delta %s -> %s" % (self.old_contenthash, - self.new_contenthash)) - - if self.old_repomd.contenthash_type == 
self.delta_repomd.contenthash_type: - if self.old_repomd.contenthash and self.old_repomd.contenthash != self.old_contenthash: - raise DeltaRepoError("Not suitable delta for current repo " \ - "(Expected: {0} Real: {1})".format( - self.old_contenthash, self.old_repomd.contenthash)) - else: - self._debug("Different contenthash types repo: {0} vs delta: {1}".format( - self.old_repomd.contenthash_type, self.delta_repomd.contenthash_type)) - - # Use revision and tags - self.new_repomd.set_revision(self.delta_repomd.revision) - for tag in self.delta_repomd.distro_tags: - self.new_repomd.add_distro_tag(tag[1], tag[0]) - for tag in self.delta_repomd.repo_tags: - self.new_repomd.add_repo_tag(tag) - for tag in self.delta_repomd.content_tags: - self.new_repomd.add_content_tag(tag) - - # Load records - self.old_records = {} - self.delta_records = {} - for record in self.old_repomd.records: - self.old_records[record.type] = record - for record in self.delta_repomd.records: - self.delta_records[record.type] = record - - old_record_types = set(self.old_records.keys()) - delta_record_types = set(self.delta_records.keys()) - - self.deleted_repomd_record_types = old_record_types - delta_record_types - self.added_repomd_record_types = delta_record_types - old_record_types - - # Important sanity checks (repo without primary is definitely bad) - if not "primary" in self.old_records: - raise DeltaRepoError("Missing \"primary\" metadata in old repo") - - # Detect type of checksum in the delta repomd.xml - self.checksum_type = cr.checksum_type(self.delta_records["deltametadata"].checksum_type) - if self.checksum_type == cr.UNKNOWN_CHECKSUM: - raise DeltaRepoError("Unknown checksum type used in delta repo: %s" % \ - self.delta_records["deltametadata"].checksum_type) - - # Detection if use unique md filenames - if self.delta_records["deltametadata"].location_href.split("deltametadata")[0] != "": - self.unique_md_filenames = True - - # Load removedxml - self.removedxml_path = None - if "deltametadata" in self.delta_records: - self.deltametadata_path = os.path.join(self.delta_repo_path, - self.delta_records["deltametadata"].location_href) - self.deltametadata.xmlparse(self.deltametadata_path) - else: - self._warning("\"deltametadata\" record is missing in repomd.xml "\ - "of delta repo") - - # Prepare global bundle - self.globalbundle = GlobalBundle() - self.globalbundle.contenthash_type_str = self.contenthash_type_str - self.globalbundle.unique_md_filenames = self.unique_md_filenames - self.globalbundle.force_database = self.force_database - self.globalbundle.ignore_missing = self.ignore_missing - - def _new_metadata(self, metadata_type): - """Return Metadata Object for the metadata_type""" - - metadata = Metadata(metadata_type) - - metadata.checksum_type = self.checksum_type - metadata.compression_type = DEFAULT_COMPRESSION_TYPE - - # Set output directory - metadata.out_dir = self.new_repodata_path - - # Properties related to the first (old) repository - old_rec = self.old_records.get(metadata_type) - metadata.old_rec = old_rec - if old_rec: - # Build old filename - metadata.old_fn = os.path.join(self.old_repo_path, old_rec.location_href) - if os.path.isfile(metadata.old_fn): - metadata.old_fn_exists = True - else: - msg = "File {0} doesn't exist in the old repository" \ - " (but it should - delta may rely on " \ - "it)!".format(metadata.old_fn) - self._warning(msg) - if not self.ignore_missing: - raise DeltaRepoError(msg + " Use --ignore-missing option " - "to ignore this error") - - # Properties related to the 
second (delta) repository - delta_rec = self.delta_records.get(metadata_type) - metadata.delta_rec = delta_rec - if delta_rec: - metadata.delta_fn = os.path.join(self.delta_repo_path, delta_rec.location_href) - if os.path.isfile(metadata.delta_fn): - metadata.delta_fn_exists = True - - # Determine compression type - detected_compression_type = cr.detect_compression(metadata.delta_fn) - if (detected_compression_type != cr.UNKNOWN_COMPRESSION): - metadata.compression_type = detected_compression_type - else: - self._warning("Cannot detect compression type for " - "{0}".format(metadata.delta_fn)) - else: - msg = ("The file {0} doesn't exist in the delta" - "repository!".format(metadata.new_fn)) - self._warning(msg) - if not self.ignore_missing: - raise DeltaRepoError(msg + " Use --ignore-missing option " - "to ignore this error") - - metadata.checksum_type = cr.checksum_type(delta_rec.checksum_type) - - return metadata - - def check_content_hashes(self, pri_md): - self._debug("Checking expected content hashes") - - if not pri_md: - self._warning("Content hashes cannot be checked!") - - c_old_contenthash = self.globalbundle.calculated_old_contenthash - c_new_contenthash = self.globalbundle.calculated_new_contenthash - - if not c_old_contenthash or not c_new_contenthash: - - if not c_old_contenthash: - if not pri_md.old_fn_exists: - raise DeltaRepoError("Old repository doesn't have " - "a primary metadata!") - c_old_contenthash = calculate_content_hash(pri_md.old_fn, - self.contenthash_type_str, - self._get_logger()) - if not c_new_contenthash: - if not pri_md.new_fn_exists: - raise DeltaRepoError("New repository doesn't have " - "a primary metadata!") - c_new_contenthash = calculate_content_hash(pri_md.new_fn, - self.contenthash_type_str, - self._get_logger()) - - self.globalbundle.calculated_old_contenthash = c_old_contenthash - self.globalbundle.calculated_new_contenthash = c_new_contenthash - - self._debug("Calculated content hash of the old repo: {0}".format( - c_old_contenthash)) - self._debug("Calculated content hash of the new repo: {0}".format( - c_new_contenthash)) - - if self.old_contenthash != c_old_contenthash: - message = "Content hash of the old repository doesn't match "\ - "the real one ({0} != {1}).".format(self.old_contenthash, - c_old_contenthash) - self._error(message) - raise DeltaRepoError(message) - else: - self._debug("Calculated content hash of the old repo matches " - "the expected one ({0})".format(self.old_contenthash)) - - if self.new_contenthash != c_new_contenthash: - message = "Content hash of the new repository doesn't match "\ - "the real one ({0} != {1}).".format(self.new_contenthash, - c_new_contenthash) - self._error(message) - raise DeltaRepoError(message) - else: - self._debug("Calculated content hash of the new repo matches " - "the expected one ({0})".format(self.new_contenthash)) - - def apply(self): - - # Prepare output path - os.mkdir(self.new_repodata_path) - - # Set of types of processed metadata records ("primary", "primary_db"...) 
- processed_metadata = set() - primary_metadata_object = None - - for plugin in PLUGINS: - - # Prepare metadata for the plugin - metadata_objects = {} - for metadata_name in plugin.METADATA: - metadata_object = self._new_metadata(metadata_name) - if metadata_name == "primary": - primary_metadata_object = metadata_object - if metadata_object is not None: - metadata_objects[metadata_name] = metadata_object - - # Skip plugin if no supported metadata available - if not metadata_objects: - self._debug("Plugin {0}: Skipped - None of supported " \ - "metadata {1} available".format( - plugin.NAME, plugin.METADATA)) - continue - - # Prepare plugin bundle - pluginbundle = self.deltametadata.get_pluginbundle(plugin.NAME) - - if not pluginbundle: - self._debug("Skipping {0} because it hasn't record in " - "deltametadata.xml".format(plugin.NAME)) - continue - - if pluginbundle and pluginbundle.version > plugin.VERSION: - raise DeltaRepoError("Delta of {0} metadata is generated by " - "plugin {1} with version: {2}, but locally available " - "is only version: {3}".format(metadata_objects.keys(), - plugin.NAME, pluginbundle.version, plugin.VERSION)) - - # Use the plugin - self._debug("Plugin {0}: Active".format(plugin.NAME)) - plugin_instance = plugin(pluginbundle, self.globalbundle, - logger=self._get_logger()) - repomd_records = plugin_instance.apply(metadata_objects) - - # Put repomd records from processed metadatas to repomd - self._debug("Plugin {0}: Processed {1} delta record(s) " \ - "and produced:".format(plugin.NAME, metadata_objects.keys())) - for rec in repomd_records: - self._debug(" - {0}".format(rec.type)) - self.new_repomd.set_record(rec) - - # Organization stuff - for md in metadata_objects.keys(): - processed_metadata.add(md) - - # Process rest of the metadata files - metadata_objects = {} - all_available_records = set() - all_available_records.update(self.delta_records.items()) - all_available_records.update(self.old_records.items()) - for rectype, rec in all_available_records: - if rectype == "deltametadata": - continue - if rectype in processed_metadata: - continue - - metadata_object = self._new_metadata(rectype) - if metadata_object is not None: - self._debug("To be processed by general delta plugin: " \ - "{0}".format(rectype)) - metadata_objects[rectype] = metadata_object - else: - self._debug("Not processed: {0} - SKIP".format(rectype)) - - if metadata_objects and self.deltametadata.get_pluginbundle(GENERAL_PLUGIN.NAME): - # Use the plugin - pluginbundle = self.deltametadata.get_pluginbundle(GENERAL_PLUGIN.NAME) - - if pluginbundle.version > GENERAL_PLUGIN.VERSION: - raise DeltaRepoError("Delta of {0} metadata is generated by " - "plugin {1} with version: {2}, but locally available " - "is only version: {3}".format(metadata_objects.keys(), - GENERAL_PLUGIN.NAME, pluginbundle.version, GENERAL_PLUGIN.VERSION)) - self._debug("Plugin {0}: Active".format(GENERAL_PLUGIN.NAME)) - plugin_instance = GENERAL_PLUGIN(pluginbundle, self.globalbundle, - logger=self._get_logger()) - repomd_records = plugin_instance.apply(metadata_objects) - - # Put repomd records from processed metadatas to repomd - self._debug("Plugin {0}: Processed {1} delta record(s) " \ - "and produced:".format(GENERAL_PLUGIN.NAME, metadata_objects.keys())) - for rec in repomd_records: - self._debug(" - {0}".format(rec.type)) - self.new_repomd.set_record(rec) - - # Check if calculated contenthashes match - self.check_content_hashes(primary_metadata_object) - - # Prepare and write out the new repomd.xml - 
self._debug("Preparing repomd.xml ...") - self.new_repomd.set_contenthash(self.new_contenthash, self.contenthash_type_str) - self.new_repomd.sort_records() - new_repomd_xml = self.new_repomd.xml_dump() - - self._debug("Writing repomd.xml ...") - open(self.new_repomd_path, "w").write(new_repomd_xml) - - # Final move - if os.path.exists(self.final_path): - self._debug("Destination dir already exists! Removing %s" % \ - self.final_path) - shutil.rmtree(self.final_path) - self._debug("Moving %s -> %s" % (self.new_repodata_path, self.final_path)) - os.rename(self.new_repodata_path, self.final_path) - diff --git a/deltarepo/deltarepo/common.py b/deltarepo/deltarepo/common.py deleted file mode 100644 index 6d8c8a8..0000000 --- a/deltarepo/deltarepo/common.py +++ /dev/null @@ -1,61 +0,0 @@ -import logging -import hashlib -import createrepo_c as cr - -DEFAULT_CHECKSUM_NAME = "sha256" -DEFAULT_CHECKSUM_TYPE = cr.SHA256 -DEFAULT_COMPRESSION_TYPE = cr.GZ - - -class LoggingInterface(object): - """Base class with logging support. - Other classes inherit this class to obtain - support of logging methods. - """ - - def __init__(self, logger=None): - self.logger = None - self._set_logger(logger) - - def _set_logger(self, logger=None): - if logger is None: - logger = logging.getLogger() - logger.disabled = True - self.logger = logger - - def _get_logger(self): - return self.logger - - def _log(self, level, msg): - self.logger.log(level, msg) - - def _debug(self, msg): - self._log(logging.DEBUG, msg) - - def _info(self, msg): - self._log(logging.INFO, msg) - - def _warning(self, msg): - self._log(logging.WARNING, msg) - - def _error(self, msg): - self._log(logging.ERROR, msg) - - def _critical(self, msg): - self._log(logging.CRITICAL, msg) - -def calculate_contenthash(primary_xml_path, contenthash_type="sha256"): - pkgids = [] - - def pkgcb(pkg): - pkgids.append("{0}{1}{2}".format(pkg.pkgId, - pkg.location_href, - pkg.location_base or '')) - - cr.xml_parse_primary(primary_xml_path, pkgcb=pkgcb) - - contenthash = hashlib.new(contenthash_type) - for pkgid in sorted(pkgids): - contenthash.update(pkgid) - return contenthash.hexdigest() - diff --git a/deltarepo/deltarepo/deltametadata.py b/deltarepo/deltarepo/deltametadata.py deleted file mode 100644 index 7f3fee4..0000000 --- a/deltarepo/deltarepo/deltametadata.py +++ /dev/null @@ -1,242 +0,0 @@ -import os -import tempfile -import createrepo_c as cr -import xml.dom.minidom -from .errors import DeltaRepoError -from lxml import etree -from .xmlcommon import getNode, getAttribute - -class AdditionalXmlData(object): - """Interface to store/load additional data to/from xml. - """ - - ADDITIONAL_XML_DATA = True - - def __init__(self): - self._data = {} - self._lists = {} - - def set(self, key, value): - """Store a single key-value pair to the XML. - Both key and value have to be a string. - Each key could have only single string value. - No multiple keys with same name are allowed.""" - if not isinstance(key, basestring): - raise TypeError("expected string as key") - if not isinstance(value, basestring): - raise TypeError("expected string as value") - self._data[key] = value - - def update(self, dictionary): - """Store multiple key-value pairs to the XML. - All keys and values have to be a strings. - Each key could have only single string value. 
- No multiple keys with same name are allowed.""" - if not isinstance(dictionary, dict): - raise TypeError("expected dictionary") - - for key, val in dictionary.items(): - self.set(key, val) - - def append(self, listname, dictionary): - """Append a multiple key-value pairs to the XML. - One list/key could have multiple dictionaries.""" - if not isinstance(listname, basestring): - raise TypeError("expected string") - if not isinstance(dictionary, dict): - raise TypeError("expected dict") - - if not listname in self._lists: - self._lists[listname] = [] - - # Validate items first - for key, val in dictionary.items(): - if not isinstance(key, basestring) or not isinstance(val, basestring): - raise TypeError("Dict's keys and values must be string") - - self._lists[listname].append(dictionary) - - def get(self, key, default=None): - """Return a single valued key from the XML""" - return self._data.get(key, default) - - def get_list(self, key, default=None): - """Return list (a key with multiple values) of dictionaries""" - return self._lists.get(key, default) - - def _subelement(self, parent, name, in_attrs=None): - """Generate an XML element from the content of the object. - - :param parent: Parent xml.dom.Node object - :param name: Name of the XML element - :param in_attrs: Dictionary with element attributes. - Both keys and values have to be strings.""" - attrs = {} - attrs.update(self._data) - if in_attrs: - attrs.update(in_attrs) - elem = etree.SubElement(parent, name, attrs) - - for listname, listvalues in self._lists.items(): - for val in listvalues: - etree.SubElement(elem, listname, val) - - return elem - -class PluginBundle(AdditionalXmlData): - """Object that persistently stores plugin configuration - in deltametadata.xml XML file. - To access data use the public methods from AdditionalXmlData object.""" - def __init__(self, name, version): - AdditionalXmlData.__init__(self) - - if not isinstance(name, basestring): - raise TypeError("string expected") - if not isinstance(version, int): - raise TypeError("integer expected") - - self.name = name # Plugin name (string) - self.version = version # Plugin version (integer) - -# -# deltametadata.xml -# - -class DeltaMetadata(object): - """Object that represents deltametadata.xml file in deltarepository. - The deltametadata.xml persistently stores plugin configuration. 
- """ - - def __init__(self): - self.revision_src = None - self.revision_dst = None - self.contenthash_src = None - self.contenthash_dst = None - self.contenthash_type = None - self.timestamp_src = None - self.timestamp_dst = None - self.usedplugins = {} - - def add_pluginbundle(self, pluginbundle): - """Add new pluginbundle to the object""" - if not isinstance(pluginbundle, PluginBundle): - raise TypeError("PluginBundle object expected") - self.usedplugins[pluginbundle.name] = pluginbundle - - def get_pluginbundle(self, name): - """Get associate PluginBundle object""" - return self.usedplugins.get(name, None) - - def xmldump(self): - """Get XML dump""" - xmltree = etree.Element("deltametadata") - - # Dump metadata - if self.revision_src or self.revision_dst: - attrs = {} - if self.revision_src: - attrs["src"] = str(self.revision_src) - if self.revision_dst: - attrs["dst"] = str(self.revision_dst) - etree.SubElement(xmltree, "revision", attrs) - - if (self.contenthash_src or self.contenthash_dst) and self.contenthash_type: - attrs = {"type": self.contenthash_type} - if self.contenthash_src: - attrs["src"] = self.contenthash_src - if self.contenthash_dst: - attrs["dst"] = self.contenthash_dst - etree.SubElement(xmltree, "contenthash", attrs) - - if self.timestamp_src or self.timestamp_dst: - attrs = {} - if self.timestamp_src: - attrs["src"] = str(self.timestamp_src) - if self.timestamp_dst: - attrs["dst"] = str(self.timestamp_dst) - etree.SubElement(xmltree, "timestamp", attrs) - - # Dump plugins - usedplugins = etree.SubElement(xmltree, "usedplugins") - for plugin in self.usedplugins.values(): - attrs = {"name": plugin.name, "version": str(plugin.version)} - plugin._subelement(usedplugins, "plugin", attrs) - return etree.tostring(xmltree, - pretty_print=True, - encoding="UTF-8", - xml_declaration=True) - - def xmlparse(self, path): - """Parse data from an xml file""" - - _, tmp_path = tempfile.mkstemp() - cr.decompress_file(path, tmp_path, cr.AUTO_DETECT_COMPRESSION) - dom = xml.dom.minidom.parse(tmp_path) - os.remove(tmp_path) - - # Get the root element - deltametadata = getNode(dom, "deltametadata") - if not deltametadata: - raise DeltaRepoError("Cannot parse {0}".format(path)) - - # Parse metadata - node = getNode(deltametadata, "revision") - if node: - self.revision_src = getAttribute(node, "src", None) - self.revision_dst = getAttribute(node, "dst", None) - - node = getNode(deltametadata, "contenthash") - if node: - self.contenthash_src = getAttribute(node, "src", None) - self.contenthash_dst = getAttribute(node, "dst", None) - self.contenthash_type = getAttribute(node, "type", None) - - node = getNode(deltametadata, "timestamp") - if node: - self.timestamp_src = int(getAttribute(node, "src", 0)) - self.timestamp_dst = int(getAttribute(node, "dst", 0)) - - # Parse plugins - usedplugins = deltametadata.getElementsByTagName("plugin") - for plugin_node in usedplugins: - name = None - version = None - other = {} - - # Parse attributes - for x in xrange(plugin_node.attributes.length): - attr = plugin_node.attributes.item(x) - if attr.name == "name": - name = attr.value - elif attr.name == "version": - version = attr.value - else: - other[attr.name] = attr.value - - if not name or not version: - raise DeltaRepoError("Bad XML: name or version attribute " - "of plugin element is missing") - - try: - version_int = int(version) - except ValueError: - raise DeltaRepoError("Version {0} cannot be converted to " - "integer number".format(version)) - - bp = PluginBundle(name, version_int) - 
bp.update(other) - - # Parse subelements - for list_item_node in plugin_node.childNodes: - if list_item_node.nodeType != xml.dom.minidom.Node.ELEMENT_NODE: - continue; - - dictionary = {} - listname = list_item_node.nodeName - for x in xrange(list_item_node.attributes.length): - attr = list_item_node.attributes.item(x) - dictionary[attr.name] = attr.value - - bp.append(listname, dictionary) - - self.usedplugins[bp.name] = bp diff --git a/deltarepo/deltarepo/deltarepos.py b/deltarepo/deltarepo/deltarepos.py deleted file mode 100644 index 2dcc4dd..0000000 --- a/deltarepo/deltarepo/deltarepos.py +++ /dev/null @@ -1,220 +0,0 @@ -""" -Object representation deltarepos.xml -""" - -import os -import tempfile -import createrepo_c as cr -import xml.dom.minidom -from lxml import etree -from .errors import DeltaRepoError -from .xmlcommon import getNode, getRequiredNode -from .xmlcommon import getAttribute, getRequiredAttribute, getNumAttribute -from .xmlcommon import getValue - -# TODO: Remove total_size (it is redundant) - -class DeltaReposRecord(object): - def __init__(self): - self.location_base = None - self.location_href = None - self.size_total = None - self.revision_src = None - self.revision_dst = None - self.contenthash_src = None - self.contenthash_dst = None - self.contenthash_type = None - self.timestamp_src = None - self.timestamp_dst = None - - self.data = {} # { "primary": {"size": 123}, ... } - - self.repomd_timestamp = None - self.repomd_size = None - self.repomd_checksums = [] # [('type', 'value'), ...] - - #self.plugins = {} - - #def add_plugin(self, name, attrs=None): - # attrs = attrs or {} - # attrs['name'] = name - # for key, val in attrs.items(): - # if not isinstance(key, basestring) or not isinstance(val, basestring): - # raise TypeError("Strings expected, got ({0}, {1})".format(key, val)) - # self.plugins[name] = attrs - - def get_data(self, type): - return self.data.get(type, None) - - def set_data(self, type, size): - self.data[type] = {"size": int(size)} - - def _subelement(self, parent): - """Generate element""" - - attrs = {} - - deltarepo_el = etree.SubElement(parent, "deltarepo", attrs) - - # - if self.location_href: - attrs = { "href": self.location_href } - if self.location_base: - attrs["base"] = self.location_base - etree.SubElement(deltarepo_el, "location", attrs) - - # - if self.size_total: - attrs = { "total": unicode(self.size_total) } - etree.SubElement(deltarepo_el, "size", attrs) - - # - if self.revision_src and self.revision_dst: - attrs = { "src": self.revision_src, "dst": self.revision_dst } - etree.SubElement(deltarepo_el, "revision", attrs) - - # - if self.contenthash_src and self.contenthash_dst and self.contenthash_type: - attrs = { "src": unicode(self.contenthash_src), - "dst": unicode(self.contenthash_dst), - "type": unicode(self.contenthash_type)} - etree.SubElement(deltarepo_el, "contenthash", attrs) - - # - if self.timestamp_src and self.timestamp_dst: - attrs = { "src": unicode(self.timestamp_src), - "dst": unicode(self.timestamp_dst) } - etree.SubElement(deltarepo_el, "timestamp", attrs) - - # - metadata_types = sorted(self.data.keys()) - for mtype in metadata_types: - attrs = { "type": unicode(mtype), - "size": unicode(self.get_data(mtype).get("size", 0)) } - etree.SubElement(deltarepo_el, "data", attrs) - - # - repomd_el = etree.SubElement(deltarepo_el, "repomd", {}) - - # - if self.repomd_timestamp: - time_el = etree.SubElement(repomd_el, "timestamp", {}) - time_el.text = str(self.repomd_timestamp) - - # - if self.repomd_size: - size_el = 
etree.SubElement(repomd_el, "size", {}) - size_el.text = str(self.repomd_size) - - # - for type, value in self.repomd_checksums: - checksum_el = etree.SubElement(repomd_el, "checksum", {"type": type}) - checksum_el.text = str(value) - - # elements - #for plugin_attrs in self.plugins.values(): - # etree.SubElement(deltarepo_el, "plugin", plugin_attrs) - - return deltarepo_el - -class DeltaRepos(object): - """Object representation of deltarepos.xml""" - - def __init__(self): - self.records = [] - - def add_record(self, rec): - if not isinstance(rec, DeltaReposRecord): - raise TypeError("DeltaReposRecord object expected") - self.records.append(rec) - - def xmlparse(self, path): - """Parse data from an deltarepos.xml file""" - - _, tmp_path = tempfile.mkstemp() - cr.decompress_file(path, tmp_path, cr.AUTO_DETECT_COMPRESSION) - dom = xml.dom.minidom.parse(tmp_path) - os.remove(tmp_path) - - # element - elist_deltarepos = dom.getElementsByTagName("deltarepos") - if not elist_deltarepos or not elist_deltarepos[0]: - raise DeltaRepoError("Cannot parse {0}: No element" - "".format(path)) - - # elements - for node in elist_deltarepos[0].childNodes: - if node.nodeName != "deltarepo": - continue - - # element - rec = DeltaReposRecord() - - subnode = getRequiredNode(node, "location") - rec.location_base = getAttribute(subnode, "base") - rec.location_href = getRequiredAttribute(subnode, "href") - - subnode = getNode(node, "size") - if subnode: - rec.size_total = getNumAttribute(subnode, "total") - - subnode = getNode(node, "revision") - if subnode: - rec.revision_src = getAttribute(subnode, "src") - rec.revision_dst = getAttribute(subnode, "dst") - - subnode = getNode(node, "contenthash") - if subnode: - rec.contenthash_src = getAttribute(subnode, "src") - rec.contenthash_dst = getAttribute(subnode, "dst") - rec.contenthash_type = getAttribute(subnode, "type") - - subnode = getNode(node, "timestamp") - if subnode: - rec.timestamp_src = getNumAttribute(subnode, "src") - rec.timestamp_dst = getNumAttribute(subnode, "dst") - - subnodes = node.getElementsByTagName("data") or [] - for subnode in subnodes: - type = getAttribute(subnode, "type") - size= getNumAttribute(subnode, "size") - rec.set_data(type, size) - - # - repomdnode = getNode(node, "repomd") - if repomdnode: - subnode = getNode(repomdnode, "timestamp") - if subnode and getValue(subnode): - rec.repomd_timestamp = int(getValue(subnode)) - - subnode = getNode(repomdnode, "size") - if subnode and getValue(subnode): - rec.repomd_size = int(getValue(subnode)) - - checksumnodes = repomdnode.getElementsByTagName("checksum") - if checksumnodes: - for subnode in checksumnodes: - type = getAttribute(subnode, "type") - val = getValue(subnode) - if type and val: - rec.repomd_checksums.append((type, val)) - - # elements - #subnodes = node.getElementsByTagName("plugin") or [] - #for subnode in subnodes: - # attrs = {} - # name = getRequiredAttribute(subnode, "name") - # for i in xrange(subnode.attributes.length): - # attr = subnode.attributes.item(i) - # attrs[attr.name] = attr.value - # rec.add_plugin(name, attrs) - - self.records.append(rec) - - def xmldump(self): - """Generate XML""" - xmltree = etree.Element("deltarepos") - for rec in self.records: - rec._subelement(xmltree) - return etree.tostring(xmltree, pretty_print=True, - encoding="UTF-8", xml_declaration=True) diff --git a/deltarepo/deltarepo/errors.py b/deltarepo/deltarepo/errors.py deleted file mode 100644 index ad5eaae..0000000 --- a/deltarepo/deltarepo/errors.py +++ /dev/null @@ -1,10 +0,0 @@ - 
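-# Note: DeltaRepoPluginError extends DeltaRepoError, so a plain
-# "except DeltaRepoError" also catches plugin failures (sketch):
-#
-#     try:
-#         do_something()                 # hypothetical caller
-#     except DeltaRepoPluginError:       # plugin-specific failures first
-#         ...
-#     except DeltaRepoError:             # any other deltarepo error
-#         ...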
-__all__ = ["DeltaRepoError", "DeltaRepoPluginError"] - -class DeltaRepoError(Exception): - """Exception raised by deltarepo library""" - pass - -class DeltaRepoPluginError(DeltaRepoError): - """Exception raised by delta plugins of deltarepo library""" - pass diff --git a/deltarepo/deltarepo/generator.py b/deltarepo/deltarepo/generator.py deleted file mode 100644 index bef8b6c..0000000 --- a/deltarepo/deltarepo/generator.py +++ /dev/null @@ -1,382 +0,0 @@ -""" -DeltaRepo package for Python. -This is the library for generation, application and handling of -DeltaRepositories. -The library is builded on the Createrepo_c library and its a part of it. - -Copyright (C) 2013 Tomas Mlcoch - -""" - -import os -import shutil -import createrepo_c as cr -from .common import LoggingInterface -from .plugins_common import GlobalBundle, Metadata -from .deltametadata import DeltaMetadata, PluginBundle -from .common import DEFAULT_CHECKSUM_TYPE, DEFAULT_COMPRESSION_TYPE -from .plugins import GlobalBundle, PLUGINS, GENERAL_PLUGIN -from .util import calculate_content_hash, pkg_id_str -from .errors import DeltaRepoError - -__all__ = ['DeltaRepoGenerator'] - -class DeltaRepoGenerator(LoggingInterface): - - def __init__(self, - old_repo_path, - new_repo_path, - out_path=None, - logger=None, - contenthash_type="sha256", - compression_type="xz", - force_database=False, - ignore_missing=False): - - # Initialization - - self.ignore_missing = ignore_missing - - LoggingInterface.__init__(self, logger) - - self.out_path = out_path or "./" - - self.final_path = os.path.join(self.out_path, "repodata") - - self.new_repo_path = new_repo_path - self.new_repodata_path = os.path.join(self.new_repo_path, "repodata/") - self.new_repomd_path = os.path.join(self.new_repodata_path, "repomd.xml") - - self.old_repo_path = old_repo_path - self.old_repodata_path = os.path.join(self.old_repo_path, "repodata/") - self.old_repomd_path = os.path.join(self.old_repodata_path, "repomd.xml") - - self.delta_repo_path = out_path - self.delta_repodata_path = os.path.join(self.delta_repo_path, ".repodata/") - self.delta_repomd_path = os.path.join(self.delta_repodata_path, "repomd.xml") - - # contenthash type - self.contenthash_type_str = contenthash_type or "sha256" - self.compression_type_str = compression_type or "xz" - self.compression_type = cr.compression_type(self.compression_type_str) - - # Prepare Repomd objects - self.old_repomd = cr.Repomd(self.old_repomd_path) - self.new_repomd = cr.Repomd(self.new_repomd_path) - self.delta_repomd = cr.Repomd() - - # Use revision and tags - self.delta_repomd.set_revision(self.new_repomd.revision) - for tag in self.new_repomd.distro_tags: - self.delta_repomd.add_distro_tag(tag[1], tag[0]) - for tag in self.new_repomd.repo_tags: - self.delta_repomd.add_repo_tag(tag) - for tag in self.new_repomd.content_tags: - self.delta_repomd.add_content_tag(tag) - - # Load records - self.old_records = {} - self.new_records = {} - for record in self.old_repomd.records: - self.old_records[record.type] = record - for record in self.new_repomd.records: - self.new_records[record.type] = record - - old_record_types = set(self.old_records.keys()) - new_record_types = set(self.new_records.keys()) - - self.deleted_repomd_record_types = old_record_types - new_record_types - self.added_repomd_record_types = new_record_types - old_record_types - - # Important sanity checks (repo without primary is definitely bad) - if not "primary" in self.old_records: - raise DeltaRepoError("Missing \"primary\" metadata in old repo") - - if not 
"primary" in self.new_records: - raise DeltaRepoError("Missing \"primary\" metadata in new repo") - - # Detect type of checksum in the new repomd.xml (global) - self.checksum_type = cr.checksum_type(self.new_records["primary"].checksum_type) - if self.checksum_type == cr.UNKNOWN_CHECKSUM: - raise DeltaRepoError("Unknown checksum type used in new repo: %s" % \ - self.new_records["primary"].checksum_type) - - # TODO: Je treba detekovat typ checksumu, kdyz se stejne pro kazdej - # record nakonec detekuje znova??? - - # Detection if use unique md filenames - if self.new_records["primary"].location_href.split("primary")[0] != "": - self.unique_md_filenames = True - - self.old_contenthash = self.old_repomd.contenthash - self.new_contenthash = self.new_repomd.contenthash - - self.deltametadata = DeltaMetadata() - - # Prepare global bundle - self.globalbundle = GlobalBundle() - self.globalbundle.contenthash_type_str = self.contenthash_type_str - self.globalbundle.unique_md_filenames = self.unique_md_filenames - self.globalbundle.force_database = force_database - self.globalbundle.ignore_missing = ignore_missing - - def fill_deltametadata(self): - if not self.deltametadata: - return - - # Set revisions - self.deltametadata.revision_src = self.old_repomd.revision - self.deltametadata.revision_dst = self.new_repomd.revision - - # Set contenthashes - self.deltametadata.contenthash_type = \ - self.globalbundle.contenthash_type_str - self.deltametadata.contenthash_src = \ - self.globalbundle.calculated_old_contenthash - self.deltametadata.contenthash_dst = \ - self.globalbundle.calculated_new_contenthash - - # Set timestamps - timestamp_src = 0 - timestamp_dst = 0 - for rec in self.old_repomd.records: - timestamp_src = max(rec.timestamp, timestamp_src) - for rec in self.new_repomd.records: - timestamp_dst = max(rec.timestamp, timestamp_dst) - self.deltametadata.timestamp_src = timestamp_src - self.deltametadata.timestamp_dst = timestamp_dst - - def _new_metadata(self, metadata_type): - """Return Metadata Object for the metadata_type""" - - metadata = Metadata(metadata_type) - - metadata.checksum_type = DEFAULT_CHECKSUM_TYPE - metadata.compression_type = DEFAULT_COMPRESSION_TYPE - - # Output directory - metadata.out_dir = self.delta_repodata_path - - # Properties related to the first (old) repository - old_rec = self.old_records.get(metadata_type) - metadata.old_rec = old_rec - if old_rec: - metadata.old_fn = os.path.join(self.old_repo_path, old_rec.location_href) - if os.path.isfile(metadata.old_fn): - metadata.old_fn_exists = True - else: - msg = "File {0} doesn't exist in the old " \ - "repository!".format(metadata.old_fn) - self._warning(msg) - if not self.ignore_missing: - raise DeltaRepoError(msg + " Use --ignore-missing option " - "to ignore this error") - - # Properties related to the second (new) repository - new_rec = self.new_records.get(metadata_type) - metadata.new_rec = new_rec - if new_rec: - metadata.new_fn = os.path.join(self.new_repo_path, new_rec.location_href) - if os.path.isfile(metadata.new_fn): - metadata.new_fn_exists = True - - # Determine compression type - detected_compression_type = cr.detect_compression(metadata.new_fn) - if (detected_compression_type != cr.UNKNOWN_COMPRESSION): - metadata.compression_type = detected_compression_type - else: - self._warning("Cannot detect compression type for " - "{0}".format(metadata.new_fn)) - else: - msg = ("The file {0} doesn't exist in the new" - "repository!".format(metadata.new_fn)) - self._warning(msg) - if not self.ignore_missing: - 
raise DeltaRepoError(msg + " Use --ignore-missing option " - "to ignore this error") - - metadata.checksum_type = cr.checksum_type(new_rec.checksum_type) - - return metadata - - def check_content_hashes(self): - self._debug("Checking expected content hashes") - - c_old_contenthash = self.globalbundle.calculated_old_contenthash - c_new_contenthash = self.globalbundle.calculated_new_contenthash - - if not c_old_contenthash or not c_new_contenthash: - - pri_md = self._new_metadata("primary") - - if not c_old_contenthash: - if not pri_md.old_fn_exists: - raise DeltaRepoError("Old repository doesn't have " - "a primary metadata!") - c_old_contenthash = calculate_content_hash(pri_md.old_fn, - self.contenthash_type_str, - self._get_logger()) - if not c_new_contenthash: - if not pri_md.new_fn_exists: - raise DeltaRepoError("New repository doesn't have " - "a primary metadata!") - c_new_contenthash = calculate_content_hash(pri_md.new_fn, - self.contenthash_type_str, - self._get_logger()) - - self.globalbundle.calculated_old_contenthash = c_old_contenthash - self.globalbundle.calculated_new_contenthash = c_new_contenthash - - self._debug("Calculated content hash of the old repo: {0}".format( - c_old_contenthash)) - self._debug("Calculated content hash of the new repo: {0}".format( - c_new_contenthash)) - - if self.old_contenthash: - if self.old_contenthash != c_old_contenthash: - message = "Content hash of the \"{0}\" repository doesn't match "\ - "the real one ({1} != {2}).".format( - self.old_repo_path, self.old_contenthash, - self.globalbundle.calculated_old_contenthash) - self._error(message) - raise DeltaRepoError(message) - else: - self._debug("Content hash of the old repo matches ({0})".format( - self.old_contenthash)) - else: - self._debug("Content hash of the \"{0}\" is not part of its "\ - "repomd".format(self.old_repo_path)) - - if self.new_contenthash: - if self.new_contenthash != c_new_contenthash: - message = "Content hash of the \"{0}\" repository doesn't match "\ - "the real one ({1} != {2}).".format( - self.new_repo_path, self.new_contenthash, - self.globalbundle.calculated_new_contenthash) - self._error(message) - raise DeltaRepoError(message) - else: - self._debug("Content hash of the new repo matches ({0})".format( - self.new_contenthash)) - else: - self._debug("Content hash of the \"{0}\" is not part of its "\ - "repomd".format(self.new_repo_path)) - - def gen(self): - - # Prepare output path - os.mkdir(self.delta_repodata_path) - - # Set of types of processed metadata records ("primary", "primary_db"...) 
- processed_metadata = set() - - for plugin in PLUGINS: - - # Prepare metadata for the plugin - metadata_objects = {} - for metadata_name in plugin.METADATA: - metadata_object = self._new_metadata(metadata_name) - if metadata_object is not None: - metadata_objects[metadata_name] = metadata_object - - # Skip plugin if no supported metadata available - if not metadata_objects: - self._debug("Plugin {0}: Skipped - None of supported " \ - "metadata {1} available".format( - plugin.NAME, plugin.METADATA)) - continue - - # Prepare plugin bundle - pluginbundle = PluginBundle(plugin.NAME, plugin.VERSION) - self.deltametadata.add_pluginbundle(pluginbundle) - - # Use the plugin - self._debug("Plugin {0}: Active".format(plugin.NAME)) - plugin_instance = plugin(pluginbundle, self.globalbundle, - logger=self._get_logger()) - repomd_records = plugin_instance.gen(metadata_objects) - - # Put repomd records from processed metadatas to repomd - self._debug("Plugin {0}: Processed {1} record(s) " \ - "and produced:".format(plugin.NAME, metadata_objects.keys())) - for rec in repomd_records: - self._debug(" - {0}".format(rec.type)) - self.delta_repomd.set_record(rec) - - # Organization stuff - for md in metadata_objects.keys(): - processed_metadata.add(md) - - # Process rest of the metadata files - metadata_objects = {} - for rectype, rec in self.new_records.items(): - if rectype not in processed_metadata: - metadata_object = self._new_metadata(rectype) - if metadata_object is not None: - self._debug("To be processed by general delta plugin: " \ - "{0}".format(rectype)) - metadata_objects[rectype] = metadata_object - else: - self._debug("Not processed - even by general delta " \ - "plugin: {0}".format(rectype)) - - if metadata_objects: - # Use the plugin - pluginbundle = PluginBundle(GENERAL_PLUGIN.NAME, GENERAL_PLUGIN.VERSION) - self.deltametadata.add_pluginbundle(pluginbundle) - self._debug("Plugin {0}: Active".format(GENERAL_PLUGIN.NAME)) - plugin_instance = GENERAL_PLUGIN(pluginbundle, self.globalbundle, - logger=self._get_logger()) - repomd_records = plugin_instance.gen(metadata_objects) - - # Put repomd records from processed metadatas to repomd - self._debug("Plugin {0}: Processed {1} record(s) " \ - "and produced:".format(GENERAL_PLUGIN.NAME, metadata_objects.keys())) - for rec in repomd_records: - self._debug(" - {0}".format(rec.type)) - self.delta_repomd.set_record(rec) - - # Write out deltametadata.xml - self.fill_deltametadata() - deltametadata_xml = self.deltametadata.xmldump() - deltametadata_path = os.path.join(self.delta_repodata_path, "deltametadata.xml") - - if (self.compression_type != cr.UNKNOWN_COMPRESSION): - deltametadata_path += cr.compression_suffix(self.compression_type) - stat = cr.ContentStat(self.checksum_type) - f = cr.CrFile(deltametadata_path, cr.MODE_WRITE, - self.compression_type, stat) - f.write(deltametadata_xml) - f.close() - else: - open(deltametadata_path, "w").write(deltametadata_xml) - - deltametadata_rec = cr.RepomdRecord("deltametadata", deltametadata_path) - deltametadata_rec.load_contentstat(stat) - deltametadata_rec.fill(self.checksum_type) - if self.unique_md_filenames: - deltametadata_rec.rename_file() - self.delta_repomd.set_record(deltametadata_rec) - - # Check if calculated contenthashes match - self.check_content_hashes() - - # Prepare and write out the new repomd.xml - self._debug("Preparing repomd.xml ...") - deltacontenthash = "{0}-{1}".format(self.globalbundle.calculated_old_contenthash, - self.globalbundle.calculated_new_contenthash) - 
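-        # The delta repo's content hash is the "<oldhash>-<newhash>" pair,
-        # i.e. it names both the source repo it applies to and the expected
-        # result.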
self.delta_repomd.set_contenthash(deltacontenthash, self.contenthash_type_str) - self.delta_repomd.sort_records() - delta_repomd_xml = self.delta_repomd.xml_dump() - - self._debug("Writing repomd.xml ...") - open(self.delta_repomd_path, "w").write(delta_repomd_xml) - - # Final move - if os.path.exists(self.final_path): - self._warning("Destination dir already exists! Removing %s" % \ - self.final_path) - shutil.rmtree(self.final_path) - self._debug("Moving %s -> %s" % (self.delta_repodata_path, self.final_path)) - os.rename(self.delta_repodata_path, self.final_path) - diff --git a/deltarepo/deltarepo/plugins.py b/deltarepo/deltarepo/plugins.py deleted file mode 100644 index 48c4cf1..0000000 --- a/deltarepo/deltarepo/plugins.py +++ /dev/null @@ -1,1091 +0,0 @@ -import os -import os.path -import shutil -import hashlib -import filecmp -import createrepo_c as cr -from .plugins_common import GlobalBundle, Metadata -from .common import LoggingInterface, DEFAULT_CHECKSUM_NAME -from .errors import DeltaRepoPluginError - -# List of available plugins -PLUGINS = [] - -# Mapping - which metadata from deltarepo must be downloaded -# to get desired metadata. -METADATA_MAPPING = {} # { "wanted_metadata_type": ["required_metadata_from_deltarepo", ...] } - -# Plugin to gen/apply deltas over any metadata -# (over data that are not supported by other plugins) -GENERAL_PLUGIN = None - -# Files with this suffixes will be considered as already compressed -# The list is subset of: -# http://en.wikipedia.org/wiki/List_of_archive_formats -# Feel free to extend this list -COMPRESSION_SUFFIXES = [".bz2", ".gz", ".lz", ".lzma", ".lzo", ".xz", - ".7z", ".s7z", ".apk", ".rar", ".sfx", ".tgz", - ".tbz2", ".tlz", ".zip", ".zipx", ".zz"] - -class DeltaRepoPlugin(LoggingInterface): - - # Plugin name - NAME = "" - - # Plugin version (integer number!) - VERSION = 1 - - # List of Metadata this plugin takes care of. - # The plugin HAS TO do deltas for each of listed metadata and be able - # to apply deltas on them! - METADATA = [] - - # Says which delta metadata are needed to get required metadata - # e.g. { "primary": ["primary"], "filelists": ["primary", "filelists"] } - METADATA_MAPPING = {} - - def __init__(self, pluginbundle, globalbundle, logger=None): - - LoggingInterface.__init__(self, logger) - - # PluginBundle object. - # This object store data in persistent way to the generated delta repodata. - # This object is empty when gen() plugin method is called and plugin - # should use it to store necessary information. 
- # During apply() this object should be filled with data - # previously stored during gen() method - self.pluginbundle = pluginbundle - - # Global bundle carry - self.globalbundle = globalbundle - - # Internal stuff - self.__metadata_notes_cache = None - - def _log(self, level, msg): - new_msg = "{0}: {1}".format(self.NAME, msg) - LoggingInterface._log(self, level, new_msg) - - def _metadata_notes_from_plugin_bundle(self, type): - """From the pluginbundle extract info about specific metadata element""" - - if self.__metadata_notes_cache is None: - self.__metadata_notes_cache = {} - for dict in self.pluginbundle.get_list("metadata", []): - if "type" not in dict: - self._warning("Metadata element in deltametadata.xml hasn't " - "an attribute 'type'") - continue - self.__metadata_notes_cache[dict["type"]] = dict - - return self.__metadata_notes_cache.get(type) - - def _metadata_notes_to_plugin_bundle(self, type, dictionary): - """Store info about metadata persistently to pluginbundle""" - notes = {"type": type} - notes.update(dictionary) - self.pluginbundle.append("metadata", notes) - - def gen_use_original(self, md, compression_type=cr.NO_COMPRESSION): - """Function that takes original metadata file and - copy it to the delta repo unmodified. - Plugins could use this function when they cannot generate delta file - for some reason (eg. file is newly added, so delta is - meaningless/impossible).""" - - md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn)) - - # Compress or copy original file - stat = None - if (compression_type != cr.NO_COMPRESSION): - md.delta_fn += cr.compression_suffix(compression_type) - stat = cr.ContentStat(md.checksum_type) - cr.compress_file(md.new_fn, md.delta_fn, compression_type, stat) - else: - shutil.copy2(md.new_fn, md.delta_fn) - - # Prepare repomd record of xml file - rec = cr.RepomdRecord(md.metadata_type, md.delta_fn) - if stat is not None: - rec.load_contentstat(stat) - rec.fill(md.checksum_type) - if self.globalbundle.unique_md_filenames: - rec.rename_file() - md.delta_fn = rec.location_real - - return rec - - def apply_use_original(self, md, decompress=False): - """Reversal function for the gen_use_original""" - md.new_fn = os.path.join(md.out_dir, os.path.basename(md.delta_fn)) - - if decompress: - md.new_fn = md.new_fn.rsplit('.', 1)[0] - cr.decompress_file(md.delta_fn, md.new_fn, cr.AUTO_DETECT_COMPRESSION) - else: - shutil.copy2(md.delta_fn, md.new_fn) - - # Prepare repomd record of xml file - rec = cr.RepomdRecord(md.metadata_type, md.new_fn) - rec.fill(md.checksum_type) - if self.globalbundle.unique_md_filenames: - rec.rename_file() - md.new_fn = rec.location_real - - return rec - - def _gen_basic_delta(self, md, force_gen=False): - """Resolve some common situation during delta generation. 
-
-        There are several situations that can occur during
-        delta generation:
-
-        # - Metadata file has a record in repomd.xml and the file really exists
-        O - Metadata file has a record in repomd.xml but the file is missing
-        X - Metadata file doesn't have a record in repomd.xml
-
-        Old repository | New repository
-        ---------------|---------------
-              #        |       #        - Valid case
-              #        |       X        - Valid case - metadata was removed
-              #        |       O        - Invalid case - incomplete repo
-              X        |       #        - Valid case - metadata was added
-              X        |       X        - This shouldn't happen
-              X        |       O        - Invalid case - incomplete repo
-              O        |       #        - Invalid case - incomplete repo
-              O        |       X        - Invalid case - incomplete repo
-              O        |       O        - Invalid case - both repos are incomplete
-
-        By default, deltarepo raises an exception when an invalid
-        case is met. But if the --ignore-missing option is used,
-        deltarepo handles all the invalid cases gracefully.
-
-        For example:
-
-        O | # - Just copy the new metadata to the delta repo as is
-        O | X - Just ignore that the old metadata is missing
-        O | O - Just ignore this
-        # | O - Just ignore this
-        X | O - Just ignore this
-
-        Most delta plugins are only interested in the "# | #" case,
-        where both the old and the new metadata are available.
-        The other cases are mostly not important to the delta plugins.
-        That is why this function exists: it resolves the cases where
-        a sophisticated delta is not possible.
-
-        Returns (RC    - Return code,
-                 REC   - Repomd record,
-                 NOTES - Dict with persistent notes)
-
-        If RC is True, the delta plugin shouldn't continue with
-        processing of this metadata.
-        """
-
-        if not md:
-            # No metadata - Nothing to do
-            return (True, None, None)
-
-        md.delta_rec = None
-        md.delta_fn_exists = False
-
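-        # How callers consume the (rc, rec, notes) triple - this mirrors
-        # GeneralDeltaRepoPlugin.gen() further below (sketch):
-        #
-        #     rc, rec, notes = self._gen_basic_delta(md, force_gen=True)
-        #     if rec:
-        #         gen_repomd_recs.append(rec)
-        #     if notes:
-        #         self._metadata_notes_to_plugin_bundle(md.metadata_type, notes)
-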
-        if not md.old_rec and not md.new_rec:
-            # Neither metadata record exists.
-            self._debug("\"{0}\": Doesn't exist "
-                        "in any repo".format(md.metadata_type))
-            return (True, None, None)
-
-        if not md.new_rec:
-            # This record doesn't exist in the new version of the repodata,
-            # i.e. this metadata was removed in the new version of the repo
-            self._debug("\"{0}\": Removed in the new version of repodata"
-                        "".format(md.metadata_type))
-            return (True, None, None)
-
-        if not md.new_fn_exists:
-            # The new metadata file is missing
-            assert self.globalbundle.ignore_missing
-            self._warning("\"{0}\": Delta cannot be generated - new metadata "
-                          "are missing".format(md.metadata_type))
-            return (True, None, None)
-
-        if not md.old_rec or not md.old_fn_exists or \
-                (force_gen and not filecmp.cmp(md.old_fn, md.new_fn)):
-            # This metadata was newly added in the new version of repodata,
-            # or we are just missing the old version of this metadata,
-            # or we have both versions of the metadata but they are not the
-            # same and in that case we simply want to gen the delta as a copy
-            if not md.old_rec:
-                self._debug("\"{0}\": Newly added in the new version "
-                            "of repodata".format(md.metadata_type))
-            elif not md.old_fn_exists:
-                self._warning("\"{0}\": Delta cannot be generated - old metadata "
-                              "are missing - Using copy of the new one"
-                              "".format(md.metadata_type))
-            else:
-                self._debug("\"{0}\": Delta is just a copy of the new "
-                            "metadata".format(md.metadata_type))
-
-            # Suffix based detection of compression
-            compressed = False
-            for suffix in COMPRESSION_SUFFIXES:
-                if md.new_fn.endswith(suffix):
-                    compressed = True
-                    break
-
-            compression = cr.NO_COMPRESSION
-            if not compressed:
-                compression = cr.XZ
-
-            # Gen record
-            rec = self.gen_use_original(md, compression_type=compression)
-
-            notes = {}
-            notes["original"] = '1'
-            if compression != cr.NO_COMPRESSION:
-                notes["compressed"] = "1"
-
-            md.delta_rec = rec
-            md.delta_fn_exists = True
-
-            return (True, rec, notes)
-
-        # At this point we are sure that we have both metadata files
-
-        if filecmp.cmp(md.old_fn, md.new_fn):
-            # Both metadata files exist and are the same
-            self._debug("\"{0}\": Same in both versions of repodata"
-                        "".format(md.metadata_type))
-            notes = {}
-            if os.path.basename(md.old_fn) != os.path.basename(md.new_fn):
-                notes["new_name"] = os.path.basename(md.new_fn)
-
-            notes["unchanged"] = "1"
-            notes["checksum_name"] = cr.checksum_name_str(md.checksum_type)
-            return (True, None, notes)
-
-        # Both metadata files exist and are different -
-        # this is a job for a real delta plugin :)
-        return (False, None, None)
-
-    def _apply_basic_delta(self, md, notes):
-        """Reversal counterpart of _gen_basic_delta - resolve the cases
-        that don't need a real delta plugin during application."""
-
-        if not md:
-            # No metadata - Nothing to do
-            return (True, None)
-
-        # Init some stuff in md
-        # These variables should be set only if a new record was generated,
-        # otherwise they should be None/False
-        md.new_rec = None
-        md.new_fn_exists = False
-
-        if not notes:
-            # No notes - Nothing to do
-            return (True, None)
-
-        if not md.old_rec and not md.delta_rec:
-            # Neither metadata record exists.
- self._debug("\"{0}\": Doesn't exist " - "in any repo".format(md.metadata_type)) - return (True, None) - - if not md.delta_rec: - # This record is missing in delta repo - if notes.get("unchanged") != "1": - # This metadata were removed in the new version of repo - self._debug("\"{0}\": Removed in the new version of repodata" - "".format(md.metadata_type)) - return (True, None) - - # Copy from the old repo should be used - if not md.old_fn_exists: - # This is missing in the old repo - self._warning("\"{0}\": From old repo should be used, but " - "it is missing".format(md.metadata_type)) - return (True, None) - - # Use copy from the old repo - - # Check if old file should have a new name - basename = notes.get("new_name") - if not basename: - basename = os.path.basename(md.old_fn) - - md.new_fn = os.path.join(md.out_dir, basename) - - checksum_name = notes.get("checksum_name", DEFAULT_CHECKSUM_NAME) - checksum_type = cr.checksum_type(checksum_name) - - # Copy the file and create repomd record - shutil.copy2(md.old_fn, md.new_fn) - rec = cr.RepomdRecord(md.metadata_type, md.new_fn) - rec.fill(checksum_type) - if self.globalbundle.unique_md_filenames: - rec.rename_file() - md.new_fn = rec.location_real - - md.new_rec = rec - md.new_fn_exists = True - - return (True, rec) - - if not md.delta_fn_exists: - # Delta is missing - self._warning("\"{0}\": Delta file is missing" - "".format(md.metadata_type)) - return (True, None) - - # At this point we are sure, we have a delta file - - if notes.get("original") == "1": - # Delta file is the target file - - # Check if file should be uncompressed - decompress = False - if notes.get("compressed") == "1": - decompress = True - - rec = self.apply_use_original(md, decompress) - self._debug("\"{0}\": Used delta is just a copy") - - md.new_rec = rec - md.new_fn_exists = True - - return (True, rec) - - if not md.old_fn_exists: - # Old file is missing - self._warning("\"{0}\": Old file is missing" - "".format(md.metadata_type)) - return (True, None) - - # Delta file exists and it is not a copy nor metadata - # file from old repo should be used. 
- # this is job for a real delta plugin :) - return (False, None) - - def apply(self, metadata): - raise NotImplementedError("Not implemented") - - def gen(self, metadata): - raise NotImplementedError("Not implemented") - - -class GeneralDeltaRepoPlugin(DeltaRepoPlugin): - - NAME = "GeneralDeltaPlugin" - VERSION = 1 - METADATA = [] - METADATA_MAPPING = {} - - def gen(self, metadata): - - gen_repomd_recs = [] - - for md in metadata.values(): - rc, rec, notes = self._gen_basic_delta(md, force_gen=True) - assert rc - if rec: - gen_repomd_recs.append(rec) - if notes: - self._metadata_notes_to_plugin_bundle(md.metadata_type, notes) - - return gen_repomd_recs - - def apply(self, metadata): - - gen_repomd_recs = [] - - for md in metadata.values(): - notes = self._metadata_notes_from_plugin_bundle(md.metadata_type) - rc, rec = self._apply_basic_delta(md, notes) - assert rc - if rec: - gen_repomd_recs.append(rec) - - return gen_repomd_recs - -GENERAL_PLUGIN = GeneralDeltaRepoPlugin - - -class MainDeltaRepoPlugin(DeltaRepoPlugin): - - NAME = "MainDeltaPlugin" - VERSION = 1 - METADATA = ["primary", "filelists", "other", - "primary_db", "filelists_db", "other_db"] - METADATA_MAPPING = { - "primary": ["primary"], - "filelists": ["primary", "filelists"], - "other": ["primary", "other"], - "primary_db": ["primary"], - "filelists_db": ["primary", "filelists"], - "other_db": ["primary", "other"], - } - - def _pkg_id_tuple(self, pkg): - """Return tuple identifying a package in repodata. - (pkgId, location_href, location_base)""" - return (pkg.pkgId, pkg.location_href, pkg.location_base) - - def _pkg_id_str(self, pkg): - """Return string identifying a package in repodata. - This strings are used for the content hash calculation.""" - if not pkg.pkgId: - self._warning("Missing pkgId in a package!") - if not pkg.location_href: - self._warning("Missing location_href at package %s %s" % \ - (pkg.name, pkg.pkgId)) - - idstr = "%s%s%s" % (pkg.pkgId or '', - pkg.location_href or '', - pkg.location_base or '') - return idstr - - def _gen_db_from_xml(self, md): - """Gen sqlite db from the delta metadata. 
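-
-        Supports the primary, filelists and other types; the resulting
-        database is bz2-compressed and returned as a "<type>_db"
-        repomd record.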
- """ - mdtype = md.metadata_type - - if mdtype == "primary": - dbclass = cr.PrimarySqlite - parsefunc = cr.xml_parse_primary - elif mdtype == "filelists": - dbclass = cr.FilelistsSqlite - parsefunc = cr.xml_parse_filelists - elif mdtype == "other": - dbclass = cr.OtherSqlite - parsefunc = cr.xml_parse_other - else: - raise DeltaRepoPluginError("Unsupported type of metadata {0}".format(mdtype)) - - src_fn = md.new_fn - src_rec = md.new_rec - - md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(mdtype)) - db = dbclass(md.db_fn) - - def pkgcb(pkg): - db.add_pkg(pkg) - - parsefunc(src_fn, pkgcb=pkgcb) - - db.dbinfo_update(src_rec.checksum) - db.close() - - db_stat = cr.ContentStat(md.checksum_type) - db_compressed = md.db_fn+".bz2" - cr.compress_file(md.db_fn, None, cr.BZ2, db_stat) - os.remove(md.db_fn) - - # Prepare repomd record of database file - db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type), - db_compressed) - db_rec.load_contentstat(db_stat) - db_rec.fill(md.checksum_type) - if self.globalbundle.unique_md_filenames: - db_rec.rename_file() - - return db_rec - - def apply(self, metadata): - # Check input arguments - if "primary" not in metadata: - self._error("primary.xml metadata file is missing") - raise DeltaRepoPluginError("Primary metadata missing") - - gen_repomd_recs = [] - - removed_packages = {} - - pri_md = metadata.get("primary") - fil_md = metadata.get("filelists") - oth_md = metadata.get("other") - - def try_simple_delta(md, dbclass): - if not md: - return - - notes = self._metadata_notes_from_plugin_bundle(md.metadata_type) - if not notes: - self._warning("Metadata \"{0}\" doesn't have a record in " - "deltametadata.xml - Ignoring") - return True - rc, rec = self._apply_basic_delta(md, notes) - if not rc: - return False - if rec: - gen_repomd_recs.append(rec) - - if not md.new_fn_exists: - return True - - # Gen DB here - if self.globalbundle.force_database or notes.get("database") == "1": - rec = self._gen_db_from_xml(md) - gen_repomd_recs.append(rec) - - return True - - # At first try to simple delta - - simple_pri_delta = try_simple_delta(pri_md, cr.PrimarySqlite) - simple_fil_delta = try_simple_delta(fil_md, cr.FilelistsSqlite) - simple_oth_delta = try_simple_delta(oth_md, cr.OtherSqlite) - - if simple_pri_delta: - assert simple_fil_delta - assert simple_oth_delta - return gen_repomd_recs - - # Ignore already processed metadata - if simple_fil_delta: - fil_md = None - if simple_oth_delta: - oth_md = None - - # Make a dict of removed packages key is location_href, - # value is location_base - for record in self.pluginbundle.get_list("removedpackage", []): - location_href = record.get("location_href") - if not location_href: - continue - location_base = record.get("location_base") - removed_packages[location_href] = location_base - - # Prepare output xml files and check if dbs should be generated - # Note: This information are stored directly to the Metadata - # object which someone could see as little hacky. - def prepare_paths_in_metadata(md, xmlclass, dbclass): - if md is None: - return - - notes = self._metadata_notes_from_plugin_bundle(md.metadata_type) - if not notes: - # TODO: Add flag to ignore this kind of warnings (?) 
- self._warning("Metadata \"{0}\" doesn't have a record in " - "deltametadata.xml - Ignoring") - return - - suffix = cr.compression_suffix(md.compression_type) or "" - md.new_fn = os.path.join(md.out_dir, - "{0}.xml{1}".format( - md.metadata_type, suffix)) - md.new_f_stat = cr.ContentStat(md.checksum_type) - md.new_f = xmlclass(md.new_fn, - md.compression_type, - md.new_f_stat) - - if self.globalbundle.force_database or notes.get("database") == "1": - md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format( - md.metadata_type)) - md.db = dbclass(md.db_fn) - else: - md.db_fn = None - md.db = None - - # Primary - prepare_paths_in_metadata(pri_md, - cr.PrimaryXmlFile, - cr.PrimarySqlite) - - # Filelists - prepare_paths_in_metadata(fil_md, - cr.FilelistsXmlFile, - cr.FilelistsSqlite) - - # Other - prepare_paths_in_metadata(oth_md, - cr.OtherXmlFile, - cr.OtherSqlite) - - # Apply delta - all_packages = {} # dict { 'pkgId': pkg } - - old_contenthash_strings = [] - new_contenthash_strings = [] - - def old_pkgcb(pkg): - old_contenthash_strings.append(self._pkg_id_str(pkg)) - if pkg.location_href in removed_packages: - if removed_packages[pkg.location_href] == pkg.location_base: - # This package won't be in new metadata - return - new_contenthash_strings.append(self._pkg_id_str(pkg)) - all_packages[pkg.pkgId] = pkg - - def delta_pkgcb(pkg): - new_contenthash_strings.append(self._pkg_id_str(pkg)) - all_packages[pkg.pkgId] = pkg - - filelists_from_primary = True - if fil_md: - filelists_from_primary = False - - # Parse both old and delta primary.xml files - cr.xml_parse_primary(pri_md.old_fn, pkgcb=old_pkgcb, - do_files=filelists_from_primary) - cr.xml_parse_primary(pri_md.delta_fn, pkgcb=delta_pkgcb, - do_files=filelists_from_primary) - - # Calculate content hashes - h = hashlib.new(self.globalbundle.contenthash_type_str) - old_contenthash_strings.sort() - for i in old_contenthash_strings: - h.update(i) - self.globalbundle.calculated_old_contenthash = h.hexdigest() - - h = hashlib.new(self.globalbundle.contenthash_type_str) - new_contenthash_strings.sort() - for i in new_contenthash_strings: - h.update(i) - self.globalbundle.calculated_new_contenthash = h.hexdigest() - - # Sort packages - def cmp_pkgs(x, y): - # Compare only by filename - ret = cmp(os.path.basename(x.location_href), - os.path.basename(y.location_href)) - if ret != 0: - return ret - - # Compare by full location_href path - return cmp(x.location_href, y.location_href) - - all_packages_sorted = sorted(all_packages.values(), cmp=cmp_pkgs) - - def newpkgcb(pkgId, name, arch): - return all_packages.get(pkgId, None) - - # Parse filelists - if fil_md: - self._debug("Parsing filelists xmls") - cr.xml_parse_filelists(fil_md.old_fn, newpkgcb=newpkgcb) - cr.xml_parse_filelists(fil_md.delta_fn, newpkgcb=newpkgcb) - - if oth_md: - self._debug("Parsing other xmls") - cr.xml_parse_other(oth_md.old_fn, newpkgcb=newpkgcb) - cr.xml_parse_other(oth_md.delta_fn, newpkgcb=newpkgcb) - - num_of_packages = len(all_packages_sorted) - - # Write out primary - self._debug("Writing primary xml: {0}".format(pri_md.new_fn)) - pri_md.new_f.set_num_of_pkgs(num_of_packages) - for pkg in all_packages_sorted: - pri_md.new_f.add_pkg(pkg) - if pri_md.db: - pri_md.db.add_pkg(pkg) - - # Write out filelists - if fil_md: - self._debug("Writing filelists xml: {0}".format(fil_md.new_fn)) - fil_md.new_f.set_num_of_pkgs(num_of_packages) - for pkg in all_packages_sorted: - fil_md.new_f.add_pkg(pkg) - if fil_md.db: - fil_md.db.add_pkg(pkg) - - # Write out other - if oth_md: - 
self._debug("Writing other xml: {0}".format(oth_md.new_fn)) - oth_md.new_f.set_num_of_pkgs(num_of_packages) - for pkg in all_packages_sorted: - oth_md.new_f.add_pkg(pkg) - if oth_md.db: - oth_md.db.add_pkg(pkg) - - # Finish metadata - def finish_metadata(md): - if md is None: - return - - # Close XML file - md.new_f.close() - - # Prepare repomd record of xml file - rec = cr.RepomdRecord(md.metadata_type, md.new_fn) - rec.load_contentstat(md.new_f_stat) - rec.fill(md.checksum_type) - if self.globalbundle.unique_md_filenames: - rec.rename_file() - - md.new_rec = rec - md.new_fn_exists = True - - gen_repomd_recs.append(rec) - - # Prepare database - if hasattr(md, "db") and md.db: - self._debug("Generating database: {0}".format(md.db_fn)) - md.db.dbinfo_update(rec.checksum) - md.db.close() - db_stat = cr.ContentStat(md.checksum_type) - db_compressed = md.db_fn+".bz2" - cr.compress_file(md.db_fn, None, cr.BZ2, db_stat) - os.remove(md.db_fn) - - # Prepare repomd record of database file - db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type), - db_compressed) - db_rec.load_contentstat(db_stat) - db_rec.fill(md.checksum_type) - if self.globalbundle.unique_md_filenames: - db_rec.rename_file() - - gen_repomd_recs.append(db_rec) - - # Add records to the bundle - - finish_metadata(pri_md) - finish_metadata(fil_md) - finish_metadata(oth_md) - - return gen_repomd_recs - - def gen(self, metadata): - # Check input arguments - if "primary" not in metadata: - self._error("primary.xml metadata file is missing") - raise DeltaRepoPluginError("Primary metadata missing") - - gen_repomd_recs = [] - - # Medadata info that will be persistently stored - metadata_notes = {} - - pri_md = metadata.get("primary") - fil_md = metadata.get("filelists") - oth_md = metadata.get("other") - - def try_simple_delta(md, force_gen=False): - """Try to do simple delta. If successful, return True""" - rc, rec, notes = self._gen_basic_delta(md, force_gen=force_gen) - if not rc: - return False - if rec: - gen_repomd_recs.append(rec) - if not notes: - notes = {} - if metadata.get(md.metadata_type+"_db").new_fn_exists: - notes["database"] = "1" - else: - notes["database"] = "0" - self._metadata_notes_to_plugin_bundle(md.metadata_type, notes) - return True - - # At first try to do simple delta for primary - # If successful, force simple delta for filelists and other too - - simple_pri_delta = try_simple_delta(pri_md) - simple_fil_delta = try_simple_delta(fil_md, force_gen=simple_pri_delta) - simple_oth_delta = try_simple_delta(oth_md, force_gen=simple_pri_delta) - - if simple_pri_delta: - # Simple delta for primary means that simple deltas were done - # for all other metadata too - return gen_repomd_recs - - # At this point we know that simple delta for the primary wasn't done - # This mean that at lest for primary, both metadata files (the new one - # and the old one) exists, and we have to do a more sophisticated delta - - # Ignore files for which, the simple delta was successful - if simple_fil_delta: - fil_md = None - if simple_oth_delta: - oth_md = None - - # Prepare output xml files and check if dbs should be generated - # Note: This information are stored directly to the Metadata - # object which someone could see as little hacky. 
- def prepare_paths_in_metadata(md, xmlclass): - if md is None: - return None - - # Make a note about if the database should be generated - db_available = metadata.get(md.metadata_type+"_db").new_fn_exists - if db_available or self.globalbundle.force_database: - metadata_notes.setdefault(md.metadata_type, {})["database"] = "1" - else: - metadata_notes.setdefault(md.metadata_type, {})["database"] = "0" - - suffix = cr.compression_suffix(md.compression_type) or "" - md.delta_fn = os.path.join(md.out_dir, - "{0}.xml{1}".format( - md.metadata_type, suffix)) - md.delta_f_stat = cr.ContentStat(md.checksum_type) - md.delta_f = xmlclass(md.delta_fn, - md.compression_type, - md.delta_f_stat) - return md - - # Primary - pri_md = prepare_paths_in_metadata(pri_md, cr.PrimaryXmlFile) - - # Filelists - fil_md = prepare_paths_in_metadata(fil_md, cr.FilelistsXmlFile) - - # Other - oth_md = prepare_paths_in_metadata(oth_md, cr.OtherXmlFile) - - # Gen delta - - old_packages = set() - added_packages = {} # dict { 'pkgId': pkg } - added_packages_ids = [] # list of package ids - - old_contenthash_strings = [] - new_contenthash_strings = [] - - def old_pkgcb(pkg): - old_packages.add(self._pkg_id_tuple(pkg)) - old_contenthash_strings.append(self._pkg_id_str(pkg)) - - def new_pkgcb(pkg): - new_contenthash_strings.append(self._pkg_id_str(pkg)) - pkg_id_tuple = self._pkg_id_tuple(pkg) - if not pkg_id_tuple in old_packages: - # This package is only in new repodata - added_packages[pkg.pkgId] = pkg - added_packages_ids.append(pkg.pkgId) - else: - # This package is also in the old repodata - old_packages.remove(pkg_id_tuple) - - filelists_from_primary = True - if fil_md: - # Filelists will be parsed from filelists - filelists_from_primary = False - - cr.xml_parse_primary(pri_md.old_fn, pkgcb=old_pkgcb, do_files=False) - cr.xml_parse_primary(pri_md.new_fn, pkgcb=new_pkgcb, - do_files=filelists_from_primary) - - # Calculate content hashes - h = hashlib.new(self.globalbundle.contenthash_type_str) - old_contenthash_strings.sort() - for i in old_contenthash_strings: - h.update(i) - src_contenthash = h.hexdigest() - self.globalbundle.calculated_old_contenthash = src_contenthash - - h = hashlib.new(self.globalbundle.contenthash_type_str) - new_contenthash_strings.sort() - for i in new_contenthash_strings: - h.update(i) - dst_contenthash = h.hexdigest() - self.globalbundle.calculated_new_contenthash = dst_contenthash - - # Set the content hashes to the plugin bundle - self.pluginbundle.set("contenthash_type", self.globalbundle.contenthash_type_str) - self.pluginbundle.set("src_contenthash", src_contenthash) - self.pluginbundle.set("dst_contenthash", dst_contenthash) - - # Prepare list of removed packages - removed_pkgs = sorted(old_packages) - for _, location_href, location_base in removed_pkgs: - dictionary = {"location_href": location_href} - if location_base: - dictionary["location_base"] = location_base - self.pluginbundle.append("removedpackage", dictionary) - - num_of_packages = len(added_packages) - - # Filelists and Other cb - def newpkgcb(pkgId, name, arch): - return added_packages.get(pkgId, None) - - # Parse filelist.xml and write out its delta - if fil_md: - cr.xml_parse_filelists(fil_md.new_fn, newpkgcb=newpkgcb) - fil_md.delta_f.set_num_of_pkgs(num_of_packages) - for pkgid in added_packages_ids: - fil_md.delta_f.add_pkg(added_packages[pkgid]) - fil_md.delta_f.close() - - # Parse other.xml and write out its delta - if oth_md: - cr.xml_parse_other(oth_md.new_fn, newpkgcb=newpkgcb) - 
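-            # newpkgcb returns the package object that was already created
-            # while parsing primary, so the parser attaches the filelists/
-            # other data to that same instance; returning None makes the
-            # parser skip the package entirely.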
oth_md.delta_f.set_num_of_pkgs(num_of_packages) - for pkgid in added_packages_ids: - oth_md.delta_f.add_pkg(added_packages[pkgid]) - oth_md.delta_f.close() - - # Write out primary delta - # Note: Writing of primary delta has to be after parsing of filelists - # Otherwise cause missing files if filelists_from_primary was False - pri_md.delta_f.set_num_of_pkgs(num_of_packages) - for pkgid in added_packages_ids: - pri_md.delta_f.add_pkg(added_packages[pkgid]) - pri_md.delta_f.close() - - # Finish metadata - def finish_metadata(md): - if md is None: - return - - # Close XML file - md.delta_f.close() - - # Prepare repomd record of xml file - rec = cr.RepomdRecord(md.metadata_type, md.delta_fn) - rec.load_contentstat(md.delta_f_stat) - rec.fill(md.checksum_type) - if self.globalbundle.unique_md_filenames: - rec.rename_file() - - md.delta_rec = rec - md.delta_fn_exists = True - - gen_repomd_recs.append(rec) - - # Prepare database - if hasattr(md, "db") and md.db: - md.db.dbinfo_update(rec.checksum) - md.db.close() - db_stat = cr.ContentStat(md.checksum_type) - db_compressed = md.db_fn+".bz2" - cr.compress_file(md.db_fn, None, cr.BZ2, db_stat) - os.remove(md.db_fn) - - # Prepare repomd record of database file - db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type), - db_compressed) - db_rec.load_contentstat(db_stat) - db_rec.fill(md.checksum_type) - if self.globalbundle.unique_md_filenames: - db_rec.rename_file() - - gen_repomd_recs.append(db_rec) - - # Add records to medata objects - finish_metadata(pri_md) - finish_metadata(fil_md) - finish_metadata(oth_md) - - # Store data persistently - for metadata_type, notes in metadata_notes.items(): - self._metadata_notes_to_plugin_bundle(metadata_type, notes) - - return gen_repomd_recs - -PLUGINS.append(MainDeltaRepoPlugin) - -class GroupsDeltaRepoPlugin(DeltaRepoPlugin): - - NAME = "GroupDeltaRepoPlugin" - VERSION = 1 - METADATA = ["group", "group_gz"] - METADATA_MAPPING = { - "group": ["group"], - "group_gz": ["group", "group_gz"] - } - - def gen(self, metadata): - - gen_repomd_recs = [] - - md_group = metadata.get("group") - md_group_gz = metadata.get("group_gz") - - if md_group and not md_group.new_fn_exists: - md_group = None - - if md_group_gz and not md_group_gz.new_fn_exists: - md_group_gz = None - - if md_group: - rc, rec, notes = self._gen_basic_delta(md_group, force_gen=True) - assert rc - if rec: - gen_repomd_recs.append(rec) - if notes: - if md_group_gz: - notes["gen_group_gz"] = "1" - else: - notes["gen_group_gz"] = "0" - self._metadata_notes_to_plugin_bundle(md_group.metadata_type, - notes) - elif md_group_gz: - rc, rec, notes = self._gen_basic_delta(md_group_gz, force_gen=True) - assert rc - if rec: - gen_repomd_recs.append(rec) - if notes: - self._metadata_notes_to_plugin_bundle(md_group_gz.metadata_type, - notes) - - return gen_repomd_recs - - def apply(self, metadata): - - gen_repomd_recs = [] - - md_group = metadata.get("group") - md_group_gz = metadata.get("group_gz") - - if md_group and (not md_group.delta_fn_exists - and not md_group.old_fn_exists): - md_group = None - - if md_group_gz and (not md_group_gz.delta_fn_exists - and not md_group_gz.old_fn_exists): - md_group_gz = None - - if md_group: - notes = self._metadata_notes_from_plugin_bundle(md_group.metadata_type) - rc, rec = self._apply_basic_delta(md_group, notes) - assert rc - if rec: - gen_repomd_recs.append(rec) - if notes.get("gen_group_gz"): - # Gen group_gz metadata from the group metadata - stat = cr.ContentStat(md_group.checksum_type) - group_gz_fn = 
md_group.new_fn+".gz" - cr.compress_file(md_group.new_fn, group_gz_fn, cr.GZ, stat) - rec = cr.RepomdRecord("group_gz", group_gz_fn) - rec.load_contentstat(stat) - rec.fill(md_group.checksum_type) - if self.globalbundle.unique_md_filenames: - rec.rename_file() - gen_repomd_recs.append(rec) - elif md_group_gz: - notes = self._metadata_notes_from_plugin_bundle(md_group_gz.metadata_type) - rc, rec = self._apply_basic_delta(md_group_gz, notes) - assert rc - if rec: - gen_repomd_recs.append(rec) - - return gen_repomd_recs - -PLUGINS.append(GroupsDeltaRepoPlugin) - -for plugin in PLUGINS: - METADATA_MAPPING.update(plugin.METADATA_MAPPING) - -def needed_delta_metadata(required_metadata): - """ - @param required_metadata List of required metadatas. - @return None if required_metadata is None - List of needed delta metadata files - in case that required_metadata is list - """ - - if required_metadata is None: - return None - - needed_metadata = set(["deltametadata"]) - needed_metadata.add("primary") # Currently, we always need primary.xml - - for required in required_metadata: - if required in METADATA_MAPPING: - needed_metadata |= set(METADATA_MAPPING[required]) - else: - needed_metadata.add(required) - - return list(needed_metadata) \ No newline at end of file diff --git a/deltarepo/deltarepo/plugins_common.py b/deltarepo/deltarepo/plugins_common.py deleted file mode 100644 index 4d29f01..0000000 --- a/deltarepo/deltarepo/plugins_common.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Classes related for communication between plugins -and applicator/generator. -""" - -class GlobalBundle(object): - - __slots__ = ("contenthash_type_str", - "unique_md_filenames", - "calculated_old_contenthash", - "calculated_new_contenthash", - "force_database", - "ignore_missing") - - def __init__(self): - self.contenthash_type_str = "sha256" - self.unique_md_filenames = True - self.force_database = False - self.ignore_missing = False - - # Filled by plugins - self.calculated_old_contenthash = None - self.calculated_new_contenthash = None - -class Metadata(object): - """Metadata file""" - - def __init__(self, metadata_type): - - self.metadata_type = metadata_type - - # Output directory - self.out_dir = None - - # Repomd records (if available in corresponding repomd.xml) - self.old_rec = None - self.delta_rec = None - self.new_rec = None - - # Paths - # If record is available in corresponding repomd.xml - self.old_fn = None # in old (source) repository - self.delta_fn = None # in delta repository - self.new_fn = None # in new (target) repository - - # Exists the file on the filesystem? 
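-        # (one flag per repository: old/source, delta, and new/target)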
- self.old_fn_exists = False - self.delta_fn_exists = False - self.new_fn_exists = False - - # Settings - self.checksum_type = None - self.compression_type = None \ No newline at end of file diff --git a/deltarepo/deltarepo/updater_common.py b/deltarepo/deltarepo/updater_common.py deleted file mode 100644 index b7ee195..0000000 --- a/deltarepo/deltarepo/updater_common.py +++ /dev/null @@ -1,642 +0,0 @@ -import shutil -import os -import pprint -import os.path -import time -import librepo -import tempfile -import createrepo_c as cr -from .applicator import DeltaRepoApplicator -from .deltarepos import DeltaRepos -from .common import LoggingInterface, calculate_contenthash -from .errors import DeltaRepoError - -class _Repo(object): - """Base class for LocalRepo and OriginRepo classes.""" - - def __init__ (self): - self.path = None - self.timestamp = None - self.revision = None - self.contenthash = None # Calculated content hash - self.contenthash_type = None # Type of calculated content hash - self.repomd_contenthash = None # Content hash from repomd - self.repomd_contenthash_type = None # Content hash from repomd - self.listed_metadata = [] # ["primary", "filelists", ...] - self.present_metadata = [] # Metadata files which really exist in repo - self._repomd = None # createrepo_c.Repomd() object - - def _fill_from_repomd_object(self, repomd, check_metadata_presence=False): - timestamp = -1 - listed_metadata = [] - for rec in repomd.records: - if rec.timestamp: - timestamp = max(timestamp, rec.timestamp) - listed_metadata.append(rec.type) - - self.revision = repomd.revision - self.timestamp = timestamp - self.listed_metadata = listed_metadata - self._repomd = repomd - - def _fill_from_path(self, path, contenthash=True, contenthash_type="sha256"): - """Fill the repo attributes from a repository specified by path. - @param path path to repository (a dir that contains - repodata/ subdirectory) - @param contenthash calculate content hash? (primary metadata must - be available in the repo) - @param contenthash_type type of the calculated content hash - """ - - if not os.path.isdir(path) or \ - not os.path.isdir(os.path.join(path, "repodata/")) or \ - not os.path.isfile(os.path.join(path, "repodata/repomd.xml")): - raise DeltaRepoError("Not a repository: {0}".format(path)) - - repomd_path = os.path.join(path, "repodata/repomd.xml") - repomd = cr.Repomd(repomd_path) - - self.repomd_contenthash = repomd.contenthash - self.repomd_contenthash_type = repomd.contenthash_type - - self._fill_from_repomd_object(repomd) - - primary_path = None - for rec in repomd.records: - md_path = os.path.join(path, rec.location_href) - if os.path.isfile(md_path): - self.present_metadata.append(rec.type) - if rec.type == "primary": - primary_path = md_path - - if contenthash: - if not primary_path: - raise DeltaRepoError("{0} - primary metadata are missing" - "".format(primary_path)) - self.contenthash = calculate_contenthash(primary_path, contenthash_type) - self.contenthash_type = contenthash_type - - self.path = path - - def cost(self, whitelisted_metadata=None): - cost = 0 # TODO: Include size of repomd.xml (?) 
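-        # Sums the sizes of the metadata records, optionally restricted to
-        # a whitelist - presumably so the updater can weigh a chain of
-        # delta repos against a plain full download.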
- for rec in self._repomd.records: - if whitelisted_metadata and rec.type not in whitelisted_metadata: - continue - cost += rec.size or 0 - return cost - - -class LocalRepo(_Repo): - def __init__ (self): - _Repo.__init__(self) - - @classmethod - def from_path(cls, path, contenthash_type="sha256", calc_contenthash=True): - """Create a LocalRepo object from a path to the repo.""" - lr = cls() - lr._fill_from_path(path, - contenthash=calc_contenthash, - contenthash_type=contenthash_type) - return lr - -class OriginRepo(_Repo): - # TODO: Keep the downloaded repomd.xml - - def __init__ (self): - _Repo.__init__(self) - - self.urls = [] - self.mirrorlist = None - self.metalink = None - - @classmethod - def from_url(cls, urls=None, mirrorlist=None, metalink=None): - if not urls and not mirrorlist and not metalink: - raise AttributeError("At least one argument must be specified") - - tmpdir = tempfile.mkdtemp(prefix="deltarepo-updater-", dir="/tmp") - - h = librepo.Handle() - h.repotype = librepo.YUMREPO - h.urls = urls - h.mirrorlisturl = mirrorlist - h.metalinkurl = metalink - h.yumdlist = [] - h.destdir = tmpdir - - try: - r = h.perform() - except librepo.LibrepoException as e: - shutil.rmtree(tmpdir) - raise DeltaRepoError("Cannot download ({0}, {1}, {2}): {3}".format( - urls, mirrorlist, metalink, e)) - - repo = cls() - repo._fill_from_path(tmpdir, contenthash=False) - - repo.path = None - repo.urls = urls - repo.mirrorlist = mirrorlist - repo.metalink = metalink - - shutil.rmtree(tmpdir) - return repo - - @classmethod - def from_local_repomd(cls, repomd_path): - """Create OriginRepo object from the local repomd.xml. - @param path path to the repomd.xml""" - repomd = cr.Repomd(repomd_path) - repo = cls() - repo._fill_from_repomd_object(repomd) - return repo - -class DRMirror(object): - def __init__(self): - self.url = None - self.records = [] # list of DeltaReposRecord - self.deltarepos = None # DeltaRepos object - - @classmethod - def from_url(cls, url): - # TODO: support for metalink and mirrorlist - fd, fn = tempfile.mkstemp(prefix="deltarepos.xml.xz-", dir="/tmp") - - # Download deltarepos.xml - deltarepos_xml_url = os.path.join(url, "deltarepos.xml.xz") - try: - librepo.download_url(deltarepos_xml_url, fd) - except librepo.LibrepoException as e: - os.remove(fn) - raise DeltaRepoError("Cannot download {0}: {1}".format( - deltarepos_xml_url, e)) - - # Parse deltarepos.xml - dr = DeltaRepos() - try: - dr.xmlparse(fn) - except DeltaRepoError as e: - raise DeltaRepoError("Error while parsing deltarepos.xml " - "from {0}: {1}".format(deltarepos_xml_url, e)) - finally: - os.remove(fn) - - # Fill and return DRMirror object - drm = cls() - drm.url = url # Url of the mirror - drm.records = dr.records # List of DeltaReposRecords - drm.deltarepos = dr # DeltaRepos object - return drm - -class Link(object): - """Graph's link (path) = a delta repository - from one point of history (version) to another. 
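(A short sketch of how the LocalRepo and OriginRepo classes above are meant to be driven; the path and URL are placeholders:)

from deltarepo.updater_common import LocalRepo, OriginRepo

local = LocalRepo.from_path("/srv/myrepo")                       # placeholder path
print(local.contenthash_type, local.contenthash)                 # computed from primary.xml
origin = OriginRepo.from_url(urls=["http://example.com/repo/"])  # placeholder URL
print(origin.revision, origin.timestamp)                         # filled from the downloaded repomd.xml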
- """ - - def __init__(self): - self._deltareposrecord = None # DeltaReposRecord() - self._drmirror = None # DRMirror() - - #def __getattr__(self, item): - # if hasattr(self.deltareposrecord, item): - # return getattr(self.deltareposrecord, item, None) - # raise AttributeError("object has no attribute '{0}'".format(item)) - - def __repr__(self): - return "\'{1}\' ({2})>".format( - self.src, self.dst, self.cost()) - - @property - def src(self): - """Source content hash""" - return self._deltareposrecord.contenthash_src - - @property - def dst(self): - """Destination content hash.""" - return self._deltareposrecord.contenthash_dst - - @property - def type(self): - """Type of content hash (e.g., sha256, etc.) """ - return self._deltareposrecord.contenthash_type - - @property - def contenthash_src(self): - """Source content hash""" - return self._deltareposrecord.contenthash_src - - @property - def contenthash_dst(self): - """Destination content hash.""" - return self._deltareposrecord.contenthash_dst - - @property - def contenthash_type(self): - """Type of content hash (e.g., sha256, etc.) """ - return self._deltareposrecord.contenthash_type - @property - def revision_src(self): - """Source repo revision""" - return self._deltareposrecord.revision_src - - @property - def revision_dst(self): - """Destination repo revision""" - return self._deltareposrecord.revision_dst - - @property - def timestamp_src(self): - """Source repo timestamp""" - return self._deltareposrecord.timestamp_src - - @property - def timestamp_dst(self): - """Destination repo timestamp""" - return self._deltareposrecord.timestamp_dst - - @property - def mirrorurl(self): - """Mirror url""" - return self._drmirror.url - - @property - def deltarepourl(self): - """Delta repo url""" - if self._deltareposrecord.location_base: - url = os.path.join(self._deltareposrecord.location_base, - self._deltareposrecord.location_href) - else: - url = os.path.join(self.mirrorurl, - self._deltareposrecord.location_href) - return url - - def cost(self, whitelisted_metadata=None): - """Cost (currently just a total size). 
-        in the future maybe just the sizes of the needed delta metadata)."""
-        if whitelisted_metadata is None:
-            return self._deltareposrecord.size_total
-
-        cost = self._deltareposrecord.repomd_size
-        for md_type in whitelisted_metadata:
-            md = self._deltareposrecord.get_data(md_type)
-            if md:
-                cost += md.get("size", 0)
-        return cost
-
-    @classmethod
-    def links_from_drmirror(cls, drmirror):
-        links = []
-        for rec in drmirror.records:
-            link = cls()
-            link._deltareposrecord = rec
-            link._drmirror = drmirror
-            links.append(link)
-        return links
-
-class ResolvedPath():
-    """Path resolved by solver"""
-    def __init__(self, resolved_path):
-        self._path = resolved_path  # List of Link objects
-
-    def __str__(self):
-        return "<ResolvedPath {0}>".format(self._path)
-
-    def __len__(self):
-        return len(self._path)
-
-    def __iter__(self):
-        return self._path.__iter__()
-
-    def __getitem__(self, item):
-        return self._path.__getitem__(item)
-
-    def path(self):
-        return self._path
-
-    def cost(self, whitelisted_metadata=None):
-        cost = 0
-        for link in self._path:
-            cost += link.cost(whitelisted_metadata)
-        return cost
-
-class Solver(LoggingInterface):
-
-    class Node(object):
-        """Single graph node"""
-        def __init__(self, value):
-            self.value = value    # Content hash
-            self.links = []       # List of all links that belong to the node
-                                  # All of them must have self.value as a src value
-            self.targets = {}     # { Node: Link }
-            self.sources = set()  # set(Nodes)
-
-        def __repr__(self):
-            targets = [x.value for x in self.targets]
-            return "<Node {0} ({1}) targets: {2}>".format(
-                id(self), self.value, targets)
-
-    class Graph(LoggingInterface):
-        def __init__(self, contenthash_type="sha256", logger=None):
-            LoggingInterface.__init__(self, logger)
-
-            self.nodes = {}  # { 'content_hash': Node }
-            self.contenthash_type = contenthash_type
-
-        def get_node(self, contenthash):
-            return self.nodes.get(contenthash)
-
-        def graph_from_links(self, links):
-            already_processed_links = set()  # Set of tuples (src, dst)
-            nodes = {}                       # { 'content_hash': Node }
-
-            for link in links:
-                if self.contenthash_type != link.type.lower():
-                    self._warning("Content hash type mismatch {0} vs {1}"
-                                  "".format(self.contenthash_type, link.type))
-                    continue
-
-                if (link.src, link.dst) in already_processed_links:
-                    self._warning("Duplicated path {0}->{1} from {2} skipped"
-                                  "".format(link.src, link.dst, link.mirrorurl))
-                    continue
-                # Remember the pair so the duplicate check above can trigger
-                already_processed_links.add((link.src, link.dst))
-
-                node = nodes.setdefault(link.src, Solver.Node(link.src))
-                dst_node = nodes.setdefault(link.dst, Solver.Node(link.dst))
-
-                if dst_node in node.targets:
-                    # Should not happen (the already_processed_links
-                    # set should avoid this)
-                    self._warning("Duplicated path {0}->{1} from {2} skipped"
-                                  "".format(link.src, link.dst, link.mirrorurl))
-                    continue
-
-                dst_node.sources.add(node)
-                node.targets[dst_node] = link
-
-            self.links = links
-            self.nodes = nodes
-
-    def __init__(self, links, source, target, contenthash_type="sha256",
-                 whitelisted_metadata=None, logger=None):
-        LoggingInterface.__init__(self, logger)
-
-        self.links = links       # Links
-        self.source_ch = source  # Source content hash (str)
-        self.target_ch = target  # Target content hash (str)
-        self.contenthash_type = contenthash_type
-        self.whitelisted_metadata = whitelisted_metadata
-
-    def solve(self):
-        # Build the graph
-        graph = self.Graph(self.contenthash_type, logger=self.logger)
-        graph.graph_from_links(self.links)
-
-        if self.source_ch == self.target_ch:
-            raise DeltaRepoError("Source and target content hashes are the same {0}"
-                                 "".format(self.source_ch))
-
-        # Find start and end node in the graph
-        source_node =
graph.get_node(self.source_ch) - if not source_node: - raise DeltaRepoError("Source repo ({0}) not available".format(self.source_ch)) - target_node = graph.get_node(self.target_ch) - if not target_node: - raise DeltaRepoError("Target repo ({0}) not available".format(self.target_ch)) - - # Dijkstra's algorithm - # http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm - dist = {} # Distance - previous = {} # Predecessor - Q = [] - - for _, node in graph.nodes.items(): - dist[node] = -1 # -1 Stands for infinity here - previous[node] = None - Q.append(node) - - dist[source_node] = 0 - - while Q: - u = None - val = -1 - # Select node from Q with the smallest distance in dist - for node in Q: - if dist[node] == -1: - continue - if val == -1 or dist[node] < val: - val = dist[node] - u = node - - if u: - # Remove the u from the queue - Q.remove(u) - else: - # All remaining nodes are inaccessible from source - break - - if u == target_node: - # Cool! - break - - # Iterate over the u neighbors - for v, link in u.targets.items(): - alt = dist[u] + link.cost(self.whitelisted_metadata) - if alt < dist[v] or dist[v] == -1: - dist[v] = alt - previous[v] = u - - # At this point we have previous and dist lists filled - self._debug("Solver: List of previous nodes:\n{0}" - "".format(pprint.pformat(previous))) - self._debug("Solver: Distances:\n{0}" - "".format(pprint.pformat(dist))) - - resolved_path = [] - u = target_node - while previous[u] is not None: - resolved_path.append(previous[u].targets[u]) - u = previous[u] - resolved_path.reverse() - self._debug("Resolved path {0}".format(resolved_path)) - - if resolved_path: - return ResolvedPath(resolved_path) - return None - -class UpdateSolver(LoggingInterface): - - def __init__(self, drmirrors, whitelisted_metadata=None, logger=None): - LoggingInterface.__init__(self, logger) - - if not isinstance(drmirrors, list): - raise AttributeError("List of drmirrors expected") - - self.whitelisted_metadata = whitelisted_metadata - - self._drmirrors = drmirrors or [] # [DeltaRepos, ...] - self._links = [] # Link objects from the DeltaRepos objects - self._cached_resolved_path = {} # { (src_ch, dst_ch, ch_type): ResolvedPath } - - self._fill_links() - - def _fill_links(self): - for drmirror in self._drmirrors: - links = Link.links_from_drmirror((drmirror)) - self._links.extend(links) - - def find_repo_contenthash(self, repo, contenthash_type="sha256"): - """Find (guess) Link for the OriginRepo. - Note: Currently, none of origin repos has contenthash in repomd.xml, - so we need to combine multiple metrics (revision, timestamp, ..) 
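(Driving the Solver above end to end; the content hashes are made-up placeholders and links is a list of Link objects gathered from the mirrors:)

solver = Solver(links,
                "6ba7b810...src", "6ba7b810...dst",  # hypothetical content hashes
                contenthash_type="sha256")
path = solver.solve()                                # ResolvedPath or None
if path:
    for link in path:
        print(link.deltarepourl, link.cost())
    print("total cost:", path.cost())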
- - @param repo OriginRepo - @param links list of Link objects - @return (contenthash_type, contenthash) or None""" - - if repo.contenthash and repo.contenthash_type \ - and repo.contenthash_type == contenthash_type: - return (repo.contenthash_type, repo.contenthash) - - for link in self._links: - matches = 0 - if repo.revision and link.revision_src and repo.timestamp and link.timestamp_src: - if repo.revision == link.revision_src and repo.timestamp == link.timestamp_src: - if link.contenthash_type == contenthash_type: - return (contenthash_type, link.contenthash_src) - if repo.revision and link.revision_dst and repo.timestamp and link.timestamp_dst: - if repo.revision == link.revision_dst and repo.timestamp == link.timestamp_dst: - if link.contenthash_type == contenthash_type: - return (contenthash_type, link.contenthash_dst) - - return (contenthash_type, None) - - def resolve_path(self, source_contenthash, target_contenthash, contenthash_type="sha256"): - # Try cache first - key = (source_contenthash, target_contenthash, contenthash_type) - if key in self._cached_resolved_path: - return self._cached_resolved_path[key] - - # Resolve the path - solver = Solver(self._links, source_contenthash, - target_contenthash, - contenthash_type=contenthash_type, - whitelisted_metadata=self.whitelisted_metadata, - logger=self.logger) - resolved_path = solver.solve() - - # Cache result - self._cached_resolved_path[key] = resolved_path - - return resolved_path - -class Updater(LoggingInterface): - - class DownloadedRepo(object): - def __init__(self, urls=[], mirrorlist=None, metalink=None): - # TODO: Downloading only selected metadatas - self.urls = urls - self.mirrorlist = mirrorlist - self.metalink = metalink - self.destdir = None - self.h = None # Librepo Handle() - self.r = None # Librepo Result() - - def download(self, destdir, wanted_metadata=None): - self.destdir = destdir - - h = librepo.Handle() - h.urls = self.urls - h.mirrorlisturl = self.mirrorlist - h.metalinkurl = self.metalink - h.repotype = librepo.YUMREPO - h.interruptible = True - h.destdir = destdir - h.yumdlist = wanted_metadata - r = librepo.Result() - # TODO: Catch exceptions - h.perform(r) - - self.h = h - self.r = r - - def __init__(self, localrepo, logger=None): - LoggingInterface.__init__(self, logger) - self.localrepo = localrepo - - def _get_tmpdir(self): - tmpdir = tempfile.mkdtemp(prefix="deltarepos-", dir="/tmp") - self._debug("Using temporary directory: {0}".format(tmpdir)) - return tmpdir - - def _final_move(self, src, dst, name="repodata"): - # TODO: Try - except and restore original data on error - # TODO: Skip copy if both src and dst are on the same device - dst_dirname = os.path.dirname(dst) - tmp_dst_basename = ".deltarepo-{0}-{1}-{2}".format(name, time.time(), os.getpid()) - tmp_dst = os.path.join(dst_dirname, tmp_dst_basename) - tmp_dst_backup = tmp_dst+"-backup" - - self._debug("Final move - STARTED") - self._debug("Source: {0}".format(src)) - self._debug("Destination: {0}".format(dst)) - self._debug(" + Copying: {0} -> {1}".format(src, tmp_dst)) - shutil.copytree(src, tmp_dst) - self._debug(" + Moving: {0} -> {1}".format(dst, tmp_dst_backup)) - shutil.move(dst, tmp_dst_backup) - self._debug(" + Moving: {0} -> {1}".format(tmp_dst, dst)) - shutil.move(tmp_dst, dst) - self._debug(" + Removing: {0}".format(tmp_dst_backup)) - shutil.rmtree(tmp_dst_backup) - self._debug("Final move - COMPLETE".format(src, dst)) - - def apply_resolved_path(self, resolved_path, whitelisted_metadata=None): - # TODO: Make it look better 
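(How the pieces above combine in practice; a sketch, with localrepo, originrepo and drmirror being instances of the classes defined in this module:)

updatesolver = UpdateSolver([drmirror])
_, source_ch = updatesolver.find_repo_contenthash(localrepo)
_, target_ch = updatesolver.find_repo_contenthash(originrepo)
resolved_path = updatesolver.resolve_path(source_ch, target_ch)  # cached per (src, dst, type)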
(progressbar, etc.) - counter = 1 - tmpdir = self._get_tmpdir() - tmprepo = tempfile.mkdtemp(prefix="targetrepo", dir=tmpdir) - prevrepo = self.localrepo.path - - for link in resolved_path: - - # Download repo - self._info("{0:2}/{1:<2} Downloading delta repo {2}".format( - counter, len(resolved_path), link.deltarepourl)) - dirname = "deltarepo_{0:02}".format(counter) - destdir = os.path.join(tmpdir, dirname) - os.mkdir(destdir) - repo = Updater.DownloadedRepo(urls=[link.deltarepourl]) - repo.download(destdir, wanted_metadata=whitelisted_metadata) - - # Apply repo - self._info("{0:2}/{1:<2} Applying delta repo".format( - counter, len(resolved_path))) - da = DeltaRepoApplicator(prevrepo, - destdir, - out_path=tmprepo, - logger=self.logger, - ignore_missing=True) - da.apply() - - counter += 1 - prevrepo = tmprepo - - # Move updated repo to the final destination - src = os.path.join(tmprepo, "repodata") - dst = os.path.join(self.localrepo.path, "repodata") - self._final_move(src, dst) - shutil.rmtree(tmpdir) - - def update_from_origin(self, origin_repo, wanted_metadata=None): - tmpdir = self._get_tmpdir() - downloaded_repo = Updater.DownloadedRepo(urls=origin_repo.urls, - mirrorlist=origin_repo.mirrorlist, - metalink=origin_repo.metalink) - downloaded_repo.download(tmpdir, wanted_metadata=wanted_metadata) - - # Move downloaded repo to the final destination - src = os.path.join(tmpdir, "repodata") - dst = os.path.join(self.localrepo.path, "repodata") - self._final_move(src, dst) - shutil.rmtree(tmpdir) diff --git a/deltarepo/deltarepo/util.py b/deltarepo/deltarepo/util.py deleted file mode 100644 index 084099a..0000000 --- a/deltarepo/deltarepo/util.py +++ /dev/null @@ -1,49 +0,0 @@ -import hashlib -import logging -import createrepo_c as cr - - -def log(logger, level, msg): - if not logger: - return - logger.log(level, msg) - -def pkg_id_str(pkg, logger=None): - """Return string identifying a package in repodata. 
-    These strings are used for the RepoId calculation."""
-    if not pkg.pkgId:
-        log(logger, logging.WARNING, "Missing pkgId in a package!")
-    if not pkg.location_href:
-        log(logger, logging.WARNING, "Missing location_href at "
-                                     "package %s %s" % (pkg.name, pkg.pkgId))
-
-    idstr = "%s%s%s" % (pkg.pkgId or '',
-                        pkg.location_href or '',
-                        pkg.location_base or '')
-    return idstr
-
-def calculate_content_hash(path_to_primary_xml, type="sha256", logger=None):
-    pkg_id_strs = []
-
-    def old_pkgcb(pkg):
-        pkg_id_strs.append(pkg_id_str(pkg, logger))
-
-    cr.xml_parse_primary(path_to_primary_xml, pkgcb=old_pkgcb, do_files=False)
-
-    pkg_id_strs.sort()
-
-    h = hashlib.new(type)
-    for i in pkg_id_strs:
-        h.update(i)
-    return h.hexdigest()
-
-def size_to_human_readable_str(size_in_bytes):
-    if size_in_bytes < 0:
-        return "{0}".format(size_in_bytes)
-
-    for x in ['B','KB','MB','GB']:
-        if size_in_bytes < 1024.0:
-            return "{0:1.3f} {1}".format(size_in_bytes, x)
-        size_in_bytes /= 1024.0
-    return "{0:1.3f} {1}".format(size_in_bytes, 'TB')
\ No newline at end of file
diff --git a/deltarepo/deltarepo/xmlcommon.py b/deltarepo/deltarepo/xmlcommon.py
deleted file mode 100644
index caf7bd4..0000000
--- a/deltarepo/deltarepo/xmlcommon.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-XML helpers and baseclasses
-"""
-
-from .errors import DeltaRepoError
-
-#
-# Helper functions for manipulating XML DOM objects
-#
-
-def getNode(node, name):
-    """Return the first node with the specified name or None"""
-    subnode = node.getElementsByTagName(name)
-    if not subnode or not subnode[0]:
-        return None
-    return subnode[0]
-
-def getRequiredNode(node, name):
-    """Return the first node with the specified name
-    or raise DeltaRepoError."""
-    subnode = node.getElementsByTagName(name)
-    if not subnode or not subnode[0]:
-        raise DeltaRepoError("Required element '{0}' in '{1}' is "
-                             "missing".format(name, node.nodeName))
-    return subnode[0]
-
-def getAttribute(node, name, default=None):
-    """Get a node attribute, or the value passed in the default param (None by default)"""
-    return node.getAttribute(name) or default
-
-def getNumAttribute(node, name, default=None):
-    strval = node.getAttribute(name)
-    if strval:
-        try:
-            return long(strval)
-        except ValueError:
-            raise DeltaRepoError("Expected an integral number in attribute "
-                                 "'{0}' but got '{1}'".format(name, strval))
-    return default
-
-def getRequiredAttribute(node, name):
-    if not node.hasAttribute(name):
-        raise DeltaRepoError("Required attribute '{0}' of '{1}' is "
-                             "missing".format(name, node.nodeName))
-    return node.getAttribute(name)
-
-def getValue(node, default=None):
-    if node.firstChild:
-        return node.firstChild.nodeValue
-    return default
diff --git a/deltarepo/example/deltarepos/gen.sh b/deltarepo/example/deltarepos/gen.sh
deleted file mode 100755
index 63a6aba..0000000
--- a/deltarepo/example/deltarepos/gen.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-rm -rfv deltarepo-*-*
-rm -rfv deltarepos.xml.xz
-../../managedeltarepos.py ../repos/repo1/ ../repos/repo2/
-../../managedeltarepos.py ../repos/repo2/ ../repos/repo3/
-
diff --git a/deltarepo/example/repos/packages b/deltarepo/example/repos/packages
deleted file mode 120000
index f7e6378..0000000
--- a/deltarepo/example/repos/packages
+++ /dev/null
@@ -1 +0,0 @@
-../../../tests/testdata/packages/
\ No newline at end of file
diff --git a/deltarepo/example/repos/pkgs_per_repo b/deltarepo/example/repos/pkgs_per_repo
deleted file mode 100644
index d199ee7..0000000
--- a/deltarepo/example/repos/pkgs_per_repo
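(Usage sketch for calculate_content_hash() above; the path is a placeholder. Because the per-package strings are sorted before hashing, the digest is independent of the package order inside primary.xml:)

from deltarepo.util import calculate_content_hash

digest = calculate_content_hash("repodata/primary.xml.gz")  # placeholder path
print(digest)  # hex digest, comparable across repos with identical content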
+++ /dev/null @@ -1,16 +0,0 @@ - -[ ./repo1/ ] -Archer-3.4.5-6.x86_64.rpm -fake_bash-1.1.1-1.x86_64.rpm - -[ ./repo2/ ] -Archer-3.4.5-6.x86_64.rpm -super_kernel-6.0.1-2.x86_64.rpm - -[ ./repo3/ ] -Archer-3.4.5-6.x86_64.rpm -balicek-utf8-1.1.1-1.x86_64.rpm -empty-0-0.x86_64.rpm -fake_bash-1.1.1-1.x86_64.rpm -super_kernel-6.0.1-2.x86_64.rpm - diff --git a/deltarepo/example/repos/regenrepos.sh b/deltarepo/example/repos/regenrepos.sh deleted file mode 100755 index 4059c76..0000000 --- a/deltarepo/example/repos/regenrepos.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` -MY_DIR="$MY_DIR/" - -# Note: -# If you wanna use different createrepo tool, set the CREATEREPO -# environ variable -# E.g.: -# $ CREATEREPO="createrepo" ./regenrepos.sh - -for dir in $MY_DIR/repo*/ -do - echo "### Regeneration of $dir" - pushd $dir - ./gen.sh - popd - sleep 1 - echo -done - -echo "" > pkgs_per_repo -for dir in ${MY_DIR}repo*/ -do - echo "[ $dir ]" >> pkgs_per_repo - cat $dir/pkglist | awk '{ split($0,a,"/"); print a[3] }' >> pkgs_per_repo - echo "" >> pkgs_per_repo -done - diff --git a/deltarepo/example/repos/repo1/foobar-1 b/deltarepo/example/repos/repo1/foobar-1 deleted file mode 100644 index 76fc659..0000000 --- a/deltarepo/example/repos/repo1/foobar-1 +++ /dev/null @@ -1 +0,0 @@ -a content \ No newline at end of file diff --git a/deltarepo/example/repos/repo1/gen.sh b/deltarepo/example/repos/repo1/gen.sh deleted file mode 100755 index c3eb7a0..0000000 --- a/deltarepo/example/repos/repo1/gen.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -if [[ -z "$MODIFYREPO" ]] -then - MODIFYREPO="modifyrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --groupfile group.xml --revision "1st repo" --content "A content tag" . 
-$MODIFYREPO --mdtype="foobar" foobar-1 repodata/ -popd diff --git a/deltarepo/example/repos/repo1/group.xml b/deltarepo/example/repos/repo1/group.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/example/repos/repo1/group.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/example/repos/repo1/pkglist b/deltarepo/example/repos/repo1/pkglist deleted file mode 100644 index 8f68735..0000000 --- a/deltarepo/example/repos/repo1/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/fake_bash-1.1.1-1.x86_64.rpm diff --git a/deltarepo/example/repos/repo1/repodata/06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752-foobar-1.gz b/deltarepo/example/repos/repo1/repodata/06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752-foobar-1.gz deleted file mode 100644 index f2aeb44..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752-foobar-1.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/0e43813b2bf4a47abcafa39072c0bc3279b8ec65ab34b929a6cd234f5b0e3e4f-other.sqlite.bz2 b/deltarepo/example/repos/repo1/repodata/0e43813b2bf4a47abcafa39072c0bc3279b8ec65ab34b929a6cd234f5b0e3e4f-other.sqlite.bz2 deleted file mode 100644 index a4bef99..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/0e43813b2bf4a47abcafa39072c0bc3279b8ec65ab34b929a6cd234f5b0e3e4f-other.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b-group.xml.gz b/deltarepo/example/repos/repo1/repodata/46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b-group.xml.gz deleted file mode 100644 index 6a703ab..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b-group.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/4b4a98a3883ca24ecda5cb105f9c162dfdfc6d074379e7c9e8311f9fde249407-primary.xml.gz b/deltarepo/example/repos/repo1/repodata/4b4a98a3883ca24ecda5cb105f9c162dfdfc6d074379e7c9e8311f9fde249407-primary.xml.gz deleted file mode 100644 index 86817d1..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/4b4a98a3883ca24ecda5cb105f9c162dfdfc6d074379e7c9e8311f9fde249407-primary.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/64e015b633e6cac5193a0a44403162cf90c39bb29cd8cb1df8e5ca0f2ec56436-filelists.sqlite.bz2 b/deltarepo/example/repos/repo1/repodata/64e015b633e6cac5193a0a44403162cf90c39bb29cd8cb1df8e5ca0f2ec56436-filelists.sqlite.bz2 deleted file mode 100644 index 2d4d227..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/64e015b633e6cac5193a0a44403162cf90c39bb29cd8cb1df8e5ca0f2ec56436-filelists.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/a5ad5ae1f43eae9e8e1b3fb6ee93e2cf8950708e0b3db8af3ee68f31dd0a38c2-other.xml.gz b/deltarepo/example/repos/repo1/repodata/a5ad5ae1f43eae9e8e1b3fb6ee93e2cf8950708e0b3db8af3ee68f31dd0a38c2-other.xml.gz deleted file mode 100644 index 9a27fee..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/a5ad5ae1f43eae9e8e1b3fb6ee93e2cf8950708e0b3db8af3ee68f31dd0a38c2-other.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5-group.xml 
b/deltarepo/example/repos/repo1/repodata/a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5-group.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/example/repos/repo1/repodata/a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5-group.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/example/repos/repo1/repodata/a81604899900f2f58ead1e4a1d903700efe7acf64f684d6e16c9ae5be16094a7-primary.sqlite.bz2 b/deltarepo/example/repos/repo1/repodata/a81604899900f2f58ead1e4a1d903700efe7acf64f684d6e16c9ae5be16094a7-primary.sqlite.bz2 deleted file mode 100644 index c1172b1..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/a81604899900f2f58ead1e4a1d903700efe7acf64f684d6e16c9ae5be16094a7-primary.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/fac6a651423edc252a488b74995641ec4514e6bca12864032c019dcce9614d81-filelists.xml.gz b/deltarepo/example/repos/repo1/repodata/fac6a651423edc252a488b74995641ec4514e6bca12864032c019dcce9614d81-filelists.xml.gz deleted file mode 100644 index 664319b..0000000 Binary files a/deltarepo/example/repos/repo1/repodata/fac6a651423edc252a488b74995641ec4514e6bca12864032c019dcce9614d81-filelists.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo1/repodata/repomd.xml b/deltarepo/example/repos/repo1/repodata/repomd.xml deleted file mode 100644 index 4b75af0..0000000 --- a/deltarepo/example/repos/repo1/repodata/repomd.xml +++ /dev/null @@ -1,80 +0,0 @@ - - - 1st repo - - A content tag - - - 4b4a98a3883ca24ecda5cb105f9c162dfdfc6d074379e7c9e8311f9fde249407 - 9276f812675ba0193df682253ece4b14faa8a1dda77a14217aa95a4a53f96caf - - 1392910623 - 1060 - 3916 - - - fac6a651423edc252a488b74995641ec4514e6bca12864032c019dcce9614d81 - 50c5f1c5645a8e52c16845c321d6b0fcf3bd6ea2227d5d49b1d8f0b585da31ed - - 1392910623 - 370 - 630 - - - a5ad5ae1f43eae9e8e1b3fb6ee93e2cf8950708e0b3db8af3ee68f31dd0a38c2 - db841078c111c4ae9c59ee996f6234d2dd4bf4f61a01892565bdaf6fb1c0879e - - 1392910623 - 438 - 939 - - - a81604899900f2f58ead1e4a1d903700efe7acf64f684d6e16c9ae5be16094a7 - 84d58ab2442be220c192439c5f528eb11997cd0d5bbd075e634ecb912870d3e8 - - 1392910623 - 2376 - 21504 - 10 - - - 64e015b633e6cac5193a0a44403162cf90c39bb29cd8cb1df8e5ca0f2ec56436 - 4d0dfa0564f4e57953ea58f1d5c28c4bd94b18e2ebf26c640c7c4a3051ec838a - - 1392910623 - 927 - 7168 - 10 - - - 0e43813b2bf4a47abcafa39072c0bc3279b8ec65ab34b929a6cd234f5b0e3e4f - 54719f2eeae2623c331373f79fe6ed61c9ef81ad861446f566ff31ab7eeff71f - - 1392910623 - 933 - 6144 - 10 - - - 06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752 - d2d2acf640179223bf9e1eb43c5fbf854c4e50ffb6733bc3a9279d3ff7de9be1 - - 1392910623 - 29 - 9 - - - a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5 - - 1392910623 - 140 - - - 46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b - a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5 - - 1392910623 - 141 - 140 - - diff --git a/deltarepo/example/repos/repo2/gen.sh b/deltarepo/example/repos/repo2/gen.sh deleted file mode 100755 index 941805f..0000000 --- a/deltarepo/example/repos/repo2/gen.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --revision foorevisionbar --distro cpe:/o:fedoraproject:fedora:17,foo --repo abc --content plm . 
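(The repomd.xml above records a checksum and an open-checksum for every metadata file; a generic verification sketch, with the file name truncated as a placeholder. With unique_md_filenames the digest also appears as the file name prefix:)

import hashlib

def file_sha256(path, chunk_size=1024**2):
    """Stream a file through sha256 -- the same scheme repomd.xml uses."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# e.g. file_sha256("repodata/4b4a98a3...-primary.xml.gz") == "4b4a98a3..."  (truncated)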
-popd diff --git a/deltarepo/example/repos/repo2/pkglist b/deltarepo/example/repos/repo2/pkglist deleted file mode 100644 index 7a9c7d1..0000000 --- a/deltarepo/example/repos/repo2/pkglist +++ /dev/null @@ -1,2 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/example/repos/repo2/repodata/6ab40c3a6ca6ed8fe5225e0927db0ecf717734e939a3fd13efd21583f6019881-other.sqlite.bz2 b/deltarepo/example/repos/repo2/repodata/6ab40c3a6ca6ed8fe5225e0927db0ecf717734e939a3fd13efd21583f6019881-other.sqlite.bz2 deleted file mode 100644 index fc5387f..0000000 Binary files a/deltarepo/example/repos/repo2/repodata/6ab40c3a6ca6ed8fe5225e0927db0ecf717734e939a3fd13efd21583f6019881-other.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo2/repodata/9ec52119c7d402f207202633c167eeafead7b8967bc8d324bd44159a1a741f31-primary.xml.gz b/deltarepo/example/repos/repo2/repodata/9ec52119c7d402f207202633c167eeafead7b8967bc8d324bd44159a1a741f31-primary.xml.gz deleted file mode 100644 index e7c42dd..0000000 Binary files a/deltarepo/example/repos/repo2/repodata/9ec52119c7d402f207202633c167eeafead7b8967bc8d324bd44159a1a741f31-primary.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo2/repodata/b0bae8ca67a1beec7a9c0e7cd524a64524546bfb6e67017684753b723c05786d-other.xml.gz b/deltarepo/example/repos/repo2/repodata/b0bae8ca67a1beec7a9c0e7cd524a64524546bfb6e67017684753b723c05786d-other.xml.gz deleted file mode 100644 index d799725..0000000 Binary files a/deltarepo/example/repos/repo2/repodata/b0bae8ca67a1beec7a9c0e7cd524a64524546bfb6e67017684753b723c05786d-other.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo2/repodata/c158146c6f1bd169e7ecf957f095347f24543cfa25a2d958effd154adb2f9036-filelists.sqlite.bz2 b/deltarepo/example/repos/repo2/repodata/c158146c6f1bd169e7ecf957f095347f24543cfa25a2d958effd154adb2f9036-filelists.sqlite.bz2 deleted file mode 100644 index dfac23a..0000000 Binary files a/deltarepo/example/repos/repo2/repodata/c158146c6f1bd169e7ecf957f095347f24543cfa25a2d958effd154adb2f9036-filelists.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo2/repodata/d8588001092f42ecf6cd95a14fb0a9adafb62dfc5905403def843c316f2e1ed4-primary.sqlite.bz2 b/deltarepo/example/repos/repo2/repodata/d8588001092f42ecf6cd95a14fb0a9adafb62dfc5905403def843c316f2e1ed4-primary.sqlite.bz2 deleted file mode 100644 index 8c3a591..0000000 Binary files a/deltarepo/example/repos/repo2/repodata/d8588001092f42ecf6cd95a14fb0a9adafb62dfc5905403def843c316f2e1ed4-primary.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo2/repodata/ef90d920195c5ff51755697ae989494aee8231f8e0522b4ae4b6beaf2ec8368b-filelists.xml.gz b/deltarepo/example/repos/repo2/repodata/ef90d920195c5ff51755697ae989494aee8231f8e0522b4ae4b6beaf2ec8368b-filelists.xml.gz deleted file mode 100644 index cf9780e..0000000 Binary files a/deltarepo/example/repos/repo2/repodata/ef90d920195c5ff51755697ae989494aee8231f8e0522b4ae4b6beaf2ec8368b-filelists.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo2/repodata/repomd.xml b/deltarepo/example/repos/repo2/repodata/repomd.xml deleted file mode 100644 index 99e84fd..0000000 --- a/deltarepo/example/repos/repo2/repodata/repomd.xml +++ /dev/null @@ -1,60 +0,0 @@ - - - foorevisionbar - - plm - abc - foo - - - 9ec52119c7d402f207202633c167eeafead7b8967bc8d324bd44159a1a741f31 - e2669510eec24c10d59e3a87372538cf0cc5ba3d2303e1b1652d7685e5bd7e6d - - 1392910624 - 1175 - 4667 - - - 
ef90d920195c5ff51755697ae989494aee8231f8e0522b4ae4b6beaf2ec8368b - 5158e6a020082865e81ae4a0766f6dcc17ba6c681b944834b2ed180c6dda4186 - - 1392910624 - 381 - 684 - - - b0bae8ca67a1beec7a9c0e7cd524a64524546bfb6e67017684753b723c05786d - 2033cf66edc430baab1a2e34d4805f565e92e1b069c03c65379491bc66e83c33 - - 1392910624 - 461 - 1061 - - - d8588001092f42ecf6cd95a14fb0a9adafb62dfc5905403def843c316f2e1ed4 - c3770704799007588bbe3f76957c2f49c4dce189090f830bc0aec8c7ba052a41 - - 1392910625 - 2581 - 21504 - 10 - - - c158146c6f1bd169e7ecf957f095347f24543cfa25a2d958effd154adb2f9036 - b4d8f432396a6da6b79eabfe13c543241ab1f24e95b2579db666e6ca27ba8790 - - 1392910625 - 992 - 7168 - 10 - - - 6ab40c3a6ca6ed8fe5225e0927db0ecf717734e939a3fd13efd21583f6019881 - 84a9c5d37145165ca62939a0f4c09113e3ca1180a8e3d1f26e094f565f73792d - - 1392910625 - 979 - 6144 - 10 - - diff --git a/deltarepo/example/repos/repo3/comps.xml b/deltarepo/example/repos/repo3/comps.xml deleted file mode 100644 index 068519c..0000000 --- a/deltarepo/example/repos/repo3/comps.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/example/repos/repo3/foobar b/deltarepo/example/repos/repo3/foobar deleted file mode 100644 index 76fc659..0000000 --- a/deltarepo/example/repos/repo3/foobar +++ /dev/null @@ -1 +0,0 @@ -a content \ No newline at end of file diff --git a/deltarepo/example/repos/repo3/gen.sh b/deltarepo/example/repos/repo3/gen.sh deleted file mode 100755 index 7cf5482..0000000 --- a/deltarepo/example/repos/repo3/gen.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -MY_DIR=`dirname $0` - -if [[ -z "$CREATEREPO" ]] -then - CREATEREPO="createrepo_c" -fi - -if [[ -z "$MODIFYREPO" ]] -then - MODIFYREPO="modifyrepo_c" -fi - -pushd "$MY_DIR" -$CREATEREPO $EXTRAARGS --pkglist pkglist --groupfile comps.xml --revision "3th repo" --content "Content tag 123456" . 
-$MODIFYREPO foobar repodata/ -popd diff --git a/deltarepo/example/repos/repo3/pkglist b/deltarepo/example/repos/repo3/pkglist deleted file mode 100644 index b0436ea..0000000 --- a/deltarepo/example/repos/repo3/pkglist +++ /dev/null @@ -1,5 +0,0 @@ -../packages/Archer-3.4.5-6.x86_64.rpm -../packages/balicek-utf8-1.1.1-1.x86_64.rpm -../packages/empty-0-0.x86_64.rpm -../packages/fake_bash-1.1.1-1.x86_64.rpm -../packages/super_kernel-6.0.1-2.x86_64.rpm diff --git a/deltarepo/example/repos/repo3/repodata/06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752-foobar.gz b/deltarepo/example/repos/repo3/repodata/06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752-foobar.gz deleted file mode 100644 index f2aeb44..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752-foobar.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/07125d910a68f01ac92f37921aa7fb4edd20a05e493c7b9123fd4cd43ed27b45-other.xml.gz b/deltarepo/example/repos/repo3/repodata/07125d910a68f01ac92f37921aa7fb4edd20a05e493c7b9123fd4cd43ed27b45-other.xml.gz deleted file mode 100644 index ffd75b3..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/07125d910a68f01ac92f37921aa7fb4edd20a05e493c7b9123fd4cd43ed27b45-other.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b-comps.xml.gz b/deltarepo/example/repos/repo3/repodata/46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b-comps.xml.gz deleted file mode 100644 index 6a703ab..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b-comps.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/5b18db33715f9cbdd50e72b14af24476bad7b065bfe0cf56f45bbebd6dd92b08-primary.sqlite.bz2 b/deltarepo/example/repos/repo3/repodata/5b18db33715f9cbdd50e72b14af24476bad7b065bfe0cf56f45bbebd6dd92b08-primary.sqlite.bz2 deleted file mode 100644 index a75baa0..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/5b18db33715f9cbdd50e72b14af24476bad7b065bfe0cf56f45bbebd6dd92b08-primary.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/609eca035393772a53575fa2792c238277034760e616e11fbaaf0517e1ea877f-other.sqlite.bz2 b/deltarepo/example/repos/repo3/repodata/609eca035393772a53575fa2792c238277034760e616e11fbaaf0517e1ea877f-other.sqlite.bz2 deleted file mode 100644 index 4bd13d7..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/609eca035393772a53575fa2792c238277034760e616e11fbaaf0517e1ea877f-other.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/7946a0f6e1676a33f763e12eddf0ce805c15a81d5de9d93ca81537eda34ccef4-filelists.sqlite.bz2 b/deltarepo/example/repos/repo3/repodata/7946a0f6e1676a33f763e12eddf0ce805c15a81d5de9d93ca81537eda34ccef4-filelists.sqlite.bz2 deleted file mode 100644 index 885dde0..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/7946a0f6e1676a33f763e12eddf0ce805c15a81d5de9d93ca81537eda34ccef4-filelists.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5-comps.xml b/deltarepo/example/repos/repo3/repodata/a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5-comps.xml deleted file mode 100644 index 068519c..0000000 --- 
a/deltarepo/example/repos/repo3/repodata/a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5-comps.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/deltarepo/example/repos/repo3/repodata/e6c1900810fb1155e7a29c59df839e9d20a369b5fb2cad87d880f664fa815e3f-filelists.xml.gz b/deltarepo/example/repos/repo3/repodata/e6c1900810fb1155e7a29c59df839e9d20a369b5fb2cad87d880f664fa815e3f-filelists.xml.gz deleted file mode 100644 index d9d311b..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/e6c1900810fb1155e7a29c59df839e9d20a369b5fb2cad87d880f664fa815e3f-filelists.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/f209787b500f81fc59f7babcfaa122db99cbe3c0817edd7336febc96b95ece33-primary.xml.gz b/deltarepo/example/repos/repo3/repodata/f209787b500f81fc59f7babcfaa122db99cbe3c0817edd7336febc96b95ece33-primary.xml.gz deleted file mode 100644 index e00345a..0000000 Binary files a/deltarepo/example/repos/repo3/repodata/f209787b500f81fc59f7babcfaa122db99cbe3c0817edd7336febc96b95ece33-primary.xml.gz and /dev/null differ diff --git a/deltarepo/example/repos/repo3/repodata/repomd.xml b/deltarepo/example/repos/repo3/repodata/repomd.xml deleted file mode 100644 index aa4d77c..0000000 --- a/deltarepo/example/repos/repo3/repodata/repomd.xml +++ /dev/null @@ -1,80 +0,0 @@ - - - 3th repo - - Content tag 123456 - - - f209787b500f81fc59f7babcfaa122db99cbe3c0817edd7336febc96b95ece33 - a09ebfe06c2ab1200ef8f05bb380f9540f1d06e6a1407f16c7f325eb20a86109 - - 1392910626 - 1740 - 8069 - - - e6c1900810fb1155e7a29c59df839e9d20a369b5fb2cad87d880f664fa815e3f - fb32b1e587f0a2499ea15fb2224d0dec9ce615f2661ba4757b3ea18ce56ccc76 - - 1392910626 - 560 - 1217 - - - 07125d910a68f01ac92f37921aa7fb4edd20a05e493c7b9123fd4cd43ed27b45 - cb344434c4f54eca225b3db0202163bd61f7524e3f18df4812f82c9377b0350e - - 1392910626 - 743 - 1863 - - - 5b18db33715f9cbdd50e72b14af24476bad7b065bfe0cf56f45bbebd6dd92b08 - 711c7cd2ab9c93a5a7349c8a1b225a6794fc87fa66232329f33273ce7e117a0c - - 1392910626 - 3439 - 23552 - 10 - - - 7946a0f6e1676a33f763e12eddf0ce805c15a81d5de9d93ca81537eda34ccef4 - ba39a46626a7b413d775ce685d52a81e536d748fa56af0558ecc8d109f48b55c - - 1392910626 - 1217 - 7168 - 10 - - - 609eca035393772a53575fa2792c238277034760e616e11fbaaf0517e1ea877f - d30994831b79ac3cb8a819c4f17cac0b9ae95f093539ffc216845f9f9216cd42 - - 1392910626 - 1322 - 6144 - 10 - - - 06d8204d74de57ed4050a72280d25d2e73dabf8298f94e415d12d3b847682752 - d2d2acf640179223bf9e1eb43c5fbf854c4e50ffb6733bc3a9279d3ff7de9be1 - - 1392910626 - 29 - 9 - - - a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5 - - 1392910626 - 140 - - - 46b296e6f4193dcf5cdc11f10be8472a7085c6d45f05a09da5ef4e4c80c8828b - a5e176f8963047438fee997c4cad6a5388ef85d0b22d72a48712cc91bf2821c5 - - 1392910626 - 141 - 140 - - diff --git a/deltarepo/example/test.sh b/deltarepo/example/test.sh deleted file mode 100755 index 6d9ab01..0000000 --- a/deltarepo/example/test.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -rm -rf test/ -mkdir test/ -cp -r repos/repo1 test/ -cp -r repos/repo3 test/ - -../repoupdater.py test/repo1/ $@ --repo file://`pwd`/test/repo3/ --drmirror file://`pwd`/deltarepos/ - -rm -rf test2/ -mkdir test2/ -cp -r repos/repo1 test2/ -cp -r repos/repo3 test2/ -rm -f test2/repo1/repodata/*sqlite* -rm -f test2/repo1/repodata/*other* -rm -f test2/repo1/repodata/*foobar* - -../repoupdater.py test2/repo1/ $@ --repo file://`pwd`/test/repo3/ --drmirror file://`pwd`/deltarepos/ diff --git a/deltarepo/managedeltarepos.py b/deltarepo/managedeltarepos.py 
deleted file mode 100755
index 51a198a..0000000
--- a/deltarepo/managedeltarepos.py
+++ /dev/null
@@ -1,295 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import print_function
-
-import os
-import sys
-import os.path
-import logging
-import argparse
-import tempfile
-import shutil
-import time
-import librepo
-import hashlib
-import deltarepo
-import createrepo_c as cr
-from deltarepo import DeltaRepoError, DeltaRepoPluginError
-
-LOG_FORMAT = "%(message)s"
-
-def parse_options():
-    parser = argparse.ArgumentParser(description="Manage deltarepos directory.",
-                usage="%(prog)s --gendeltareposfile [options] <directory>\n"
-                      "       %(prog)s [options] <old_repo> <new_repo> [deltarepos_dir]")
-    parser.add_argument('dirs', nargs='+')
-    parser.add_argument('--debug', action="store_true", help=argparse.SUPPRESS)
-    parser.add_argument("--version", action="store_true",
-                        help="Show version number and quit.")
-    parser.add_argument("-q", "--quiet", action="store_true",
-                        help="Run in quiet mode.")
-    parser.add_argument("-v", "--verbose", action="store_true",
-                        help="Run in verbose mode.")
-    parser.add_argument("--gendeltareposfile", action="store_true",
-                        help="Generate the deltarepos.xml file. Walk recursively "
-                             "all specified directories.")
-
-    group = parser.add_argument_group("deltarepos.xml file generation (--gendeltareposfile)")
-    group.add_argument("-o", "--outputdir", action="store", metavar="DIR",
-                       help="Set different output directory for deltarepos.xml")
-    group.add_argument("--force", action="store_true",
-                       help="Ignore bad repositories")
-
-    args = parser.parse_args()
-
-    # Error checks
-
-    if args.version:
-        return args
-
-    if args.gendeltareposfile:
-        # --gendeltareposfile
-        if not args.dirs or len(args.dirs) != 1:
-            parser.error("Exactly one directory must be specified")
-    else:
-        # default
-        for dir in args.dirs:
-            if not os.path.isdir(dir):
-                parser.error("{0} is not a directory".format(dir))
-        for dir in args.dirs[:2]:
-            # First two arguments must be repositories
-            if not os.path.isdir(os.path.join(dir, "repodata")) or \
-               not os.path.isfile(os.path.join(dir, "repodata", "repomd.xml")):
-                parser.error("Not a repository: %s" % dir)
-        if len(args.dirs) > 3:
-            parser.error("Too many directories specified")
-
-    if args.quiet and args.verbose:
-        parser.error("Cannot use quiet and verbose simultaneously!")
-
-    if args.outputdir and not args.gendeltareposfile:
-        parser.error("--outputdir cannot be used")
-    elif args.outputdir and not os.path.isdir(args.outputdir):
-        parser.error("--outputdir must be a directory: %s" % args.outputdir)
-
-    if args.debug:
-        args.verbose = True
-
-    return args
-
-def print_version():
-    print("ManageDeltaRepos: {0} (librepo: {1})".format(
-        deltarepo.VERBOSE_VERSION, librepo.VERSION))
-
-def setup_logging(quiet, verbose):
-    logger = logging.getLogger("managedeltarepos")
-    formatter = logging.Formatter(LOG_FORMAT)
-    logging.basicConfig(format=LOG_FORMAT)
-    if quiet:
-        logger.setLevel(logging.ERROR)
-    elif verbose:
-        logger.setLevel(logging.DEBUG)
-    else:
-        logger.setLevel(logging.INFO)
-    return logger
-
-def file_checksum(path, type="sha256"):
-    """Calculate file checksum"""
-    h = hashlib.new(type)
-    with open(path, "rb") as f:
-        while True:
-            chunk = f.read(1024**2)
-            if not chunk:
-                break
-            h.update(chunk)
-    return h.hexdigest()
-
-def parse_repomd(path, logger):
-    """Get repo data"""
-
-    # TODO: Switch this function to use createrepo_c.Repomd()
-    # instead of librepo
-
-    h = librepo.Handle()
-    r = librepo.Result()
-    h.urls = [path]
-    h.local = True
-    h.repotype = librepo.LR_YUMREPO
-    h.perform(r)
-
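(What the librepo call in parse_repomd() above yields; a sketch with a placeholder path. r.yum_repo maps metadata types to local file paths, r.yum_repomd mirrors the repomd.xml records:)

import librepo

h = librepo.Handle()
r = librepo.Result()
h.urls = ["/srv/deltarepo-1393000000-abc"]  # placeholder local path
h.local = True                              # parse in place, download nothing
h.repotype = librepo.LR_YUMREPO
h.perform(r)
print(r.yum_repo["repomd"])                 # .../repodata/repomd.xml
print(sorted(r.yum_repomd.keys()))          # e.g. ['deltametadata', 'primary', ...]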
- yum_repo_dict = r.yum_repo - yum_repomd_dict = r.yum_repomd - if not yum_repo_dict or "repomd" not in yum_repo_dict: - raise DeltaRepoError("{0} is not a repository".format(path)) - if not yum_repomd_dict or "deltametadata" not in yum_repomd_dict: - raise DeltaRepoError("{0} is not a delta repository".format(path)) - - return yum_repo_dict, yum_repomd_dict - -def repo_size(path, logger): - """Calculate size of the repository's metadata. - """ - - yum_repo_dict, yum_repomd_dict = parse_repomd(path, logger) - - total_size = 0 - - # Sum the sizes - for md_name, details in yum_repomd_dict.items(): - if not "size" in details or not "size_open" in details: - continue - if "size" in details: - total_size += details["size"] - else: - total_size += details["size_open"] - - # Get size of the repomd.xml - repomd_path = yum_repo_dict.get("repomd") - if repomd_path: - total_size += os.path.getsize(repomd_path) - - return total_size - -def deltareposrecord_from_repopath(path, logger, strip_path_prefix=True): - # Parse repo's deltametadata.xml - - path = os.path.abspath(path) - - yum_repo_dict, yum_repomd_dict = parse_repomd(path, logger) - - repomd_path = os.path.join(path, "repodata/repomd.xml") - repomd = cr.Repomd(repomd_path) - - deltametadata_path = os.path.join(path, yum_repomd_dict["deltametadata"]["location_href"]) - dm = deltarepo.DeltaMetadata() - dm.xmlparse(deltametadata_path) - - if strip_path_prefix and path.startswith(os.getcwd()): - path = path[len(os.getcwd())+1:] - - rec = deltarepo.DeltaReposRecord() - rec.location_base = None - rec.location_href = path - rec.size_total = repo_size(path, logger) - rec.revision_src = dm.revision_src - rec.revision_dst = dm.revision_dst - rec.contenthash_src = dm.contenthash_src - rec.contenthash_dst = dm.contenthash_dst - rec.contenthash_type = dm.contenthash_type - rec.timestamp_src = dm.timestamp_src - rec.timestamp_dst = dm.timestamp_dst - - for repomd_rec in repomd.records: - if not repomd_rec.type: - continue - rec.set_data(repomd_rec.type, repomd_rec.size) - - repomd_path = yum_repo_dict["repomd"] - rec.repomd_timestamp = int(os.path.getmtime(repomd_path)) - rec.repomd_size = os.path.getsize(repomd_path) - checksumval = file_checksum(repomd_path) - rec.repomd_checksums = [("sha256", checksumval)] - - return rec - -def write_deltarepos_file(path, records, append=False): - # Add the record to the deltarepos.xml - deltareposxml_path = os.path.join(path, "deltarepos.xml.xz") - drs = deltarepo.DeltaRepos() - if os.path.isfile(deltareposxml_path) and append: - drs.xmlparse(deltareposxml_path) - for rec in records: - drs.add_record(rec) - new_content = drs.xmldump() - - f = cr.CrFile(deltareposxml_path, cr.MODE_WRITE, cr.XZ) - f.write(new_content) - f.close() - -def gen_delta(old_repo_dir, new_repo_dir, logger, deltarepos_dir=None): - # Gen delta to a temporary directory - prefix = "deltarepo-{0}-".format(int(time.time())) - tmp_dir = tempfile.mkdtemp(prefix=prefix, dir="/tmp/") - try: - dg = deltarepo.DeltaRepoGenerator(old_repo_dir, - new_repo_dir, - out_path=tmp_dir, - logger=logger) - dg.gen() - except Exception: - shutil.rmtree(tmp_dir) - raise - - if not deltarepos_dir: - deltarepos_dir = os.getcwd() - - # Move the delta to the deltarepos_dir or to the current working directory - dst_dir = os.path.join(deltarepos_dir, os.path.basename(tmp_dir)) - shutil.copytree(tmp_dir, dst_dir) - shutil.rmtree(tmp_dir) - - # Prepare repo's DeltaReposRecord - rec = deltareposrecord_from_repopath(dst_dir, logger) - - # Add the record to the deltarepos.xml - 
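(The per-record accounting in repo_size() above reads as if it should fall back to size_open when the compressed size is missing; a condensed sketch of that intent -- metadata_size is a hypothetical helper, not part of the module:)

def metadata_size(yum_repomd_dict):
    total = 0
    for details in yum_repomd_dict.values():
        # prefer the compressed size, fall back to the uncompressed one
        size = details.get("size") or details.get("size_open")
        if size:
            total += size
    return total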
write_deltarepos_file(deltarepos_dir, [rec], append=True) - - return dst_dir - -def gen_deltarepos_file(workdir, logger, force=False): - cwd = os.getcwdu() - logger.debug("Changing work dir to: {0} (old work dir: {1})".format( - workdir, cwd)) - os.chdir(workdir) - - deltareposrecords = [] - - # Recursivelly walk the directories and search for repositories - for root, dirs, files in os.walk(workdir): - dirs.sort() - if "repodata" in dirs: - try: - rec = deltareposrecord_from_repopath(root, logger) - except DeltaRepoError: - if not force: - os.chdir(cwd) - raise - logger.warning("Bad repository: {0}".format(root)) - deltareposrecords.append(rec) - - write_deltarepos_file(workdir, deltareposrecords, append=False) - os.chdir(cwd) - - -def main(args, logger): - if args.gendeltareposfile: - workdir = args.dirs[0] - gen_deltarepos_file(workdir, logger, force=args.force) - else: - old_repo_dir = args.dirs[0] - new_repo_dir = args.dirs[1] - deltarepos_dir = args.dirs[2] if len(args.dirs) == 3 else None - gen_delta(old_repo_dir, new_repo_dir, logger, deltarepos_dir=deltarepos_dir) - -if __name__ == "__main__": - args = parse_options() - - if args.version: - print_version() - sys.exit(0) - - logger = setup_logging(args.quiet, args.verbose) - - try: - main(args, logger) - except (DeltaRepoError, DeltaRepoPluginError) as err: - if args.debug: - raise - print("Error: {0}".format(err), file=sys.stderr) - sys.exit(1) - - sys.exit(0) - -# TODO: -# - Check for contenthash mishmashes -# - Check for duplicated path (links) diff --git a/deltarepo/repocontenthash.py b/deltarepo/repocontenthash.py deleted file mode 100755 index e3a76e5..0000000 --- a/deltarepo/repocontenthash.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import os -import sys -import hashlib -import logging -import argparse -import deltarepo -from deltarepo import DeltaRepoError -from deltarepo.updater_common import LocalRepo - -LOG_FORMAT = "%(message)s" - -def parse_options(): - parser = argparse.ArgumentParser(description="Get content hash of local repository", - usage="%(prog)s [options] ") - parser.add_argument('path', help="Repository") - parser.add_argument('--debug', action="store_true", help=argparse.SUPPRESS) - parser.add_argument("--version", action="store_true", - help="Show version number and quit.") - parser.add_argument("-q", "--quiet", action="store_true", - help="Run in quiet mode.") - parser.add_argument("-v", "--verbose", action="store_true", - help="Run in verbose mode.") - parser.add_argument("-t", "--id-type", action="append", metavar="HASHTYPE", - help="Hash function for the ids (Contenthash). 
" \ - "Default is sha256.", default=[]) - parser.add_argument("-c", "--check", action="store_true", - help="Check if content hash in repomd match the real one") - parser.add_argument("--missing-contenthash-in-repomd-is-ok", action="store_true", - help="If --check option is used and contenthash is not specified " - "the repomd.xml then assume that checksums matches") - - args = parser.parse_args() - - # Sanity checks - - if args.version: - return args - - for hash_type in args.id_type: - if hash_type.lower() not in hashlib.algorithms: - parser.error("Unsupported hash algorithm %s" % hash_type) - - if not args.id_type: - args.id_type.append("sha256") - - if args.quiet and args.verbose: - parser.error("Cannot use quiet and verbose simultaneously!") - - if not os.path.isdir(args.path) or \ - not os.path.isdir(os.path.join(args.path, "repodata")) or \ - not os.path.isfile(os.path.join(args.path, "repodata", "repomd.xml")): - parser.error("Not a repository: %s" % args.path) - - if args.debug: - args.verbose = True - - return args - -def print_version(): - print("RepoContentHash: {0}".format(deltarepo.VERBOSE_VERSION)) - -def setup_logging(quiet, verbose): - logger = logging.getLogger("deltarepo_logger") - formatter = logging.Formatter(LOG_FORMAT) - logging.basicConfig(format=LOG_FORMAT) - if quiet: - logger.setLevel(logging.ERROR) - elif verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - return logger - -def print_contenthashes(args, logger): - # Print content hash from the repomd.xml - localrepo = LocalRepo.from_path(args.path, calc_contenthash=False) - if localrepo.repomd_contenthash and localrepo.repomd_contenthash_type: - print("R {0} {1}".format(localrepo.repomd_contenthash_type, localrepo.repomd_contenthash)) - - # Calculate content hashes - for hash_type in args.id_type: - localrepo = LocalRepo.from_path(args.path, contenthash_type=hash_type.lower()) - print("C {0} {1}".format(localrepo.contenthash_type, localrepo.contenthash)) - - return True - -def check(args, logger): - # Get type and value of content hash in repomd - localrepo = LocalRepo.from_path(args.path, calc_contenthash=False) - if not localrepo.repomd_contenthash or not localrepo.repomd_contenthash_type: - if args.missing_contenthash_in_repomd_is_ok: - return True - logger.warning("Content hash is not specified in repomd.xml") - return False - - repomd_contenhash_type = localrepo.repomd_contenthash_type - repomd_contenthash = localrepo.repomd_contenthash - - # Calculate real contenthash - localrepo = LocalRepo.from_path(args.path, contenthash_type=repomd_contenhash_type) - - # Check both contenthashes - if localrepo.contenthash != repomd_contenthash: - logger.error("Content hash from the repomd.xml ({0}) {1} doesn't match the real " - "one ({2}) {3}".format(repomd_contenhash_type, repomd_contenthash, - localrepo.contenthash_type, localrepo.contenthash)) - return False - - return True - -def main(args, logger): - if args.check: - return check(args, logger) - else: - return print_contenthashes(args, logger) - -if __name__ == "__main__": - args = parse_options() - - if args.version: - print_version() - sys.exit(0) - - logger = setup_logging(args.quiet, args.verbose) - - try: - ret = main(args, logger) - except Exception as err: - if args.debug: - raise - print("Error: {0}".format(err), file=sys.stderr) - sys.exit(1) - - if ret: - sys.exit(0) - - sys.exit(1) \ No newline at end of file diff --git a/deltarepo/repoupdater.py b/deltarepo/repoupdater.py deleted file mode 100755 index bd39e5a..0000000 
--- a/deltarepo/repoupdater.py +++ /dev/null @@ -1,224 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import os -import sys -import logging -import argparse -import librepo -import deltarepo -from deltarepo import DeltaRepoError, DeltaRepoPluginError -from deltarepo.updater_common import LocalRepo, OriginRepo, DRMirror, UpdateSolver, Updater -from deltarepo import needed_delta_metadata -from deltarepo.util import size_to_human_readable_str - -LOG_FORMAT = "%(message)s" - -# TODO: Multiple levels of verbosity (-v -vv -vvv) - -def parse_options(): - parser = argparse.ArgumentParser(description="Update a local repository", - usage="%(prog)s [options] \n") - parser.add_argument('localrepo', nargs=1) - parser.add_argument('--debug', action="store_true", help=argparse.SUPPRESS) - parser.add_argument("--version", action="store_true", - help="Show version number and quit.") - parser.add_argument("-q", "--quiet", action="store_true", - help="Run in quiet mode.") - parser.add_argument("-v", "--verbose", action="store_true", - help="Run in verbose mode.") - parser.add_argument("--drmirror", action="append", - help="Mirror with delta repositories.") - parser.add_argument("--repo", action="append", - help="Repo baseurl") - parser.add_argument("--repomirrorlist", - help="Repo mirrorlist") - parser.add_argument("--repometalink", - help="Repo metalink") - parser.add_argument("--target-contenthash", - help="Target content hash (if no --repo(mirrorlist|metalink)? used)") - parser.add_argument("--target-contenthash-type", default="sha256", - help="Type of target content hash. 'sha256' is default value.") - parser.add_argument("--update-only-present", action="store_true", - help="Update only metadata that are present in current repo. " - "(Newly added metadata will not be downloaded, missing " - "metadata will be ignored)") - parser.add_argument("--force-deltas", action="store_true", - help="Always use deltas. Origin repo use only to determine " - "target content hash.") - - args = parser.parse_args() - - # Error checks - - if args.version: - return args - - if not args.localrepo: - parser.error("Exactly one local repo must be specified") - - if args.quiet and args.verbose: - parser.error("Cannot use quiet and verbose simultaneously!") - - #if args.outputdir and not args.gendeltareposfile: - # parser.error("--outputdir cannot be used") - #elif args.outputdir and not os.path.isdir(args.outputdir): - # parser.error("--outputdir must be a directory: %s" % args.outputdir) - - if not os.path.isdir(args.localrepo[0]) or not os.path.isdir(os.path.join(args.localrepo[0], "repodata")): - parser.error("{0} is not a repository (a directory containing " - "repodata/ dir expected)".format(args.localrepo[0])) - - origin_repo = False - if args.repo or args.repomirrorlist or args.repometalink: - origin_repo = True - - if not args.drmirror and not origin_repo: - parser.error("Nothing to do. 
-        parser.error("Nothing to do. Neither a mirror with delta "
-                     "repositories nor an origin repo was specified.")
-
-    if origin_repo and args.target_contenthash:
-        parser.error("An origin repo shouldn't be specified if --target-contenthash is used")
-
-    if args.debug:
-        args.verbose = True
-
-    return args
-
-def print_version():
-    print("RepoUpdater: {0} (librepo: {1})".format(
-        deltarepo.VERBOSE_VERSION, librepo.VERSION))
-
-def setup_logging(quiet, verbose):
-    logger = logging.getLogger("repoupdater")
-    formatter = logging.Formatter(LOG_FORMAT)
-    logging.basicConfig(format=LOG_FORMAT)
-    if quiet:
-        logger.setLevel(logging.ERROR)
-    elif verbose:
-        logger.setLevel(logging.DEBUG)
-    else:
-        logger.setLevel(logging.INFO)
-    return logger
-
-def update_with_deltas(args, drmirrors, localrepo, originrepo, logger):
-    whitelisted_metadata = None
-    if args.update_only_present:
-        whitelisted_metadata = needed_delta_metadata(localrepo.present_metadata)
-        logger.debug("Using metadata whitelist")
-        logger.debug("Locally available metadata: {0}".format(localrepo.present_metadata))
-        logger.debug("Final whitelist: {0}".format(whitelisted_metadata))
-
-    updatesolver = UpdateSolver(drmirrors,
-                                whitelisted_metadata=whitelisted_metadata,
-                                logger=logger)
-
-    # Get the source hash
-    sch_t, sch = updatesolver.find_repo_contenthash(localrepo)
-    source_contenthash = sch
-    source_contenthash_type = sch_t
-
-    if not source_contenthash:
-        raise DeltaRepoError("No deltas available for {0}".format(localrepo.path))
-
-    # Get the target hash
-    if originrepo:
-        # Use the origin repo's content hash
-        tch_t, tch = updatesolver.find_repo_contenthash(originrepo)
-        target_contenthash = tch
-        target_contenthash_type = tch_t
-    else:
-        # Use the content hash specified by the user
-        target_contenthash = args.target_contenthash
-        target_contenthash_type = args.target_contenthash_type
-
-    if not target_contenthash:
-        raise DeltaRepoError("No deltas available - a patch for the current "
-                             "version of the remote repo is not available")
-
-    if source_contenthash_type != target_contenthash_type:
-        raise DeltaRepoError("Types of content hashes don't match: {0} != {1}"
-                             "".format(source_contenthash_type, target_contenthash_type))
-
-    # Resolve the path
-    resolved_path = updatesolver.resolve_path(source_contenthash, target_contenthash)
-    full_cost = resolved_path.cost()
-    real_cost = resolved_path.cost(whitelisted_metadata)
-
-    # Some debug output
-    logger.debug("Resolved path:")
-    x = 0
-    for link in resolved_path:
-        x += 1
-        logger.debug("{0:2} )".format(x))
-        logger.debug("URL: {0}".format(link.deltarepourl))
-        logger.debug("Src: {0}".format(link.src))
-        logger.debug("Dst: {0}".format(link.dst))
-        logger.debug("Full cost: {0}".format(size_to_human_readable_str(link.cost())))
-        logger.debug("Real cost: {0}".format(size_to_human_readable_str(link.cost(whitelisted_metadata))))
-    logger.debug("----------------------------------------------------------")
-    logger.debug("Total full cost: {0}".format(size_to_human_readable_str(full_cost)))
-    logger.debug("Total real cost: {0}".format(size_to_human_readable_str(real_cost)))
-
-    # Check the cost of downloading the origin remote repo directly
-    if originrepo:
-        origin_full_cost = originrepo.cost()
-        origin_real_cost = originrepo.cost(localrepo.present_metadata)
-        logger.debug("Origin repo full cost: {0}".format(size_to_human_readable_str(origin_full_cost)))
-        logger.debug("Origin repo real cost: {0}".format(size_to_human_readable_str(origin_real_cost)))
-
-        # Decide whether to download the origin repo or use the deltas
-        if origin_real_cost < real_cost and not args.force_deltas:
-            logger.debug("Using the origin repo - its cost is less than the cost of the deltas")
-            updater = Updater(localrepo, logger=logger)
-            updater.update_from_origin(originrepo, localrepo.present_metadata)
-            return
-
-    # Download and apply the deltarepos
-    updater = Updater(localrepo, logger=logger)
-    updater.apply_resolved_path(resolved_path, whitelisted_metadata=whitelisted_metadata)
-
-def main(args, logger):
-    localrepo = LocalRepo.from_path(args.localrepo[0])
-    originrepo = None
-    drmirrors = []
-
-    # TODO: Update to a selected revision
-    source_contenthash = None
-    source_contenthash_type = None
-    target_contenthash = None
-    target_contenthash_type = None
-
-    if args.repo or args.repometalink or args.repomirrorlist:
-        originrepo = OriginRepo.from_url(urls=args.repo,
-                                         mirrorlist=args.repomirrorlist,
-                                         metalink=args.repometalink)
-
-    for i in args.drmirror or []:  # --drmirror may be unset (None)
-        drmirror = DRMirror.from_url(i)
-        drmirrors.append(drmirror)
-
-    if drmirrors:
-        update_with_deltas(args, drmirrors, localrepo, originrepo, logger)
-    else:
-        # TODO: Just download the origin repo
-        pass
-
-if __name__ == "__main__":
-    args = parse_options()
-
-    if args.version:
-        print_version()
-        sys.exit(0)
-
-    logger = setup_logging(args.quiet, args.verbose)
-
-    try:
-        main(args, logger)
-    except (DeltaRepoError, DeltaRepoPluginError) as err:
-        if args.debug:
-            raise
-        print("Error: {0}".format(err), file=sys.stderr)
-        sys.exit(1)
-
-    sys.exit(0)
\ No newline at end of file
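
For reference, a hedged sketch of how the deleted repoupdater.py was meant to be invoked. The URLs and paths are illustrative; the flags come from parse_options() above:

    # Update a local repo via delta repositories; the origin repo is used to
    # determine the target content hash, and is downloaded directly instead
    # whenever that is cheaper than applying the deltas:
    ./repoupdater.py --drmirror http://example.com/deltarepos/ \
                     --repo http://example.com/repo/ \
                     /path/to/localrepo

    # Force deltas and update to an explicitly given target content hash
    # (CONTENTHASH is a placeholder):
    ./repoupdater.py --drmirror http://example.com/deltarepos/ \
                     --force-deltas \
                     --target-contenthash CONTENTHASH \
                     --target-contenthash-type sha256 \
                     /path/to/localrepo
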
diff --git a/deltarepo/tests/__init__.py b/deltarepo/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/deltarepo/tests/fixtures.py b/deltarepo/tests/fixtures.py
deleted file mode 100644
index ecaf8a7..0000000
--- a/deltarepo/tests/fixtures.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import os.path
-
-TEST_DATA_PATH = os.path.normpath(os.path.join(__file__, "../testdata"))
-
-REPO_01_PATH = os.path.join(TEST_DATA_PATH, "repo_01")
-
-DELTAREPOS_01_PATH = os.path.join(TEST_DATA_PATH, "deltarepos_01")
diff --git a/deltarepo/tests/run_nosetests.sh.in b/deltarepo/tests/run_nosetests.sh.in
deleted file mode 100755
index 345fb35..0000000
--- a/deltarepo/tests/run_nosetests.sh.in
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-#LD_LIBRARY_PATH=${CMAKE_BINARY_DIR}/src/: PYTHONPATH=${CMAKE_BINARY_DIR}/src/python/ nosetests -s -v ${CMAKE_CURRENT_SOURCE_DIR}/
-nosetests -s -v
\ No newline at end of file
diff --git a/deltarepo/tests/test_deltametadata.py b/deltarepo/tests/test_deltametadata.py
deleted file mode 100644
index 492ab99..0000000
--- a/deltarepo/tests/test_deltametadata.py
+++ /dev/null
@@ -1,114 +0,0 @@
-import unittest
-import shutil
-import tempfile
-from deltarepo.deltametadata import DeltaMetadata, PluginBundle
-
-from fixtures import *
-
-XML_EMPTY = """
-
-
-
-"""
-
-XML_01 = """
-
-
-
-
-
-
-
-
-
-
-
-
-"""
-
-
-class TestCasePluginPersistentConfig(unittest.TestCase):
-
-    def setUp(self):
-        self.tmpdir = tempfile.mkdtemp(prefix="deltarepo-test-")
-
-    def tearDown(self):
-        shutil.rmtree(self.tmpdir)
-
-    def test_dump_empty_deltametadata(self):
-        dm = DeltaMetadata()
-        content = dm.xmldump()
-        self.assertEqual(content, XML_EMPTY)
-
-    def test_dump_deltametadata_01(self):
-        plugin = PluginBundle("FooBarPlugin", 1)
-        plugin.set("database", "1")
-        plugin.set("origincompression", "gz")
-        plugin.append("removedpackage", {"href": "Pacakges/foo.rpm",
-                                         "base": "ftp://foobaar/"})
-        plugin.append("removedpackage", {"href": "Packages/bar.rpm"})
-        plugin.append("emptylist", {})
-
-        dm = DeltaMetadata()
-        dm.revision_src = "123"
-        dm.revision_dst = "456"
-        dm.contenthash_src = "abc"
-        dm.contenthash_dst = "bcd"
"bcd" - dm.contenthash_type = "foobar" - dm.timestamp_src = 120 - dm.timestamp_dst = 450 - dm.add_pluginbundle(plugin) - content = dm.xmldump() - - path = os.path.join(self.tmpdir, "01.xml") - open(path, "w").write(content) - - dm_loaded = DeltaMetadata() - dm_loaded.xmlparse(path) - - self.assertEqual(dm.revision_src, dm_loaded.revision_src) - self.assertEqual(dm.revision_dst, dm_loaded.revision_dst) - self.assertEqual(dm.contenthash_src, dm_loaded.contenthash_src) - self.assertEqual(dm.contenthash_dst, dm_loaded.contenthash_dst) - self.assertEqual(dm.contenthash_type, dm_loaded.contenthash_type) - self.assertEqual(dm.timestamp_src, dm_loaded.timestamp_src) - self.assertEqual(dm.timestamp_dst, dm_loaded.timestamp_dst) - self.assertEqual(len(dm.usedplugins), len(dm_loaded.usedplugins)) - self.assertEqual(dm.usedplugins["FooBarPlugin"].__dict__, - dm_loaded.usedplugins["FooBarPlugin"].__dict__) - - def test_parse_empty_deltametadata(self): - path = os.path.join(self.tmpdir, "empty.xml") - open(path, "w").write(XML_EMPTY) - - dm = DeltaMetadata() - dm.xmlparse(path) - - self.assertEqual(len(dm.usedplugins), 0) - - def test_parse_deltametadata_01(self): - path = os.path.join(self.tmpdir, "01.xml") - open(path, "w").write(XML_01) - - dm = DeltaMetadata() - dm.xmlparse(path) - - self.assertEqual(dm.revision_src, "123") - self.assertEqual(dm.revision_dst, "456") - self.assertEqual(dm.contenthash_src, "abc") - self.assertEqual(dm.contenthash_dst, "bcd") - self.assertEqual(dm.contenthash_type, "foobar") - self.assertEqual(dm.timestamp_src, 120) - self.assertEqual(dm.timestamp_dst, 450) - - self.assertEqual(len(dm.usedplugins), 1) - - bp = dm.usedplugins["FooBarPlugin"] - self.assertEqual(bp.name, "FooBarPlugin") - self.assertEqual(bp.version, 1) - self.assertEqual(bp.get("database"), "1") - self.assertEqual(bp.get("origincompression"), "gz") - self.assertEqual(bp.get_list("removedpackage"), - [{"href": "Pacakges/foo.rpm", "base": "ftp://foobaar/"}, - {"href": "Packages/bar.rpm"}]) - self.assertEqual(bp.get_list("emptylist"), [{}]) \ No newline at end of file diff --git a/deltarepo/tests/test_deltarepos.py b/deltarepo/tests/test_deltarepos.py deleted file mode 100644 index fb41907..0000000 --- a/deltarepo/tests/test_deltarepos.py +++ /dev/null @@ -1,125 +0,0 @@ -import unittest -import shutil -import tempfile -from deltarepo.deltarepos import DeltaRepos, DeltaReposRecord - -from .fixtures import * - -XML_EMPTY = """ - - -""" - -XML_01 = """ - - - - - - - - - - - 123456789 - 963 - foobarchecksum - - - -""" - - -class TestCaseDeltaRepos(unittest.TestCase): - - def setUp(self): - self.tmpdir = tempfile.mkdtemp(prefix="deltarepo-test-") - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - def test_dump_empty_deltarepos(self): - dr = DeltaRepos() - content = dr.xmldump() - - path = os.path.join(self.tmpdir, "01.xml") - open(path, "w").write(content) - - dr = DeltaRepos() - dr.xmlparse(path) - - self.assertEqual(len(dr.records), 0) - - def test_dump_deltarepos_01(self): - rec = DeltaReposRecord() - - rec.location_href = "deltarepos/ei7as764ly-043fds4red" - rec.size_total = 15432 - rec.revision_src = "1387077123" - rec.revision_dst = "1387086456" - rec.contenthash_src = "a" - rec.contenthash_dst = "b" - rec.contenthash_type = "md5" - rec.timestamp_src = 1387075111 - rec.timestamp_dst = 1387086222 - - rec.set_data("primary", size=7766) - - rec.repomd_timestamp = 123456789 - rec.repomd_size = 963 - rec.repomd_checksums = [("sha256", "foobarchecksum")] - - #rec.add_plugin('MainDeltaPlugin', 
-        #rec.add_plugin('MainDeltaPlugin', {'src_contenthash': 'ei7as764ly',
-        #                                   'dst_contenthash': '043fds4red',
-        #                                   'contenthash_type': 'sha256'})
-
-        dr = DeltaRepos()
-        dr.add_record(rec)
-        content = dr.xmldump()
-
-        path = os.path.join(self.tmpdir, "01.xml")
-        open(path, "w").write(content)
-
-        dr = DeltaRepos()
-        dr.xmlparse(path)
-
-        self.assertEqual(len(dr.records), 1)
-        self.assertEqual(dr.records[0].__dict__, rec.__dict__)
-
-    def test_parse_empty_deltarepos(self):
-        path = os.path.join(self.tmpdir, "empty.xml")
-        open(path, "w").write(XML_EMPTY)
-
-        dr = DeltaRepos()
-        dr.xmlparse(path)
-
-        self.assertEqual(len(dr.records), 0)
-
-    def test_parse_deltarepos_01(self):
-        path = os.path.join(self.tmpdir, "01.xml")
-        open(path, "w").write(XML_01)
-
-        dr = DeltaRepos()
-        dr.xmlparse(path)
-
-        self.assertEqual(len(dr.records), 1)
-
-        rec = dr.records[0]
-
-        self.assertEqual(rec.location_base, None)
-        self.assertEqual(rec.location_href, "deltarepos/ei7as764ly-043fds4red")
-        self.assertEqual(rec.size_total, 15432)
-        self.assertEqual(rec.revision_src, "1387077123")
-        self.assertEqual(rec.revision_dst, "1387087456")
-        self.assertEqual(rec.timestamp_src, 1387075111)
-        self.assertEqual(rec.timestamp_dst, 1387086222)
-
-        self.assertEqual(rec.get_data("primary").get("size"), 7766)
-
-        #self.assertEqual(len(rec.plugins), 1)
-        #self.assertTrue("MainDeltaPlugin" in rec.plugins)
-        #plugin = rec.plugins["MainDeltaPlugin"]
-        #self.assertEqual(plugin, {'name': 'MainDeltaPlugin',
-        #                          'src_contenthash': 'ei7as764ly',
-        #                          'dst_contenthash': '043fds4red',
-        #                          'contenthash_type': 'sha256'})
\ No newline at end of file
diff --git a/deltarepo/tests/test_updater_common.py b/deltarepo/tests/test_updater_common.py
deleted file mode 100644
index 5122cf0..0000000
--- a/deltarepo/tests/test_updater_common.py
+++ /dev/null
@@ -1,254 +0,0 @@
-import os.path
-import logging
-import unittest
-from deltarepo.updater_common import LocalRepo, OriginRepo, DRMirror, Solver, UpdateSolver
-from deltarepo.errors import DeltaRepoError
-
-from .fixtures import *
-
-class LinkMock(object):
-    """Mock link object"""
-    def __init__(self, src, dst, type="sha256", mirrorurl="mockedlink", cost=100):
-        self.src = src
-        self.dst = dst
-        self.type = type
-        self.contenthash_src = src
-        self.contenthash_dst = dst
-        self.contenthash_type = type
-        self.mirrorurl = mirrorurl
-        self._cost = cost
-
-        # Users can set the remaining values themselves
-        self.revision_src = None
-        self.revision_dst = None
-        self.timestamp_src = None  # Integer expected here
-        self.timestamp_dst = None  # Integer expected here
-
-    def __repr__(self):
-        return "<LinkMock \'{0}\' -> \'{1}\' ({2})>".format(
-            self.src, self.dst, self.cost())
-
-    def cost(self, whitelisted_metadata=None):
-        return self._cost
-
-class TestCaseLocalRepo(unittest.TestCase):
-    def test_localrepo_init(self):
-        lr = LocalRepo()
-        self.assertTrue(lr)
-
-    def test_localrepo_from_path(self):
-        lr = LocalRepo.from_path(REPO_01_PATH)
-        self.assertEqual(lr.revision, "1378724582")
-        self.assertEqual(lr.timestamp, 1378724581L)
-        self.assertEqual(lr.contenthash, "4d1c9f8b7c442adb5f90fda368ec7eb267fa42759a5d125001585bc8928b3967")
-        self.assertEqual(lr.contenthash_type, "sha256")
-
-    def test_localrepo_from_path_md5(self):
-        lr = LocalRepo.from_path(REPO_01_PATH, contenthash_type="md5")
-        self.assertEqual(lr.revision, "1378724582")
-        self.assertEqual(lr.timestamp, 1378724581L)
-        self.assertEqual(lr.contenthash_type, "md5")
-        self.assertEqual(lr.contenthash, "357a4ca1d69f48f2a278158079153211")
-
-class TestCaseOriginRepo(unittest.TestCase):
-    def test_originrepo_init(self):
-        lr = OriginRepo()
-        self.assertTrue(lr)
-
-    def test_originrepo_from_url(self):
-        lr = OriginRepo.from_url(urls=[REPO_01_PATH])
-        self.assertEqual(lr.revision, "1378724582")
-        self.assertEqual(lr.timestamp, 1378724581L)
-        self.assertEqual(lr.contenthash, None)
-        self.assertEqual(lr.contenthash_type, None)
-        self.assertEqual(lr.urls, [REPO_01_PATH])
-        self.assertEqual(lr.mirrorlist, None)
-        self.assertEqual(lr.metalink, None)
-
-    def test_originrepo_from_local_repomd(self):
-        lr = OriginRepo.from_local_repomd(os.path.join(REPO_01_PATH, "repodata/repomd.xml"))
-        self.assertEqual(lr.revision, "1378724582")
-        self.assertEqual(lr.timestamp, 1378724581L)
-        self.assertEqual(lr.contenthash, None)
-        self.assertEqual(lr.contenthash_type, None)
-        self.assertEqual(lr.urls, [])
-        self.assertEqual(lr.mirrorlist, None)
-        self.assertEqual(lr.metalink, None)
-
-class TestCaseDRMirror(unittest.TestCase):
-    def test_drmirror_init(self):
-        drm = DRMirror()
-        self.assertTrue(drm)
-
-    def test_drmirror_from_url(self):
-        url = "file://" + os.path.abspath(DELTAREPOS_01_PATH)
-        drm = DRMirror.from_url(url)
-        self.assertTrue(drm)
-        self.assertEqual(len(drm.records), 3)
-        self.assertTrue(drm.deltarepos)
-
-class TestCaseSolver(unittest.TestCase):
-
-    def path_to_strlist(self, resolved_path):
-        path = [x.src for x in resolved_path]
-        path.append(resolved_path[-1].dst)
-        return path
-
-    def test_solver_graph_build(self):
-        links = []
-        links.append(LinkMock("aaa", "bbb"))
-        links.append(LinkMock("aaa", "ccc"))
-        links.append(LinkMock("bbb", "ccc"))
-
-        logger = logging.getLogger("testlogger")
-        graph = Solver.Graph()
-        graph.graph_from_links(links)
-
-        self.assertTrue(graph)
-        self.assertEqual(len(graph.nodes), 3)
-        self.assertTrue("aaa" in graph.nodes)
-        self.assertTrue("bbb" in graph.nodes)
-        self.assertTrue("ccc" in graph.nodes)
-
-        self.assertEqual(len(graph.nodes["aaa"].targets), 2)
-        self.assertEqual(len(graph.nodes["bbb"].targets), 1)
-        self.assertEqual(len(graph.nodes["ccc"].targets), 0)
-
-        self.assertEqual(len(graph.nodes["aaa"].sources), 0)
-        self.assertEqual(len(graph.nodes["bbb"].sources), 1)
-        self.assertEqual(len(graph.nodes["ccc"].sources), 2)
-
-    def test_solver_01(self):
-        links = []
-        links.append(LinkMock("aaa", "bbb"))
-        links.append(LinkMock("aaa", "ccc"))
-        links.append(LinkMock("bbb", "ccc"))
-
-        logger = logging.getLogger("testlogger")
-        solver = Solver(links, "aaa", "ccc", logger=logger)
-        resolved_path = solver.solve()
-        self.assertTrue(resolved_path)
-        self.assertEqual(len(resolved_path), 1)
-        self.assertEqual(self.path_to_strlist(resolved_path),
-                         ["aaa", "ccc"])
-
-    def test_solver_02(self):
-        links = []
-        links.append(LinkMock("aaa", "bbb"))
-        links.append(LinkMock("bbb", "ccc"))
-
-        logger = logging.getLogger("testlogger")
-        solver = Solver(links, "aaa", "ccc", logger=logger)
-        resolved_path = solver.solve()
-        self.assertTrue(resolved_path)
-        self.assertEqual(self.path_to_strlist(resolved_path),
-                         ["aaa", "bbb", "ccc"])
-
-    def test_solver_03(self):
-        links = []
-        links.append(LinkMock("aaa", "bbb"))
-        links.append(LinkMock("bbb", "ccc"))
-        links.append(LinkMock("aaa", "ccc", cost=1000))
-
-        logger = logging.getLogger("testlogger")
-        solver = Solver(links, "aaa", "ccc", logger=logger)
-        resolved_path = solver.solve()
-        self.assertTrue(resolved_path)
-        self.assertEqual(self.path_to_strlist(resolved_path),
-                         ["aaa", "bbb", "ccc"])
-
-    def test_solver_04(self):
-        links = []
-        links.append(LinkMock("aaa", "bbb"))
-        links.append(LinkMock("bbb", "aaa"))
-        links.append(LinkMock("bbb", "ccc"))
links.append(LinkMock("ccc", "bbb")) - - logger = logging.getLogger("testloger") - solver = Solver(links, "aaa", "ccc", logger=logger) - resolved_path = solver.solve() - self.assertTrue(resolved_path) - self.assertEqual(self.path_to_strlist(resolved_path), - ["aaa", "bbb", "ccc"]) - - def test_solver_shouldfail_01(self): - links = [] - links.append(LinkMock("aaa", "bbb")) - links.append(LinkMock("ccc", "ddd")) - - logger = logging.getLogger("testloger") - solver = Solver(links, "aaa", "ccc", logger=logger) - resolved_path = solver.solve() - self.assertFalse(resolved_path) - - def test_solver_shouldfail_02(self): - links = [] - links.append(LinkMock("aaa", "bbb")) - logger = logging.getLogger("testloger") - solver = Solver(links, "aaa", "ccc", logger=logger) - self.assertRaises(DeltaRepoError, solver.solve) - - def test_solver_shouldfail_03(self): - links = [] - links.append(LinkMock("aaa", "bbb")) - logger = logging.getLogger("testloger") - solver = Solver(links, "ccc", "aaa", logger=logger) - self.assertRaises(DeltaRepoError, solver.solve) - -class TestCaseUpdateSolver(unittest.TestCase): - - def test_updatesolver_resolve_path(self): - links = [] - links.append(LinkMock("aaa", "bbb")) - links.append(LinkMock("bbb", "ccc")) - - updatesolver = UpdateSolver([]) - updatesolver._links = links - - resolved_path = updatesolver.resolve_path("aaa", "ccc") - self.assertTrue(resolved_path) - self.assertEqual(len(resolved_path), 2) - - def test_updatesolver_find_repo_contenthash(self): - links = [] - link = LinkMock("aaa", "bbb") - link.revision_src = "aaa_rev" - link.revision_dst = "bbb_rev" - link.timestamp_src = 111 - link.timestamp_dst = 222 - links.append(link) - - updatesolver = UpdateSolver([]) - updatesolver._links = links - - repo = LocalRepo() - - repo.revision = "aaa_rev" - repo.timestamp = 111 - type, hash = updatesolver.find_repo_contenthash(repo) - self.assertEqual(type, "sha256") - self.assertEqual(hash, "aaa") - - repo.revision = "bbb_rev" - repo.timestamp = 222 - type, hash = updatesolver.find_repo_contenthash(repo) - self.assertEqual(type, "sha256") - self.assertEqual(hash, "bbb") - - repo.revision = "aaa_rev" - repo.timestamp = 222 - type, hash = updatesolver.find_repo_contenthash(repo) - self.assertEqual(type, "sha256") - self.assertEqual(hash, None) - - repo.revision = "ccc_rev" - repo.timestamp = 111 - type, hash = updatesolver.find_repo_contenthash(repo) - self.assertEqual(type, "sha256") - self.assertEqual(hash, None) - - repo.revision = "aaa_rev" - repo.timestamp = 111 - type, hash = updatesolver.find_repo_contenthash(repo, contenthash_type="md5") - self.assertEqual(type, "md5") - self.assertEqual(hash, None) diff --git a/deltarepo/tests/testdata/deltarepos_01/deltarepos.xml.xz b/deltarepo/tests/testdata/deltarepos_01/deltarepos.xml.xz deleted file mode 100644 index b61d47b..0000000 Binary files a/deltarepo/tests/testdata/deltarepos_01/deltarepos.xml.xz and /dev/null differ diff --git a/deltarepo/tests/testdata/repo_01/repodata/0d112997f72b78071f561b362268d51fe89d0b247041d312cafc5cb061267f1a-other.sqlite.bz2 b/deltarepo/tests/testdata/repo_01/repodata/0d112997f72b78071f561b362268d51fe89d0b247041d312cafc5cb061267f1a-other.sqlite.bz2 deleted file mode 100644 index c656287..0000000 Binary files a/deltarepo/tests/testdata/repo_01/repodata/0d112997f72b78071f561b362268d51fe89d0b247041d312cafc5cb061267f1a-other.sqlite.bz2 and /dev/null differ diff --git a/deltarepo/tests/testdata/repo_01/repodata/341297672077ef71a5f8db569932d20975e906f192986cdfa8ab535f0c224d4d-primary.xml.gz 
diff --git a/deltarepo/tests/testdata/repo_01/repodata/341297672077ef71a5f8db569932d20975e906f192986cdfa8ab535f0c224d4d-primary.xml.gz b/deltarepo/tests/testdata/repo_01/repodata/341297672077ef71a5f8db569932d20975e906f192986cdfa8ab535f0c224d4d-primary.xml.gz
deleted file mode 100644
index 39f697b..0000000
Binary files a/deltarepo/tests/testdata/repo_01/repodata/341297672077ef71a5f8db569932d20975e906f192986cdfa8ab535f0c224d4d-primary.xml.gz and /dev/null differ
diff --git a/deltarepo/tests/testdata/repo_01/repodata/4d3db9af2873be47e1aefae3c139235294b5775c07d53813b68d1f50b8ea9bf0-filelists.sqlite.bz2 b/deltarepo/tests/testdata/repo_01/repodata/4d3db9af2873be47e1aefae3c139235294b5775c07d53813b68d1f50b8ea9bf0-filelists.sqlite.bz2
deleted file mode 100644
index 02fb71a..0000000
Binary files a/deltarepo/tests/testdata/repo_01/repodata/4d3db9af2873be47e1aefae3c139235294b5775c07d53813b68d1f50b8ea9bf0-filelists.sqlite.bz2 and /dev/null differ
diff --git a/deltarepo/tests/testdata/repo_01/repodata/87b269aeb163c1cabf236cd7e503069a25a364db50d0208d35834a5fea9624c2-primary.sqlite.bz2 b/deltarepo/tests/testdata/repo_01/repodata/87b269aeb163c1cabf236cd7e503069a25a364db50d0208d35834a5fea9624c2-primary.sqlite.bz2
deleted file mode 100644
index 2b5431e..0000000
Binary files a/deltarepo/tests/testdata/repo_01/repodata/87b269aeb163c1cabf236cd7e503069a25a364db50d0208d35834a5fea9624c2-primary.sqlite.bz2 and /dev/null differ
diff --git a/deltarepo/tests/testdata/repo_01/repodata/e6e145c2e40e66580dd6e2d664421d6039dac022bcfecfa7e8ec5fa59936d8d9-filelists.xml.gz b/deltarepo/tests/testdata/repo_01/repodata/e6e145c2e40e66580dd6e2d664421d6039dac022bcfecfa7e8ec5fa59936d8d9-filelists.xml.gz
deleted file mode 100644
index 926a8e4..0000000
Binary files a/deltarepo/tests/testdata/repo_01/repodata/e6e145c2e40e66580dd6e2d664421d6039dac022bcfecfa7e8ec5fa59936d8d9-filelists.xml.gz and /dev/null differ
diff --git a/deltarepo/tests/testdata/repo_01/repodata/f8674e5e78b5dc0188d5a86acc8bdc12f7ed5b7be2084d9aa4e1f00ec61e4981-other.xml.gz b/deltarepo/tests/testdata/repo_01/repodata/f8674e5e78b5dc0188d5a86acc8bdc12f7ed5b7be2084d9aa4e1f00ec61e4981-other.xml.gz
deleted file mode 100644
index e44440f..0000000
Binary files a/deltarepo/tests/testdata/repo_01/repodata/f8674e5e78b5dc0188d5a86acc8bdc12f7ed5b7be2084d9aa4e1f00ec61e4981-other.xml.gz and /dev/null differ
diff --git a/deltarepo/tests/testdata/repo_01/repodata/repomd.xml b/deltarepo/tests/testdata/repo_01/repodata/repomd.xml
deleted file mode 100644
index 82684c5..0000000
--- a/deltarepo/tests/testdata/repo_01/repodata/repomd.xml
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
-  <revision>1378724582</revision>
-  <data type="primary">
-    <checksum type="sha256">341297672077ef71a5f8db569932d20975e906f192986cdfa8ab535f0c224d4d</checksum>
-    <open-checksum type="sha256">cc49d2d2c023dbef86c4f20095bda748d13aefbfe191bd9b7d6c516ff8dee66c</open-checksum>
-    <location href="repodata/341297672077ef71a5f8db569932d20975e906f192986cdfa8ab535f0c224d4d-primary.xml.gz"/>
-    <timestamp>1378724580</timestamp>
-    <size>843</size>
-    <open-size>2711</open-size>
-  </data>
-  <data type="other_db">
-    <checksum type="sha256">0d112997f72b78071f561b362268d51fe89d0b247041d312cafc5cb061267f1a</checksum>
-    <open-checksum type="sha256">a36e6723a37682d92b3152ec04de50a132780b0eddc1768d919e557561d3acec</open-checksum>
-    <location href="repodata/0d112997f72b78071f561b362268d51fe89d0b247041d312cafc5cb061267f1a-other.sqlite.bz2"/>
-    <timestamp>1378724580</timestamp>
-    <size>822</size>
-    <open-size>6144</open-size>
-    <database_version>10</database_version>
-  </data>
-  <data type="filelists">
-    <checksum type="sha256">e6e145c2e40e66580dd6e2d664421d6039dac022bcfecfa7e8ec5fa59936d8d9</checksum>
-    <open-checksum type="sha256">2a67c961707e6de30077035f2e47b5084c4687d8b8ba06183e53c2c48e44a822</open-checksum>
-    <location href="repodata/e6e145c2e40e66580dd6e2d664421d6039dac022bcfecfa7e8ec5fa59936d8d9-filelists.xml.gz"/>
-    <timestamp>1378724581</timestamp>
-    <size>294</size>
-    <open-size>428</open-size>
-  </data>
-  <data type="filelists_db">
-    <checksum type="sha256">4d3db9af2873be47e1aefae3c139235294b5775c07d53813b68d1f50b8ea9bf0</checksum>
-    <open-checksum type="sha256">06c5517f5ee60eff93364bf205a14bacf795578e9e001e880d652a0a0e43e3f5</open-checksum>
-    <location href="repodata/4d3db9af2873be47e1aefae3c139235294b5775c07d53813b68d1f50b8ea9bf0-filelists.sqlite.bz2"/>
-    <timestamp>1378724580</timestamp>
-    <size>817</size>
-    <open-size>7168</open-size>
-    <database_version>10</database_version>
-  </data>
-  <data type="primary_db">
-    <checksum type="sha256">87b269aeb163c1cabf236cd7e503069a25a364db50d0208d35834a5fea9624c2</checksum>
-    <open-checksum type="sha256">2ef40faa184db55e06c23e89f78deb4c59a6c63f93e9dc226c4e8f50c851bbb3</open-checksum>
-    <location href="repodata/87b269aeb163c1cabf236cd7e503069a25a364db50d0208d35834a5fea9624c2-primary.sqlite.bz2"/>
-    <timestamp>1378724580</timestamp>
-    <size>2036</size>
-    <open-size>21504</open-size>
-    <database_version>10</database_version>
-  </data>
-  <data type="other">
-    <checksum type="sha256">f8674e5e78b5dc0188d5a86acc8bdc12f7ed5b7be2084d9aa4e1f00ec61e4981</checksum>
-    <open-checksum type="sha256">089af1def4d78096b74dd5b5f40790bc8f582cb34979e55def1cc44fe04d328e</open-checksum>
-    <location href="repodata/f8674e5e78b5dc0188d5a86acc8bdc12f7ed5b7be2084d9aa4e1f00ec61e4981-other.xml.gz"/>
-    <timestamp>1378724580</timestamp>
-    <size>358</size>
-    <open-size>653</open-size>
-  </data>
-</repomd>