os.path.join(prerelease.dir, prerelease.build_id),
project, base_url)
-def get_published_repo(repo_path_base, time_stamp):
- """ Search publised repository
+def get_published_repos(repo_path_base, time_stamp):
+ """ Search published repository recursively
Args:
- repo_path_base (str): the repo location on file systems
- time_stamp (str): the time stamp of sbumit tag
+ repo_path_base (str): path to base dir
+ time_stamp (str): time stamp of submit tag
"""
- ret_list = []
if os.path.isdir(repo_path_base):
- for dir_name in os.listdir(repo_path_base):
- if dir_name.endswith(time_stamp):
- ret_list.append(os.path.join(repo_path_base, dir_name))
-
- return ret_list
+ for name in os.listdir(repo_path_base):
+ path = os.path.join(repo_path_base, name)
+ if os.path.isdir(path):
+ if name.endswith(time_stamp):
+ yield path
+ else:
+ for npath in get_published_repos(path, time_stamp):
+ yield npath
def project_cleanup(backenddb, build, base_path, base_url, event_dict):
""" request(SR) end of life, this founction should be called to
except SnapshotError, err:
raise LocalError("Error getting prerelease data: %s" % str(err))
- published_repos = get_published_repo(prerelease.path, time_stamp)
- for repo in published_repos:
- print 'Removing the repo: %s' % repo
- shutil.rmtree(repo)
+ # remove latest snapshot from prerelease_path as we need to clean up
+ # published repos from old snapshots too
+ root = prerelease.path.split(prerelease.snapshot)[0]
+
+ for path in get_published_repos(root, time_stamp):
+ print 'Removing the repo: %s' % path
+ shutil.rmtree(path)
def main(action):
"""Script entry point.