Skip to content

Commit

Permalink
* offer access to deleted projects/packages
Browse files Browse the repository at this point in the history
* allow checkout of meta data of packages
* process validators in sorted order
  • Loading branch information
adrianschroeter committed May 22, 2010
1 parent da0c0f6 commit 1e985f3
Show file tree
Hide file tree
Showing 3 changed files with 82 additions and 38 deletions.
1 change: 1 addition & 0 deletions NEWS
Expand Up @@ -14,6 +14,7 @@
- support branch --force to override target
- support for "unresolvable" state of OBS 2.0
- support undelete of project or package
- support for package meta data checkout

0.126
- added VM autosetup to osc. This requires appropriate OBS version and build script version.
Expand Down
18 changes: 13 additions & 5 deletions osc/commandline.py
Expand Up @@ -211,6 +211,8 @@ def do_init(self, subcmd, opts, project, package=None):
help='print extra information')
@cmdln.option('-l', '--long', action='store_true', dest='verbose',
help='print extra information')
@cmdln.option('-D', '--deleted', action='store_true',
help='show only the former deleted projects or packages')
def do_list(self, subcmd, opts, *args):
"""${cmd_name}: List sources or binaries on the server
Expand Down Expand Up @@ -257,7 +259,11 @@ def do_list(self, subcmd, opts, *args):
project = args[0]
if len(args) > 1:
package = args[1]
if opts.deleted:
raise oscerr.WrongArgs("Too many arguments when listing deleted packages")
if len(args) > 2:
if opts.deleted:
raise oscerr.WrongArgs("Too many arguments when listing deleted packages")
if opts.binaries:
if opts.repo:
if opts.repo != args[2]:
Expand Down Expand Up @@ -330,7 +336,7 @@ def do_list(self, subcmd, opts, *args):
# list sources
elif not opts.binaries:
if not args:
print '\n'.join(meta_get_project_list(conf.config['apiurl']))
print '\n'.join(meta_get_project_list(conf.config['apiurl'], opts.deleted))

elif len(args) == 1:
if opts.verbose:
Expand All @@ -339,7 +345,7 @@ def do_list(self, subcmd, opts, *args):
if opts.expand:
raise oscerr.WrongOptions('Sorry, the --expand option is not implemented for projects.')

print '\n'.join(meta_get_packagelist(conf.config['apiurl'], project))
print '\n'.join(meta_get_packagelist(conf.config['apiurl'], project, opts.deleted))

elif len(args) == 2 or len(args) == 3:
link_seen = False
Expand Down Expand Up @@ -2101,6 +2107,8 @@ def do_repourls(self, subcmd, opts, *args):
@cmdln.option('-u', '--unexpand-link', action='store_true',
help='if a package is a link, check out the _link file ' \
'instead of the expanded sources')
@cmdln.option('-m', '--meta', action='store_true',
help='checkout out meta data instead of sources' )
@cmdln.option('-c', '--current-dir', action='store_true',
help='place PACKAGE folder in the current directory' \
'instead of a PROJECT/PACKAGE directory')
Expand Down Expand Up @@ -2178,7 +2186,7 @@ def do_checkout(self, subcmd, opts, *args):
if opts.current_dir:
project_dir = None
checkout_package(apiurl, project, package, rev, expand_link=expand_link, \
prj_dir=project_dir, service_files=service_files, progress_obj=self.download_progress, limit_size=opts.limit_size)
prj_dir=project_dir, service_files=service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
print_request_list(apiurl, project, package)

elif project:
Expand All @@ -2198,13 +2206,13 @@ def do_checkout(self, subcmd, opts, *args):
for package in meta_get_packagelist(apiurl, project):
try:
checkout_package(apiurl, project, package, expand_link = expand_link, \
prj_dir = prj_dir, service_files = service_files, progress_obj=self.download_progress, limit_size=opts.limit_size)
prj_dir = prj_dir, service_files = service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
except oscerr.LinkExpandError, e:
print >>sys.stderr, 'Link cannot be expanded:\n', e
print >>sys.stderr, 'Use "osc repairlink" for fixing merge conflicts:\n'
# check out in unexpanded form at least
checkout_package(apiurl, project, package, expand_link = False, \
prj_dir = prj_dir, service_files = service_files, progress_obj=self.download_progress, limit_size=opts.limit_size)
prj_dir = prj_dir, service_files = service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
print_request_list(apiurl, project)

else:
Expand Down
101 changes: 68 additions & 33 deletions osc/core.py
Expand Up @@ -77,13 +77,8 @@
<arch>x86_64</arch>
<arch>i586</arch>
</repository>
<repository name="openSUSE_11.0">
<path project="openSUSE:11.0" repository="standard"/>
<arch>x86_64</arch>
<arch>i586</arch>
</repository>
<repository name="Fedora_11">
<path project="Fedora:11" repository="standard" />
<repository name="Fedora_12">
<path project="Fedora:12" repository="standard" />
<arch>x86_64</arch>
<arch>i586</arch>
</repository>
Expand All @@ -92,11 +87,6 @@
<arch>x86_64</arch>
<arch>i586</arch>
</repository>
<repository name="SLE_10">
<path project="SUSE:SLE-10:SDK" repository="standard" />
<arch>x86_64</arch>
<arch>i586</arch>
</repository>
-->
</project>
Expand Down Expand Up @@ -185,7 +175,7 @@

buildstatus_symbols = {'succeeded': '.',
'disabled': ' ',
'expansion error': 'E', # obsolete with OBS 2.0
'expansion error': 'U', # obsolete with OBS 2.0
'unresolvable': 'U',
'failed': 'F',
'broken': 'B',
Expand Down Expand Up @@ -760,11 +750,12 @@ def __str__(self):

class Package:
"""represent a package (its directory) and read/keep/write its metadata"""
def __init__(self, workingdir, progress_obj=None, limit_size=None):
def __init__(self, workingdir, progress_obj=None, limit_size=None, meta=None):
self.dir = workingdir
self.absdir = os.path.abspath(self.dir)
self.storedir = os.path.join(self.absdir, store)
self.progress_obj = progress_obj
self.meta = meta
self.limit_size = limit_size
if limit_size and limit_size == 0:
self.limit_size = None
Expand Down Expand Up @@ -850,6 +841,18 @@ def clear_from_conflictlist(self, n):

self.write_conflictlist()

def write_meta_mode(self):
    """Persist the package's meta-mode flag in the store directory.

    When self.meta is set, write the literal string "true" into the
    '_meta_mode' marker file; otherwise remove the marker file if it
    exists so the working copy falls back to normal (source) mode.
    """
    fname = os.path.join(self.storedir, '_meta_mode')
    if self.meta:
        f = open(fname, 'w')
        f.write("true")
        f.close()
    else:
        try:
            os.unlink(fname)
        except OSError:
            # marker file not present -- nothing to remove
            pass

def write_sizelimit(self):
if self.size_limit and self.size_limit <= 0:
try:
Expand Down Expand Up @@ -910,10 +913,11 @@ def commit(self, msg='', validators=None):
if validators:
import subprocess
from stat import *
for validator in os.listdir(validators):
for validator in sorted(os.listdir(validators)):
fn=validators+"/"+validator
mode = os.stat(fn)
if S_ISREG(mode[ST_MODE]):
print "run", fn
p = subprocess.Popen([fn], close_fds=True)
if p.wait() != 0:
raise oscerr.RuntimeError(p.stdout, validator )
Expand Down Expand Up @@ -1031,7 +1035,7 @@ def updatefile(self, n, revision):
mtime = self.findfilebyname(n).mtime

get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
revision=revision, progress_obj=self.progress_obj, mtime=mtime)
revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)

shutil.copyfile(filename, storefilename)

Expand All @@ -1045,7 +1049,7 @@ def mergefile(self, n):
mtime = self.findfilebyname(n).mtime
get_source_file(self.apiurl, self.prjname, self.name, n,
revision=self.rev, targetfilename=upfilename,
progress_obj=self.progress_obj, mtime=mtime)
progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)

if binary_file(myfilename) or binary_file(upfilename):
# don't try merging
Expand Down Expand Up @@ -1088,7 +1092,7 @@ def update_local_filesmeta(self, revision=None):
Update the local _files file in the store.
It is replaced with the version pulled from upstream.
"""
meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size))
meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size, meta=self.meta))
store_write_string(self.absdir, '_files', meta)

def update_datastructs(self):
Expand Down Expand Up @@ -1128,6 +1132,7 @@ def update_datastructs(self):
self.in_conflict = read_inconflict(self.dir)
self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
self.size_limit = read_sizelimit(self.dir)
self.meta = read_meta_mode(self.dir)

# gather unversioned files, but ignore some stuff
self.excluded = [ i for i in os.listdir(self.dir)
Expand Down Expand Up @@ -1947,6 +1952,17 @@ def read_tobedeleted(dir):
return r


def read_meta_mode(dir):
    """Return 1 if the working copy in *dir* was checked out in meta mode.

    Reads the '_meta_mode' marker file from the store subdirectory and
    returns 1 when its first line is exactly "true"; returns None in
    every other case (marker absent or with unexpected content).
    """
    fname = os.path.join(dir, store, '_meta_mode')
    if not os.path.exists(fname):
        return None

    # read via an explicit handle; the original leaked the file object
    f = open(fname)
    try:
        r = f.readline()
    finally:
        f.close()

    if r != "true":
        return None
    return 1

def read_sizelimit(dir):
r = None
fname = os.path.join(dir, store, '_size_limit')
Expand Down Expand Up @@ -2112,7 +2128,7 @@ def init_project_dir(apiurl, dir, project):
if conf.config['do_package_tracking']:
store_write_initial_packages(dir, project, [])

def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=None):
if not os.path.isdir(store):
os.mkdir(store)
os.chdir(store)
Expand All @@ -2123,14 +2139,19 @@ def init_package_dir(apiurl, project, package, dir, revision=None, files=True, l
f.write(package + '\n')
f.close()

if meta:
f = open('_meta_mode', 'w')
f.write("true")
f.close()

if limit_size:
f = open('_size_limit', 'w')
f.write(str(limit_size))
f.close()

if files:
f = open('_files', 'w')
f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size, meta=meta)))
f.close()
else:
# create dummy
Expand Down Expand Up @@ -2172,9 +2193,13 @@ def check_store_version(dir):
raise oscerr.WorkingCopyWrongVersion, msg


def meta_get_packagelist(apiurl, prj, deleted=None):
    """Return the list of package names in project *prj*.

    When *deleted* is true, query the server for the formerly deleted
    packages of the project instead of the current ones.
    """
    query = {}
    if deleted:
        query['deleted'] = 1

    u = makeurl(apiurl, ['source', prj], query)
    f = http_GET(u)
    root = ET.parse(f).getroot()
    return [ node.get('name') for node in root.findall('entry') ]
Expand Down Expand Up @@ -2213,8 +2238,12 @@ def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revisio
return l


def meta_get_project_list(apiurl, deleted=None):
    """Return the sorted list of project names known to the server.

    When *deleted* is true, query the server for the formerly deleted
    projects instead of the current ones.  *deleted* defaults to None
    so existing one-argument callers keep working.
    """
    query = {}
    if deleted:
        query['deleted'] = 1

    u = makeurl(apiurl, ['source'], query)
    f = http_GET(u)
    root = ET.parse(f).getroot()
    return sorted([ node.get('name') for node in root ])
Expand Down Expand Up @@ -2454,7 +2483,7 @@ def edit_meta(metatype,
f.sync()


def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None, meta=None):
query = {}
if revision:
query['rev'] = revision
Expand All @@ -2464,6 +2493,8 @@ def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None,
query['linkrev'] = linkrev
elif conf.config['linkcontrol']:
query['linkrev'] = 'base'
if meta:
query['meta'] = 1
if expand:
query['expand'] = 1
if linkrepair:
Expand Down Expand Up @@ -2883,11 +2914,13 @@ def download(url, filename, progress_obj = None, mtime = None):
if mtime:
os.utime(filename, (-1, mtime))

def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None, meta=None):
    """Download a single source file of a package.

    Fetches *filename* from prj/package into *targetfilename* (which
    defaults to *filename*).  *revision* selects a specific source
    revision; *meta* requests the meta variant of the file; *mtime*
    sets the modification time of the downloaded file.
    """
    targetfilename = targetfilename or filename
    query = {}
    if meta:
        # bug fix: the previous code set query['rev'] = 1 here, which
        # forced revision 1 instead of requesting the meta file
        query['meta'] = 1
    if revision:
        query['rev'] = revision
    u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
    download(u, targetfilename, progress_obj, mtime)

Expand Down Expand Up @@ -3108,7 +3141,7 @@ def make_diff(wc, revision):

def server_diff(apiurl,
old_project, old_package, old_revision,
new_project, new_package, new_revision, unified=False, missingok=False):
new_project, new_package, new_revision, unified=False, missingok=False, meta=None):
query = {'cmd': 'diff', 'expand': '1'}
if old_project:
query['oproject'] = old_project
Expand All @@ -3122,6 +3155,8 @@ def server_diff(apiurl,
query['unified'] = 1
if missingok:
query['missingok'] = 1
if meta:
query['meta'] = 1

u = makeurl(apiurl, ['source', new_project, new_package], query=query)

Expand Down Expand Up @@ -3173,7 +3208,7 @@ def make_dir(apiurl, project, package, pathname=None, prj_dir=None):

def checkout_package(apiurl, project, package,
revision=None, pathname=None, prj_obj=None,
expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None, meta=None):
try:
# the project we're in might be deleted.
# that'll throw an error then.
Expand Down Expand Up @@ -3211,7 +3246,7 @@ def checkout_package(apiurl, project, package,
if x:
revision = x
os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size, meta=meta)
os.chdir(os.pardir)
p = Package(package, progress_obj=progress_obj)
if isfrozen:
Expand Down Expand Up @@ -3527,11 +3562,11 @@ def copy_pac(src_apiurl, src_project, src_package,


def undelete_package(apiurl, prj, pac):
    """Restore a previously deleted package via the server's undelete command."""
    u = makeurl(apiurl, ['source', prj, pac], query={'comment': 'undeleted via osc', 'cmd': 'undelete'})
    http_POST(u)

def undelete_project(apiurl, prj):
    """Restore a previously deleted project via the server's undelete command."""
    u = makeurl(apiurl, ['source', prj], query={'comment': 'undeleted via osc', 'cmd': 'undelete'})
    http_POST(u)


Expand Down

0 comments on commit 1e985f3

Please sign in to comment.