1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.131git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
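# For reference, the regexps above match strings of the form
#   <bs>://<apiurl>/<project>/<repository>/<revision>-<source>                  (DISTURL_RE)
#   http(s)://<apiurl>/build/<project>/<repository>/<arch>/<package>/_log       (BUILDLOGURL_RE)
# (the placeholders correspond to the named groups of the patterns).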
38 new_project_templ = """\
39 <project name="%(name)s">
41 <title></title> <!-- Short title of NewProject -->
43 <!-- This is for a longer description of the purpose of the project -->
46 <person role="maintainer" userid="%(user)s" />
47 <person role="bugowner" userid="%(user)s" />
48 <!-- remove this block to publish your packages on the mirrors -->
59 <!-- remove this comment to enable one or more build targets
61 <repository name="openSUSE_Factory">
62 <path project="openSUSE:Factory" repository="standard" />
66 <repository name="openSUSE_11.2">
67 <path project="openSUSE:11.2" repository="standard"/>
71 <repository name="openSUSE_11.1">
72 <path project="openSUSE:11.1" repository="standard"/>
76 <repository name="Fedora_12">
77 <path project="Fedora:12" repository="standard" />
81 <repository name="SLE_11">
82 <path project="SUSE:SLE-11" repository="standard" />
91 new_package_templ = """\
92 <package name="%(name)s">
94 <title></title> <!-- Title of package -->
97 <!-- for long description -->
100 <!-- following roles are inherited from the parent project
101 <person role="maintainer" userid="%(user)s"/>
102 <person role="bugowner" userid="%(user)s"/>
105 <url>PUT_UPSTREAM_URL_HERE</url>
109 use one of the examples below to disable building of this package
110 on a certain architecture, in a certain repository,
111 or a combination thereof:
113 <disable arch="x86_64"/>
114 <disable repository="SUSE_SLE-10"/>
115 <disable repository="SUSE_SLE-10" arch="x86_64"/>
117 Possible sections where you can use the tags above:
127 Please have a look at:
128 http://en.opensuse.org/Restricted_formats
129 Packages containing formats listed there are NOT allowed to
130 be packaged in the openSUSE Build Service and will be deleted!
137 new_attribute_templ = """\
139 <attribute namespace="" name="">
145 new_user_template = """\
147 <login>%(user)s</login>
148 <email>PUT_EMAIL_ADDRESS_HERE</email>
149 <realname>PUT_REAL_NAME_HERE</realname>
151 <project name="home:%(user)s"/>
167 new_pattern_template = """\
168 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
174 buildstatus_symbols = {'succeeded': '.',
176 'expansion error': 'U', # obsolete with OBS 2.0
190 # os.path.samefile is available only under Unix
191 def os_path_samefile(path1, path2):
193 return os.path.samefile(path1, path2)
195 return os.path.realpath(path1) == os.path.realpath(path2)
198 """represent a file, including its metadata"""
199 def __init__(self, name, md5, size, mtime, skipped=False):
204 self.skipped = skipped
212 """Source service content
215 """creates an empty serviceinfo instance"""
218 def read(self, serviceinfo_node, append=False):
219 """read in the source services <services> element passed as
222 if serviceinfo_node is None:
224 if not append or self.services is None:
226 services = serviceinfo_node.findall('service')
228 for service in services:
229 name = service.get('name')
230 mode = service.get('mode', None)
231 data = { 'name' : name, 'mode' : '' }
235 for param in service.findall('param'):
236 option = param.get('name', None)
238 name += " --" + option + " '" + value + "'"
239 data['command'] = name
240 self.services.append(data)
242 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
243 raise oscerr.APIError(msg)
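# A _service file parsed by read() typically looks like this sketch (the
# service/param names correspond to the add* helpers below; values are only
# examples):
#   <services>
#     <service name="download_url">
#       <param name="protocol">http</param>
#       <param name="host">example.org</param>
#       <param name="path">/pub/foo.tar.gz</param>
#     </service>
#   </services>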
245 def getProjectGlobalServices(self, apiurl, project, package):
246 # get all project wide services in one file, we don't store it yet
247 u = makeurl(apiurl, ['source', project, package], query='cmd=getprojectservices')
250 root = ET.parse(f).getroot()
251 self.read(root, True)
252 except urllib2.HTTPError, e:
256 def addVerifyFile(self, serviceinfo_node, filename):
259 f = open(filename, 'r')
260 digest = hashlib.sha256(f.read()).hexdigest()
264 s = ET.Element( "service", name="verify_file" )
265 ET.SubElement(s, "param", name="file").text = filename
266 ET.SubElement(s, "param", name="verifier").text = "sha256"
267 ET.SubElement(s, "param", name="checksum").text = digest
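# The element built above corresponds to a service entry like:
#   <service name="verify_file">
#     <param name="file">FILENAME</param>
#     <param name="verifier">sha256</param>
#     <param name="checksum">...sha256 hex digest...</param>
#   </service>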
273 def addDownloadUrl(self, serviceinfo_node, url_string):
274 from urlparse import urlparse
275 url = urlparse( url_string )
276 protocol = url.scheme
281 s = ET.Element( "service", name="download_url" )
282 ET.SubElement(s, "param", name="protocol").text = protocol
283 ET.SubElement(s, "param", name="host").text = host
284 ET.SubElement(s, "param", name="path").text = path
289 def addGitUrl(self, serviceinfo_node, url_string):
291 s = ET.Element( "service", name="tar_scm" )
292 ET.SubElement(s, "param", name="url").text = url_string
293 ET.SubElement(s, "param", name="scm").text = "git"
297 def addRecompressTar(self, serviceinfo_node):
299 s = ET.Element( "service", name="recompress" )
300 ET.SubElement(s, "param", name="file").text = "*.tar"
301 ET.SubElement(s, "param", name="compression").text = "bz2"
305 def execute(self, dir, callmode = None, singleservice = None):
308 # cleanup existing generated files
309 for filename in os.listdir(dir):
310 if filename.startswith('_service:') or filename.startswith('_service_'):
311 os.unlink(os.path.join(dir, filename))
313 allservices = self.services or []
314 if singleservice and not singleservice in allservices:
315 # set array to the manual specified singleservice, if it is not part of _service file
316 data = { 'name' : singleservice, 'command' : singleservice, 'mode' : '' }
320 for service in allservices:
321 if singleservice and service['name'] != singleservice:
323 if service['mode'] == "disabled" and callmode != "disabled":
325 if service['mode'] != "disabled" and callmode == "disabled":
327 call = service['command']
328 temp_dir = tempfile.mkdtemp()
329 name = call.split(None, 1)[0]
330 if not os.path.exists("/usr/lib/obs/service/"+name):
331 raise oscerr.PackageNotInstalled("obs-service-"+name)
332 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
333 if conf.config['verbose'] > 1:
334 print "Run source service:", c
335 ret = subprocess.call(c, shell=True)
337 print "ERROR: service call failed: " + c
338 # FIXME: addDownloadUrlService calls si.execute after
339 # updating _services.
340 print " (your _services file may be corrupt now)"
342 if service['mode'] in ("disabled", "trylocal", "localonly") or callmode == "local":
343 for filename in os.listdir(temp_dir):
344 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, filename) )
346 for filename in os.listdir(temp_dir):
347 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, "_service:"+name+":"+filename) )
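# Note: results of services run in the default mode are stored under the
# prefix "_service:<servicename>:", while "disabled"/"trylocal"/"localonly"
# services (or callmode "local") move their output into the package
# directory under the plain file names (see the two branches above).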
351 """linkinfo metadata (which is part of the xml representing a directory
354 """creates an empty linkinfo instance"""
364 def read(self, linkinfo_node):
365 """read in the linkinfo metadata from the <linkinfo> element passed as
367 If the passed element is None, the method does nothing.
369 if linkinfo_node is None:
371 self.project = linkinfo_node.get('project')
372 self.package = linkinfo_node.get('package')
373 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
374 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
375 self.srcmd5 = linkinfo_node.get('srcmd5')
376 self.error = linkinfo_node.get('error')
377 self.rev = linkinfo_node.get('rev')
378 self.baserev = linkinfo_node.get('baserev')
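# read() mirrors the attributes of the <linkinfo> element found in the
# package file listing, e.g. (values are only placeholders):
#   <linkinfo project="..." package="..." srcmd5="..." xsrcmd5="..."
#             lsrcmd5="..." baserev="..." rev="..." error="..."/>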
381 """returns True if the linkinfo is not empty, otherwise False"""
382 if self.xsrcmd5 or self.lsrcmd5:
386 def isexpanded(self):
387 """returns True if the package is an expanded link"""
388 if self.lsrcmd5 and not self.xsrcmd5:
393 """returns True if the link is in error state (could not be applied)"""
399 """return an informatory string representation"""
400 if self.islink() and not self.isexpanded():
401 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
402 % (self.project, self.package, self.xsrcmd5, self.rev)
403 elif self.islink() and self.isexpanded():
405 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
406 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
408 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
409 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
414 # http://effbot.org/zone/element-lib.htm#prettyprint
415 def xmlindent(elem, level=0):
418 if not elem.text or not elem.text.strip():
421 xmlindent(e, level+1)
422 if not e.tail or not e.tail.strip():
424 if not e.tail or not e.tail.strip():
427 if level and (not elem.tail or not elem.tail.strip()):
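# xmlindent() is used e.g. by Project.write_packages to pretty-print an
# ElementTree element in place before serializing it with ET.tostring().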
431 """represent a project directory, holding packages"""
432 REQ_STOREFILES = ('_project', '_apiurl')
433 if conf.config['do_package_tracking']:
434 REQ_STOREFILES += ('_packages',)
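# These are the files osc keeps in a project's .osc store directory:
# '_project' (the project name), '_apiurl' (the API server URL) and, when
# package tracking is enabled, '_packages' (the local package state list).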
435 def __init__(self, dir, getPackageList=True, progress_obj=None, wc_check=True):
438 self.absdir = os.path.abspath(dir)
439 self.progress_obj = progress_obj
441 self.name = store_read_project(self.dir)
442 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
446 dirty_files = self.wc_check()
448 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
449 'Please run \'osc repairwc %s\' and check the state\n' \
450 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
451 raise oscerr.WorkingCopyInconsistent(self.name, None, dirty_files, msg)
454 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
456 self.pacs_available = []
458 if conf.config['do_package_tracking']:
459 self.pac_root = self.read_packages().getroot()
460 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
461 self.pacs_excluded = [ i for i in os.listdir(self.dir)
462 for j in conf.config['exclude_glob']
463 if fnmatch.fnmatch(i, j) ]
464 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
465 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
466 # in the self.pacs_broken list
467 self.pacs_broken = []
468 for p in self.pacs_have:
469 if not os.path.isdir(os.path.join(self.absdir, p)):
470 # all states will be replaced with the '!'-state
471 # (unless it is already marked as deleted ('D'-state))
472 self.pacs_broken.append(p)
474 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
476 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
481 for fname in Project.REQ_STOREFILES:
482 if not os.path.exists(os.path.join(self.absdir, store, fname)):
483 dirty_files.append(fname)
486 def wc_repair(self, apiurl=None):
488 if not os.path.exists(os.path.join(self.dir, store, '_apiurl')) or apiurl:
490 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
491 'no \'apiurl\' was passed to wc_repair'
492 # hmm should we raise oscerr.WrongArgs?
493 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
495 conf.parse_apisrv_url(None, apiurl)
496 store_write_apiurl(self.dir, apiurl)
497 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
499 def checkout_missing_pacs(self, expand_link=False):
500 for pac in self.pacs_missing:
502 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
503 # pac is not under version control but a local file/dir exists
504 msg = 'can\'t add package \'%s\': Object already exists' % pac
505 raise oscerr.PackageExists(self.name, pac, msg)
507 print 'checking out new package %s' % pac
508 checkout_package(self.apiurl, self.name, pac, \
509 pathname=getTransActPath(os.path.join(self.dir, pac)), \
510 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
512 def status(self, pac):
513 exists = os.path.exists(os.path.join(self.absdir, pac))
514 st = self.get_state(pac)
515 if st is None and exists:
518 raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % pac)
519 elif st in ('A', ' ') and not exists:
521 elif st == 'D' and not exists:
526 def get_status(self, *exclude_states):
528 for pac in self.pacs_have:
529 st = self.status(pac)
530 if not st in exclude_states:
531 res.append((st, pac))
532 if not '?' in exclude_states:
533 res.extend([('?', pac) for pac in self.pacs_unvers])
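# get_status() thus returns a list of (state, package) tuples for the
# tracked packages, plus '?' entries for unversioned ones, with any states
# listed in exclude_states filtered out.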
536 def get_pacobj(self, pac, *pac_args, **pac_kwargs):
538 st = self.status(pac)
539 if st in ('?', '!') or st == 'D' and not os.path.exists(os.path.join(self.dir, pac)):
541 return Package(os.path.join(self.dir, pac), *pac_args, **pac_kwargs)
542 except oscerr.OscIOError:
545 def set_state(self, pac, state):
546 node = self.get_package_node(pac)
548 self.new_package_entry(pac, state)
550 node.set('state', state)
552 def get_package_node(self, pac):
553 for node in self.pac_root.findall('package'):
554 if pac == node.get('name'):
558 def del_package_node(self, pac):
559 for node in self.pac_root.findall('package'):
560 if pac == node.get('name'):
561 self.pac_root.remove(node)
563 def get_state(self, pac):
564 node = self.get_package_node(pac)
566 return node.get('state')
570 def new_package_entry(self, name, state):
571 ET.SubElement(self.pac_root, 'package', name=name, state=state)
573 def read_packages(self):
576 packages_file = os.path.join(self.absdir, store, '_packages')
577 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
578 return ET.parse(packages_file)
580 # scan project for existing packages and migrate them
582 for data in os.listdir(self.dir):
583 pac_dir = os.path.join(self.absdir, data)
584 # we cannot use self.pacs_available because we cannot guarantee that the package list
585 # was fetched from the server
586 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
587 and Package(pac_dir).name == data:
588 cur_pacs.append(ET.Element('package', name=data, state=' '))
589 store_write_initial_packages(self.absdir, self.name, cur_pacs)
590 return ET.parse(os.path.join(self.absdir, store, '_packages'))
592 def write_packages(self):
593 xmlindent(self.pac_root)
594 store_write_string(self.absdir, '_packages', ET.tostring(self.pac_root))
596 def addPackage(self, pac):
598 for i in conf.config['exclude_glob']:
599 if fnmatch.fnmatch(pac, i):
600 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
601 raise oscerr.OscIOError(None, msg)
602 state = self.get_state(pac)
603 if state is None or state == 'D':
604 self.new_package_entry(pac, 'A')
605 self.write_packages()
606 # sometimes the new pac doesn't exist in the list because
607 # it would take too much time to update all data structs regularly
608 if pac in self.pacs_unvers:
609 self.pacs_unvers.remove(pac)
611 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
613 def delPackage(self, pac, force = False):
614 state = self.get_state(pac.name)
616 if state == ' ' or state == 'D':
618 for filename in pac.filenamelist + pac.filenamelist_unvers:
619 filestate = pac.status(filename)
620 if filestate in ('M', 'C', 'A', '?'):
624 del_files.append(filename)
625 if can_delete or force:
626 for filename in del_files:
627 pac.delete_localfile(filename)
628 if pac.status(filename) != '?':
629 # this is not really necessary
630 pac.put_on_deletelist(filename)
631 print statfrmt('D', getTransActPath(os.path.join(pac.dir, filename)))
632 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
633 pac.write_deletelist()
634 self.set_state(pac.name, 'D')
635 self.write_packages()
637 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
640 delete_dir(pac.absdir)
641 self.del_package_node(pac.name)
642 self.write_packages()
643 print statfrmt('D', pac.name)
645 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
647 print 'package is not under version control'
649 print 'unsupported state'
651 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
654 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
656 # we need to make sure that the _packages file will be written (even if an exception
659 # update complete project
660 # packages which no longer exist upstream
661 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
663 for pac in upstream_del:
664 p = Package(os.path.join(self.dir, pac))
665 self.delPackage(p, force = True)
666 delete_storedir(p.storedir)
671 self.pac_root.remove(self.get_package_node(p.name))
672 self.pacs_have.remove(pac)
674 for pac in self.pacs_have:
675 state = self.get_state(pac)
676 if pac in self.pacs_broken:
677 if self.get_state(pac) != 'A':
678 checkout_package(self.apiurl, self.name, pac,
679 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
680 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
683 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
685 if expand_link and p.islink() and not p.isexpanded():
688 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
690 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
693 rev = p.linkinfo.xsrcmd5
694 print 'Expanding to rev', rev
695 elif unexpand_link and p.islink() and p.isexpanded():
696 rev = p.linkinfo.lsrcmd5
697 print 'Unexpanding to rev', rev
698 elif p.islink() and p.isexpanded():
700 print 'Updating %s' % p.name
701 p.update(rev, service_files)
705 # TODO: Package::update has to be fixed to behave like svn does
706 if pac in self.pacs_broken:
707 checkout_package(self.apiurl, self.name, pac,
708 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
709 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
711 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
712 elif state == 'A' and pac in self.pacs_available:
713 # file/dir called pac already exists and is under version control
714 msg = 'can\'t add package \'%s\': Object already exists' % pac
715 raise oscerr.PackageExists(self.name, pac, msg)
720 print 'unexpected state.. package \'%s\'' % pac
722 self.checkout_missing_pacs(expand_link=not unexpand_link)
724 self.write_packages()
726 def validate_pacs(self, validators, verbose_validation=False, *pacs):
728 for pac in self.pacs_broken:
729 if self.get_state(pac) != 'D':
730 msg = 'validation failed: package \'%s\' is missing' % pac
731 raise oscerr.PackageMissing(self.name, pac, msg)
732 pacs = self.pacs_have
734 if pac in self.pacs_broken and self.get_state(pac) != 'D':
735 msg = 'validation failed: package \'%s\' is missing' % pac
736 raise oscerr.PackageMissing(self.name, pac, msg)
737 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
740 p = Package(os.path.join(self.dir, pac))
741 p.validate(validators, verbose_validation)
743 def commit(self, pacs = (), msg = '', files = {}, validators_dir = None, verbose_validation = False):
748 if pac in files:
750 state = self.get_state(pac)
752 self.commitNewPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
754 self.commitDelPackage(pac)
756 # display the correct dir when sending the changes
757 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
760 p = Package(os.path.join(self.dir, pac))
762 p.commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
763 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
764 print 'osc: \'%s\' is not under version control' % pac
765 elif pac in self.pacs_broken:
766 print 'osc: \'%s\' package not found' % pac
768 self.commitExtPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
770 self.write_packages()
772 # if we have packages marked as '!' we cannot commit
773 for pac in self.pacs_broken:
774 if self.get_state(pac) != 'D':
775 msg = 'commit failed: package \'%s\' is missing' % pac
776 raise oscerr.PackageMissing(self.name, pac, msg)
778 for pac in self.pacs_have:
779 state = self.get_state(pac)
782 Package(os.path.join(self.dir, pac)).commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
784 self.commitDelPackage(pac)
786 self.commitNewPackage(pac, msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
788 self.write_packages()
790 def commitNewPackage(self, pac, msg = '', files = [], validators_dir = None, verbose_validation = False):
791 """creates and commits a new package if it does not exist on the server"""
792 if pac in self.pacs_available:
793 print 'package \'%s\' already exists' % pac
795 user = conf.get_apiurl_usr(self.apiurl)
796 edit_meta(metatype='pkg',
797 path_args=(quote_plus(self.name), quote_plus(pac)),
802 # display the correct dir when sending the changes
804 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
808 p = Package(os.path.join(self.dir, pac))
810 print statfrmt('Sending', os.path.normpath(p.dir))
811 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
812 self.set_state(pac, ' ')
815 def commitDelPackage(self, pac):
816 """deletes a package on the server and in the working copy"""
818 # display the correct dir when sending the changes
819 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
822 pac_dir = os.path.join(self.dir, pac)
823 p = Package(os.path.join(self.dir, pac))
824 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
825 delete_storedir(p.storedir)
831 pac_dir = os.path.join(self.dir, pac)
832 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
833 print statfrmt('Deleting', getTransActPath(pac_dir))
834 delete_package(self.apiurl, self.name, pac)
835 self.del_package_node(pac)
837 def commitExtPackage(self, pac, msg, files = [], validators_dir=None, verbose_validation=False):
838 """commits a package from an external project"""
839 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
842 pac_path = os.path.join(self.dir, pac)
844 project = store_read_project(pac_path)
845 package = store_read_package(pac_path)
846 apiurl = store_read_apiurl(pac_path, defaulturl=False)
847 if not meta_exists(metatype='pkg',
848 path_args=(quote_plus(project), quote_plus(package)),
849 template_args=None, create_new=False, apiurl=apiurl):
850 user = conf.get_apiurl_usr(self.apiurl)
851 edit_meta(metatype='pkg',
852 path_args=(quote_plus(project), quote_plus(package)),
853 template_args=({'name': pac, 'user': user}), apiurl=apiurl)
854 p = Package(pac_path)
856 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
860 r.append('*****************************************************')
861 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
862 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
863 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
864 r.append('*****************************************************')
868 def init_project(apiurl, dir, project, package_tracking=True, getPackageList=True, progress_obj=None, wc_check=True):
871 if not os.path.exists(dir):
872 # use makedirs (checkout_no_colon config option might be enabled)
874 elif not os.path.isdir(dir):
875 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
876 if os.path.exists(os.path.join(dir, store)):
877 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
879 os.mkdir(os.path.join(dir, store))
881 store_write_project(dir, project)
882 store_write_apiurl(dir, apiurl)
884 store_write_initial_packages(dir, project, [])
885 return Project(dir, getPackageList, progress_obj, wc_check)
889 """represent a package (its directory) and read/keep/write its metadata"""
891 # should _meta be a required file?
892 REQ_STOREFILES = ('_project', '_package', '_apiurl', '_files', '_osclib_version')
893 OPT_STOREFILES = ('_to_be_added', '_to_be_deleted', '_in_conflict', '_in_update',
894 '_in_commit', '_meta', '_meta_mode', '_frozenlink', '_pulled', '_linkrepair',
895 '_size_limit', '_commit_msg')
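# Roughly: REQ_STOREFILES must always be present in a package's .osc dir,
# while the OPT_STOREFILES only show up when the corresponding feature
# (pending adds/deletes, conflicts, meta mode, frozen link, ...) is in use.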
897 def __init__(self, workingdir, progress_obj=None, size_limit=None, wc_check=True):
900 self.dir = workingdir
901 self.absdir = os.path.abspath(self.dir)
902 self.storedir = os.path.join(self.absdir, store)
903 self.progress_obj = progress_obj
904 self.size_limit = size_limit
905 if size_limit == 0:  # a size limit of 0 means "no limit"
906 self.size_limit = None
908 check_store_version(self.dir)
910 self.prjname = store_read_project(self.dir)
911 self.name = store_read_package(self.dir)
912 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
914 self.update_datastructs()
917 dirty_files = self.wc_check()
919 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
920 'Please run \'osc repairwc %s\' (Note this might _remove_\n' \
921 'files from the .osc/ dir). Please check the state\n' \
922 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
923 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, dirty_files, msg)
929 for fname in self.filenamelist:
930 if not os.path.exists(os.path.join(self.storedir, fname)) and not fname in self.skipped:
931 dirty_files.append(fname)
932 for fname in Package.REQ_STOREFILES:
933 if not os.path.isfile(os.path.join(self.storedir, fname)):
934 dirty_files.append(fname)
935 for fname in os.listdir(self.storedir):
936 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
937 fname.startswith('_build'):
939 elif fname in self.filenamelist and fname in self.skipped:
940 dirty_files.append(fname)
941 elif not fname in self.filenamelist:
942 dirty_files.append(fname)
943 for fname in self.to_be_deleted[:]:
944 if not fname in self.filenamelist:
945 dirty_files.append(fname)
946 for fname in self.in_conflict[:]:
947 if not fname in self.filenamelist:
948 dirty_files.append(fname)
951 def wc_repair(self, apiurl=None):
952 if not os.path.exists(os.path.join(self.storedir, '_apiurl')) or apiurl:
954 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
955 'no \'apiurl\' was passed to wc_repair'
956 # hmm should we raise oscerr.WrongArgs?
957 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
959 conf.parse_apisrv_url(None, apiurl)
960 store_write_apiurl(self.dir, apiurl)
961 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
962 # all files which are present in the filelist have to exist in the storedir
963 for f in self.filelist:
964 # XXX: should we also check the md5?
965 if not os.path.exists(os.path.join(self.storedir, f.name)) and not f.name in self.skipped:
966 # if get_source_file fails we're screwed up...
967 get_source_file(self.apiurl, self.prjname, self.name, f.name,
968 targetfilename=os.path.join(self.storedir, f.name), revision=self.rev,
970 for fname in os.listdir(self.storedir):
971 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
972 fname.startswith('_build'):
974 elif not fname in self.filenamelist or fname in self.skipped:
975 # this file does not belong to the storedir so remove it
976 os.unlink(os.path.join(self.storedir, fname))
977 for fname in self.to_be_deleted[:]:
978 if not fname in self.filenamelist:
979 self.to_be_deleted.remove(fname)
980 self.write_deletelist()
981 for fname in self.in_conflict[:]:
982 if not fname in self.filenamelist:
983 self.in_conflict.remove(fname)
984 self.write_conflictlist()
987 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
988 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
991 def addfile(self, n):
992 if not os.path.exists(os.path.join(self.absdir, n)):
993 raise oscerr.OscIOError(None, 'error: file \'%s\' does not exist' % n)
994 if n in self.to_be_deleted:
995 self.to_be_deleted.remove(n)
996 # self.delete_storefile(n)
997 self.write_deletelist()
998 elif n in self.filenamelist or n in self.to_be_added:
999 raise oscerr.PackageFileConflict(self.prjname, self.name, n, 'osc: warning: \'%s\' is already under version control' % n)
1000 # shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
1002 pathname = os.path.join(self.dir, n)
1005 self.to_be_added.append(n)
1006 self.write_addlist()
1007 print statfrmt('A', pathname)
1009 def delete_file(self, n, force=False):
1010 """deletes a file if possible and marks the file as deleted"""
1013 state = self.status(n)
1014 except IOError, ioe:
1017 if state in ['?', 'A', 'M', 'R', 'C'] and not force:
1018 return (False, state)
1019 # special handling for skipped files: if file exists, simply delete it
1021 exists = os.path.exists(os.path.join(self.dir, n))
1022 self.delete_localfile(n)
1023 return (exists, 'S')
1025 self.delete_localfile(n)
1026 was_added = n in self.to_be_added
1027 if state in ('A', 'R') or state == '!' and was_added:
1028 self.to_be_added.remove(n)
1029 self.write_addlist()
1031 # don't remove "merge files" (*.r, *.mine...)
1032 # that's why we don't use clear_from_conflictlist
1033 self.in_conflict.remove(n)
1034 self.write_conflictlist()
1035 if not state in ('A', '?') and not (state == '!' and was_added):
1036 self.put_on_deletelist(n)
1037 self.write_deletelist()
1038 return (True, state)
1040 def delete_storefile(self, n):
1041 try: os.unlink(os.path.join(self.storedir, n))
1044 def delete_localfile(self, n):
1045 try: os.unlink(os.path.join(self.dir, n))
1048 def put_on_deletelist(self, n):
1049 if n not in self.to_be_deleted:
1050 self.to_be_deleted.append(n)
1052 def put_on_conflictlist(self, n):
1053 if n not in self.in_conflict:
1054 self.in_conflict.append(n)
1056 def put_on_addlist(self, n):
1057 if n not in self.to_be_added:
1058 self.to_be_added.append(n)
1060 def clear_from_conflictlist(self, n):
1061 """delete an entry from the file, and remove the file if it would be empty"""
1062 if n in self.in_conflict:
1064 filename = os.path.join(self.dir, n)
1065 storefilename = os.path.join(self.storedir, n)
1066 myfilename = os.path.join(self.dir, n + '.mine')
1067 if self.islinkrepair() or self.ispulled():
1068 upfilename = os.path.join(self.dir, n + '.new')
1070 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1073 os.unlink(myfilename)
1074 # the working copy may have been updated in the meantime, so the .r*
1075 # suffix may already be stale; in that case we don't care
1076 os.unlink(upfilename)
1077 if self.islinkrepair() or self.ispulled():
1078 os.unlink(os.path.join(self.dir, n + '.old'))
1082 self.in_conflict.remove(n)
1084 self.write_conflictlist()
1086 # XXX: this isn't used at all
1087 def write_meta_mode(self):
1088 # XXX: the "elif" is somewhat contradictory: with both the current and the
1089 # old implementation it is not possible to "leave" meta mode again (unless
1090 # you modify pac.meta directly, which is really ugly)
1092 store_write_string(self.absdir, '_meta_mode', '')
1093 elif self.ismetamode():
1094 os.unlink(os.path.join(self.storedir, '_meta_mode'))
1096 def write_sizelimit(self):
1097 if self.size_limit and self.size_limit <= 0:
1099 os.unlink(os.path.join(self.storedir, '_size_limit'))
1103 store_write_string(self.absdir, '_size_limit', str(self.size_limit) + '\n')
1105 def write_addlist(self):
1106 self.__write_storelist('_to_be_added', self.to_be_added)
1108 def write_deletelist(self):
1109 self.__write_storelist('_to_be_deleted', self.to_be_deleted)
1111 def delete_source_file(self, n):
1112 """delete local a source file"""
1113 self.delete_localfile(n)
1114 self.delete_storefile(n)
1116 def delete_remote_source_file(self, n):
1117 """delete a remote source file (e.g. from the server)"""
1118 query = 'rev=upload'
1119 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1122 def put_source_file(self, n, copy_only=False):
1123 cdir = os.path.join(self.storedir, '_in_commit')
1125 if not os.path.isdir(cdir):
1127 query = 'rev=repository'
1128 tmpfile = os.path.join(cdir, n)
1129 shutil.copyfile(os.path.join(self.dir, n), tmpfile)
1130 # escaping '+' in the URL path (note: not in the URL query string) is
1131 # only a workaround for ruby on rails, which swallows it otherwise
1133 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1134 http_PUT(u, file = os.path.join(self.dir, n))
1135 os.rename(tmpfile, os.path.join(self.storedir, n))
1137 if os.path.isdir(cdir):
1139 if n in self.to_be_added:
1140 self.to_be_added.remove(n)
1142 def __generate_commitlist(self, todo_send):
1143 root = ET.Element('directory')
1144 keys = todo_send.keys()
1147 ET.SubElement(root, 'entry', name=i, md5=todo_send[i])
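# The commit filelist generated above is a plain <directory> element, e.g.
# (file names are only illustrative):
#   <directory>
#     <entry name="foo.spec" md5="..."/>
#     <entry name="foo.changes" md5="..."/>
#   </directory>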
1150 def __send_commitlog(self, msg, local_filelist):
1151 """send the commitlog and the local filelist to the server"""
1152 query = {'cmd' : 'commitfilelist',
1153 'user' : conf.get_apiurl_usr(self.apiurl),
1155 if self.islink() and self.isexpanded():
1156 query['keeplink'] = '1'
1157 if conf.config['linkcontrol'] or self.isfrozen():
1158 query['linkrev'] = self.linkinfo.srcmd5
1160 query['repairlink'] = '1'
1161 query['linkrev'] = self.get_pulled_srcmd5()
1162 if self.islinkrepair():
1163 query['repairlink'] = '1'
1164 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
1165 f = http_POST(u, data=ET.tostring(local_filelist))
1166 root = ET.parse(f).getroot()
1169 def __get_todo_send(self, server_filelist):
1170 """parse todo from a previous __send_commitlog call"""
1171 error = server_filelist.get('error')
1174 elif error != 'missing':
1175 raise oscerr.PackageInternalError(self.prjname, self.name,
1176 '__get_todo_send: unexpected \'error\' attr: \'%s\'' % error)
1178 for n in server_filelist.findall('entry'):
1179 name = n.get('name')
1181 raise oscerr.APIError('missing \'name\' attribute:\n%s\n' % ET.tostring(server_filelist))
1182 todo.append(n.get('name'))
1185 def validate(self, validators_dir, verbose_validation=False):
1188 if validators_dir is None or self.name.startswith('_'):
1190 for validator in sorted(os.listdir(validators_dir)):
1191 if validator.startswith('.'):
1193 fn = os.path.join(validators_dir, validator)
1194 mode = os.stat(fn).st_mode
1195 if stat.S_ISREG(mode):
1196 if verbose_validation:
1197 print 'osc runs source validator: %s' % fn
1198 p = subprocess.Popen([fn, '--verbose'], close_fds=True)
1200 p = subprocess.Popen([fn], close_fds=True)
1202 raise oscerr.ExtRuntimeError('ERROR: source_validator failed:\n%s' % p.stdout, validator)
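# Each regular, non-hidden file in validators_dir is executed as an external
# check against the working copy (with --verbose when verbose_validation is
# set); a failing validator raises ExtRuntimeError and thereby aborts the
# commit (Package.commit calls validate() before sending anything).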
1204 def commit(self, msg='', validators_dir=None, verbose_validation=False):
1205 # commit only if the upstream revision is the same as the working copy's
1206 upstream_rev = self.latest_rev()
1207 if self.rev != upstream_rev:
1208 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
1210 if validators_dir is not None:
1211 self.validate(validators_dir, verbose_validation)
1214 self.todo = [i for i in self.to_be_added if not i in self.filenamelist] + self.filenamelist
1216 pathn = getTransActPath(self.dir)
1221 for filename in self.filenamelist + [i for i in self.to_be_added if not i in self.filenamelist]:
1222 if filename.startswith('_service:') or filename.startswith('_service_'):
1224 st = self.status(filename)
1226 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
1228 elif filename in self.todo:
1229 if st in ('A', 'R', 'M'):
1230 todo_send[filename] = dgst(os.path.join(self.absdir, filename))
1231 real_send.append(filename)
1232 print statfrmt('Sending', os.path.join(pathn, filename))
1233 elif st in (' ', '!', 'S'):
1234 if st == '!' and filename in self.to_be_added:
1235 print 'file \'%s\' is marked as \'A\' but does not exist' % filename
1237 f = self.findfilebyname(filename)
1239 raise oscerr.PackageInternalError(self.prjname, self.name,
1240 'error: file \'%s\' with state \'%s\' is not known by meta' \
1242 todo_send[filename] = f.md5
1244 todo_delete.append(filename)
1245 print statfrmt('Deleting', os.path.join(pathn, filename))
1246 elif st in ('R', 'M', 'D', ' ', '!', 'S'):
1247 # ignore missing new file (it's not part of the current commit)
1248 if st == '!' and filename in self.to_be_added:
1250 f = self.findfilebyname(filename)
1252 raise oscerr.PackageInternalError(self.prjname, self.name,
1253 'error: file \'%s\' with state \'%s\' is not known by meta' \
1255 todo_send[filename] = f.md5
1257 if not real_send and not todo_delete and not self.islinkrepair() and not self.ispulled():
1258 print 'nothing to do for package %s' % self.name
1261 print 'Transmitting file data ',
1262 filelist = self.__generate_commitlist(todo_send)
1263 sfilelist = self.__send_commitlog(msg, filelist)
1264 send = self.__get_todo_send(sfilelist)
1265 real_send = [i for i in real_send if not i in send]
1266 # abort after 3 tries
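# The server answers the commitfilelist POST with a filelist carrying
# error="missing" plus <entry> elements for every file it still needs (see
# __get_todo_send); those files are uploaded and the commitlog is POSTed
# again, giving up after three rounds.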
1268 while len(send) and tries:
1269 for filename in send[:]:
1270 sys.stdout.write('.')
1272 self.put_source_file(filename)
1273 send.remove(filename)
1275 sfilelist = self.__send_commitlog(msg, filelist)
1276 send = self.__get_todo_send(sfilelist)
1278 raise oscerr.PackageInternalError(self.prjname, self.name,
1279 'server does not accept filelist:\n%s\nmissing:\n%s\n' \
1280 % (ET.tostring(filelist), ET.tostring(sfilelist)))
1281 # these files already exist on the server
1282 # just copy them into the storedir
1283 for filename in real_send:
1284 self.put_source_file(filename, copy_only=True)
1286 self.rev = sfilelist.get('rev')
1288 print 'Committed revision %s.' % self.rev
1291 os.unlink(os.path.join(self.storedir, '_pulled'))
1292 if self.islinkrepair():
1293 os.unlink(os.path.join(self.storedir, '_linkrepair'))
1294 self.linkrepair = False
1295 # XXX: mark package as invalid?
1296 print 'The source link has been repaired. This directory can now be removed.'
1298 if self.islink() and self.isexpanded():
1300 li.read(sfilelist.find('linkinfo'))
1301 if li.xsrcmd5 is None:
1302 raise oscerr.APIError('linkinfo has no xsrcmd5 attr:\n%s\n' % ET.tostring(sfilelist))
1303 sfilelist = ET.fromstring(self.get_files_meta(revision=li.xsrcmd5))
1304 for i in sfilelist.findall('entry'):
1305 if i.get('name') in self.skipped:
1306 i.set('skipped', 'true')
1307 store_write_string(self.absdir, '_files', ET.tostring(sfilelist) + '\n')
1308 for filename in todo_delete:
1309 self.to_be_deleted.remove(filename)
1310 self.delete_storefile(filename)
1311 self.write_deletelist()
1312 self.write_addlist()
1313 self.update_datastructs()
1315 print_request_list(self.apiurl, self.prjname, self.name)
1317 if self.findfilebyname("_service"):
1318 print 'Waiting for server side source service run',
1319 u = makeurl(self.apiurl, ['source', self.prjname, self.name])
1322 sfilelist = ET.parse(f).getroot()
1323 s = sfilelist.find('serviceinfo')
1324 if s.get('code') == "running":
1332 def __write_storelist(self, name, data):
1335 os.unlink(os.path.join(self.storedir, name))
1339 store_write_string(self.absdir, name, '%s\n' % '\n'.join(data))
1341 def write_conflictlist(self):
1342 self.__write_storelist('_in_conflict', self.in_conflict)
1344 def updatefile(self, n, revision, mtime=None):
1345 filename = os.path.join(self.dir, n)
1346 storefilename = os.path.join(self.storedir, n)
1347 origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
1348 origfile = os.path.join(self.storedir, '_in_update', n)
1349 if os.path.isfile(filename):
1350 shutil.copyfile(filename, origfile_tmp)
1351 os.rename(origfile_tmp, origfile)
1355 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=storefilename,
1356 revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1358 shutil.copyfile(storefilename, filename)
1359 if origfile is not None:
1362 def mergefile(self, n, revision, mtime=None):
1363 filename = os.path.join(self.dir, n)
1364 storefilename = os.path.join(self.storedir, n)
1365 myfilename = os.path.join(self.dir, n + '.mine')
1366 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1367 origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
1368 origfile = os.path.join(self.storedir, '_in_update', n)
1369 shutil.copyfile(filename, origfile_tmp)
1370 os.rename(origfile_tmp, origfile)
1371 os.rename(filename, myfilename)
1373 get_source_file(self.apiurl, self.prjname, self.name, n,
1374 revision=revision, targetfilename=upfilename,
1375 progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1377 if binary_file(myfilename) or binary_file(upfilename):
1379 shutil.copyfile(upfilename, filename)
1380 shutil.copyfile(upfilename, storefilename)
1382 self.in_conflict.append(n)
1383 self.write_conflictlist()
1387 # diff3 OPTIONS... MINE OLDER YOURS
1388 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1389 # run diff3 through the shell so that the output redirection in merge_cmd works
1390 ret = subprocess.call(merge_cmd, shell=True)
1392 # "An exit status of 0 means `diff3' was successful, 1 means some
1393 # conflicts were found, and 2 means trouble."
1395 # merge was successful... clean up
1396 shutil.copyfile(upfilename, storefilename)
1397 os.unlink(upfilename)
1398 os.unlink(myfilename)
1402 # unsuccessful merge
1403 shutil.copyfile(upfilename, storefilename)
1405 self.in_conflict.append(n)
1406 self.write_conflictlist()
1409 raise oscerr.ExtRuntimeError('diff3 failed with exit code: %s' % ret, merge_cmd)
1411 def update_local_filesmeta(self, revision=None):
1413 Update the local _files file in the store.
1414 It is replaced with the version pulled from upstream.
1416 meta = self.get_files_meta(revision=revision)
1417 store_write_string(self.absdir, '_files', meta + '\n')
1419 def get_files_meta(self, revision='latest', skip_service=True):
1420 fm = show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, meta=self.meta)
1421 # look for "too large" files according to size limit and mark them
1422 root = ET.fromstring(fm)
1423 for e in root.findall('entry'):
1424 size = e.get('size')
1425 if (size and self.size_limit and int(size) > self.size_limit) \
1426 or (skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_'))):
1427 e.set('skipped', 'true')
1428 return ET.tostring(root)
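# The files meta returned here is the <directory> listing of the package
# sources, roughly:
#   <directory rev="..." srcmd5="...">
#     <linkinfo .../>
#     <entry name="..." md5="..." size="..." mtime="..." [skipped="true"]/>
#   </directory>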
1430 def update_datastructs(self):
1432 Update the internal data structures if the local _files
1433 file has changed (e.g. update_local_filesmeta() has been
1437 files_tree = read_filemeta(self.dir)
1438 files_tree_root = files_tree.getroot()
1440 self.rev = files_tree_root.get('rev')
1441 self.srcmd5 = files_tree_root.get('srcmd5')
1443 self.linkinfo = Linkinfo()
1444 self.linkinfo.read(files_tree_root.find('linkinfo'))
1446 self.filenamelist = []
1449 for node in files_tree_root.findall('entry'):
1451 f = File(node.get('name'),
1453 int(node.get('size')),
1454 int(node.get('mtime')))
1455 if node.get('skipped'):
1456 self.skipped.append(f.name)
1459 # okay, a very old version of _files, which didn't contain any metadata yet...
1460 f = File(node.get('name'), '', 0, 0)
1461 self.filelist.append(f)
1462 self.filenamelist.append(f.name)
1464 self.to_be_added = read_tobeadded(self.absdir)
1465 self.to_be_deleted = read_tobedeleted(self.absdir)
1466 self.in_conflict = read_inconflict(self.absdir)
1467 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1468 self.size_limit = read_sizelimit(self.dir)
1469 self.meta = self.ismetamode()
1471 # gather unversioned files, but ignore some stuff
1473 for i in os.listdir(self.dir):
1474 for j in conf.config['exclude_glob']:
1475 if fnmatch.fnmatch(i, j):
1476 self.excluded.append(i)
1478 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1479 if i not in self.excluded
1480 if i not in self.filenamelist ]
1483 """tells us if the package is a link (has 'linkinfo').
1484 A package with linkinfo is a package which links to another package.
1485 Returns True if the package is a link, otherwise False."""
1486 return self.linkinfo.islink()
1488 def isexpanded(self):
1489 """tells us if the package is a link which is expanded.
1490 Returns True if the package is expanded, otherwise False."""
1491 return self.linkinfo.isexpanded()
1493 def islinkrepair(self):
1494 """tells us if we are repairing a broken source link."""
1495 return self.linkrepair
1498 """tells us if we have pulled a link."""
1499 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1502 """tells us if the link is frozen."""
1503 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1505 def ismetamode(self):
1506 """tells us if the package is in meta mode"""
1507 return os.path.isfile(os.path.join(self.storedir, '_meta_mode'))
1509 def get_pulled_srcmd5(self):
1511 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1512 pulledrev = line.strip()
1515 def haslinkerror(self):
1517 Returns True if the link is broken otherwise False.
1518 If the package is not a link it returns False.
1520 return self.linkinfo.haserror()
1522 def linkerror(self):
1524 Returns an error message if the link is broken otherwise None.
1525 If the package is not a link it returns None.
1527 return self.linkinfo.error
1529 def update_local_pacmeta(self):
1531 Update the local _meta file in the store.
1532 It is replaced with the version pulled from upstream.
1534 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1535 store_write_string(self.absdir, '_meta', meta + '\n')
1537 def findfilebyname(self, n):
1538 for i in self.filelist:
1542 def get_status(self, excluded=False, *exclude_states):
1546 todo = self.filenamelist + self.to_be_added + \
1547 [i for i in self.filenamelist_unvers if not os.path.isdir(os.path.join(self.absdir, i))]
1549 todo.extend([i for i in self.excluded if i != store])
1552 for fname in sorted(todo):
1553 st = self.status(fname)
1554 if not st in exclude_states:
1555 res.append((st, fname))
1558 def status(self, n):
1562              file       storefile    file present   STATUS
1563              exists     exists       in _files
1565 x - - 'A' and listed in _to_be_added
1566 x x - 'R' and listed in _to_be_added
1567 x x x ' ' if digest differs: 'M'
1568 and if in conflicts file: 'C'
1570 - x x 'D' and listed in _to_be_deleted
1571 x x x 'D' and listed in _to_be_deleted (e.g. if deleted file was modified)
1572 x x x 'C' and listed in _in_conflict
1573 x - x 'S' and listed in self.skipped
1574 - - x 'S' and listed in self.skipped
1580 known_by_meta = False
1582 exists_in_store = False
1583 if n in self.filenamelist:
1584 known_by_meta = True
1585 if os.path.exists(os.path.join(self.absdir, n)):
1587 if os.path.exists(os.path.join(self.storedir, n)):
1588 exists_in_store = True
1590 if n in self.to_be_deleted:
1592 elif n in self.in_conflict:
1594 elif n in self.skipped:
1596 elif n in self.to_be_added and exists and exists_in_store:
1598 elif n in self.to_be_added and exists:
1600 elif exists and exists_in_store and known_by_meta:
1601 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1605 elif n in self.to_be_added and not exists:
1607 elif not exists and exists_in_store and known_by_meta and not n in self.to_be_deleted:
1609 elif exists and not exists_in_store and not known_by_meta:
1611 elif not exists_in_store and known_by_meta:
1612 # XXX: this codepath shouldn't be reached (we restore the storefile
1613 # in update_datastructs)
1614 raise oscerr.PackageInternalError(self.prjname, self.name,
1615 'error: file \'%s\' is known by meta but no storefile exists.\n'
1616 'This might be caused by an old wc format. Please backup your current\n'
1617 'wc and checkout the package again. Afterwards copy all files (except the\n'
1618 '.osc/ dir) into the new package wc.' % n)
1620 # this case shouldn't happen (unless there was a typo in the filename, etc.)
1621 raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % n)
1625 def get_diff(self, revision=None, ignoreUnversioned=False):
1627 diff_hdr = 'Index: %s\n'
1628 diff_hdr += '===================================================================\n'
1632 def diff_add_delete(fname, add, revision):
1634 diff.append(diff_hdr % fname)
1638 diff.append('--- %s\t(revision 0)\n' % fname)
1640 if revision and not fname in self.to_be_added:
1641 rev = 'working copy'
1642 diff.append('+++ %s\t(%s)\n' % (fname, rev))
1643 fname = os.path.join(self.absdir, fname)
1645 diff.append('--- %s\t(revision %s)\n' % (fname, revision or self.rev))
1646 diff.append('+++ %s\t(working copy)\n' % fname)
1647 fname = os.path.join(self.storedir, fname)
1650 if revision is not None and not add:
1651 (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
1652 get_source_file(self.apiurl, self.prjname, self.name, origname, tmpfile, revision)
1654 if binary_file(fname):
1659 diff.append('Binary file \'%s\' %s.\n' % (origname, what))
1662 ltmpl = '@@ -0,0 +1,%d @@\n'
1665 ltmpl = '@@ -1,%d +0,0 @@\n'
1666 lines = [tmpl % i for i in open(fname, 'r').readlines()]
1668 diff.append(ltmpl % len(lines))
1669 if not lines[-1].endswith('\n'):
1670 lines.append('\n\\ No newline at end of file\n')
1673 if tmpfile is not None:
1678 if revision is None:
1679 todo = self.todo or [i for i in self.filenamelist if not i in self.to_be_added]+self.to_be_added
1681 if fname in self.to_be_added and self.status(fname) == 'A':
1683 elif fname in self.to_be_deleted:
1684 deleted.append(fname)
1685 elif fname in self.filenamelist:
1686 kept.append(self.findfilebyname(fname))
1687 elif fname in self.to_be_added and self.status(fname) == '!':
1688 raise oscerr.OscIOError(None, 'file \'%s\' is marked as \'A\' but does not exist\n'\
1689 '(either add the missing file or revert it)' % fname)
1690 elif not ignoreUnversioned:
1691 raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % fname)
1693 fm = self.get_files_meta(revision=revision)
1694 root = ET.fromstring(fm)
1695 rfiles = self.__get_files(root)
1696 # swap added and deleted
1697 kept, deleted, added, services = self.__get_rev_changes(rfiles)
1698 added = [f.name for f in added]
1699 added.extend([f for f in self.to_be_added if not f in kept])
1700 deleted = [f.name for f in deleted]
1701 deleted.extend(self.to_be_deleted)
1706 # print kept, added, deleted
1708 state = self.status(f.name)
1709 if state in ('S', '?', '!'):
1711 elif state == ' ' and revision is None:
1713 elif revision and self.findfilebyname(f.name).md5 == f.md5 and state != 'M':
1715 yield [diff_hdr % f.name]
1716 if revision is None:
1717 yield get_source_file_diff(self.absdir, f.name, self.rev)
1722 (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
1723 get_source_file(self.apiurl, self.prjname, self.name, f.name, tmpfile, revision)
1724 diff = get_source_file_diff(self.absdir, f.name, revision,
1725 os.path.basename(tmpfile), os.path.dirname(tmpfile), f.name)
1727 if tmpfile is not None:
1733 yield diff_add_delete(f, True, revision)
1735 yield diff_add_delete(f, False, revision)
1737 def merge(self, otherpac):
1738 self.todo += otherpac.todo
1752 '\n '.join(self.filenamelist),
1760 def read_meta_from_spec(self, spec = None):
1765 # scan for spec files
1766 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1767 if len(speclist) == 1:
1768 specfile = speclist[0]
1769 elif len(speclist) > 1:
1770 print 'the following specfiles were found:'
1771 for filename in speclist:
1773 print 'please specify one with --specfile'
1776 print 'no specfile was found - please specify one ' \
1780 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1781 self.summary = data.get('Summary', '')
1782 self.url = data.get('Url', '')
1783 self.descr = data.get('%description', '')
1786 def update_package_meta(self, force=False):
1788 for the updatepacmetafromspec subcommand
1789 the force argument suppresses the confirmation prompt
1792 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1794 root = ET.fromstring(m)
1795 root.find('title').text = self.summary
1796 root.find('description').text = ''.join(self.descr)
1797 url = root.find('url')
1799 url = ET.SubElement(root, 'url')
1802 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1803 mf = metafile(u, ET.tostring(root))
1806 print '*' * 36, 'old', '*' * 36
1808 print '*' * 36, 'new', '*' * 36
1809 print ET.tostring(root)
1811 repl = raw_input('Write? (y/N/e) ')
1822 def mark_frozen(self):
1823 store_write_string(self.absdir, '_frozenlink', '')
1825 print "The link in this package is currently broken. Checking"
1826 print "out the last working version instead; please use 'osc pull'"
1827 print "to repair the link."
1830 def unmark_frozen(self):
1831 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1832 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1834 def latest_rev(self):
1835 if self.islinkrepair():
1836 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1, meta=self.meta)
1837 elif self.islink() and self.isexpanded():
1838 if self.isfrozen() or self.ispulled():
1839 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1842 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, meta=self.meta)
1845 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1847 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base", meta=self.meta)
1850 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name, meta=self.meta)
1853 def __get_files(self, fmeta_root):
1855 if fmeta_root.get('rev') is None and len(fmeta_root.findall('entry')) > 0:
1856 raise oscerr.APIError('missing rev attribute in _files:\n%s' % ''.join(ET.tostring(fmeta_root)))
1857 for i in fmeta_root.findall('entry'):
1858 skipped = i.get('skipped') is not None
1859 f.append(File(i.get('name'), i.get('md5'),
1860 int(i.get('size')), int(i.get('mtime')), skipped))
1863 def __get_rev_changes(self, revfiles):
1870 revfilenames.append(f.name)
1871 # treat skipped like deleted files
1873 if f.name.startswith('_service:'):
1878 # treat skipped like added files
1879 # problem: this overwrites existing files during the update
1880 # (because skipped files aren't in self.filenamelist_unvers)
1881 if f.name in self.filenamelist and not f.name in self.skipped:
1885 for f in self.filelist:
1886 if not f.name in revfilenames:
1889 return kept, added, deleted, services
1891 def update(self, rev = None, service_files = False, size_limit = None):
1894 # size_limit is only temporary for this update
1895 old_size_limit = self.size_limit
1896 if not size_limit is None:
1897 self.size_limit = int(size_limit)
1898 if os.path.isfile(os.path.join(self.storedir, '_in_update', '_files')):
1899 print 'resuming broken update...'
1900 root = ET.parse(os.path.join(self.storedir, '_in_update', '_files')).getroot()
1901 rfiles = self.__get_files(root)
1902 kept, added, deleted, services = self.__get_rev_changes(rfiles)
1903 # check if we aborted in the middle of a file update
1904 broken_file = os.listdir(os.path.join(self.storedir, '_in_update'))
1905 broken_file.remove('_files')
1906 if len(broken_file) == 1:
1907 origfile = os.path.join(self.storedir, '_in_update', broken_file[0])
1908 wcfile = os.path.join(self.absdir, broken_file[0])
1909 origfile_md5 = dgst(origfile)
1910 origfile_meta = self.findfilebyname(broken_file[0])
1911 if origfile.endswith('.copy'):
1912 # ok, it seems we aborted at some point during the copy process (copying
1913 # wcfile into the _in_update dir); remove the file and continue
1915 elif self.findfilebyname(broken_file[0]) is None:
1916 # should we remove this file from _in_update? if we don't
1917 # the user has no chance to continue without removing the file manually
1918 raise oscerr.PackageInternalError(self.prjname, self.name,
1919 '\'%s\' is not known by meta but exists in \'_in_update\' dir' % broken_file[0])
1920 elif os.path.isfile(wcfile) and dgst(wcfile) != origfile_md5:
1921 (fd, tmpfile) = tempfile.mkstemp(dir=self.absdir, prefix=broken_file[0]+'.')
1923 os.rename(wcfile, tmpfile)
1924 os.rename(origfile, wcfile)
1925 print 'warning: it seems you modified \'%s\' after the broken ' \
1926 'update. Restored original file and saved modified version ' \
1927 'to \'%s\'.' % (wcfile, tmpfile)
1928 elif not os.path.isfile(wcfile):
1929 # this is strange... because it existed before the update. restore it
1930 os.rename(origfile, wcfile)
1932 # everything seems to be ok
1934 elif len(broken_file) > 1:
1935 raise oscerr.PackageInternalError(self.prjname, self.name, 'too many files in \'_in_update\' dir')
1938 if os.path.exists(os.path.join(self.storedir, f.name)):
1939 if dgst(os.path.join(self.storedir, f.name)) == f.md5:
1947 if not service_files:
1949 self.__update(kept, added, deleted, services, ET.tostring(root), root.get('rev'))
1950 os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
1951 os.rmdir(os.path.join(self.storedir, '_in_update'))
1952 # ok everything is ok (hopefully)...
1953 fm = self.get_files_meta(revision=rev)
1954 root = ET.fromstring(fm)
1955 rfiles = self.__get_files(root)
1956 store_write_string(self.absdir, '_files', fm + '\n', subdir='_in_update')
1957 kept, added, deleted, services = self.__get_rev_changes(rfiles)
1958 if not service_files:
1960 self.__update(kept, added, deleted, services, fm, root.get('rev'))
1961 os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
1962 if os.path.isdir(os.path.join(self.storedir, '_in_update')):
1963 os.rmdir(os.path.join(self.storedir, '_in_update'))
1964 self.size_limit = old_size_limit
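# A minimal usage sketch, assuming "wc_dir" is a hypothetical path to an
# existing osc package working copy:
#
#   p = Package(wc_dir)
#   p.update(rev=None, service_files=False, size_limit=None)
#   # if the update is interrupted, calling p.update() again resumes it
#   # from the state saved in the store's '_in_update' directory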
1966 def __update(self, kept, added, deleted, services, fm, rev):
1967 pathn = getTransActPath(self.dir)
1968 # check for conflicts with existing files
1970 if f.name in self.filenamelist_unvers:
1971 raise oscerr.PackageFileConflict(self.prjname, self.name, f.name,
1972 'failed to add file \'%s\': a file/dir with the same name already exists' % f.name)
1973 # ok, the update can't fail due to existing files
1975 self.updatefile(f.name, rev, f.mtime)
1976 print statfrmt('A', os.path.join(pathn, f.name))
1978 # if the storefile doesn't exist we're resuming an aborted update:
1979 # the file was already deleted but we cannot know this
1980 # OR we're processing a _service: file (simply keep the file)
1981 if os.path.isfile(os.path.join(self.storedir, f.name)) and self.status(f.name) != 'M':
1982 # if self.status(f.name) != 'M':
1983 self.delete_localfile(f.name)
1984 self.delete_storefile(f.name)
1985 print statfrmt('D', os.path.join(pathn, f.name))
1986 if f.name in self.to_be_deleted:
1987 self.to_be_deleted.remove(f.name)
1988 self.write_deletelist()
1991 state = self.status(f.name)
1992 # print f.name, state
1993 if state == 'M' and self.findfilebyname(f.name).md5 == f.md5:
1994 # remote file didn't change
1997 # try to merge changes
1998 merge_status = self.mergefile(f.name, rev, f.mtime)
1999 print statfrmt(merge_status, os.path.join(pathn, f.name))
2001 self.updatefile(f.name, rev, f.mtime)
2002 print 'Restored \'%s\'' % os.path.join(pathn, f.name)
2004 get_source_file(self.apiurl, self.prjname, self.name, f.name,
2005 targetfilename=os.path.join(self.storedir, f.name), revision=rev,
2006 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
2007 print 'skipping \'%s\' (this is due to conflicts)' % f.name
2008 elif state == 'D' and self.findfilebyname(f.name).md5 != f.md5:
2009 # XXX: in the worst case we might end up with f.name being
2010 # in _to_be_deleted and in _in_conflict... this needs to be checked
2011 if os.path.exists(os.path.join(self.absdir, f.name)):
2012 merge_status = self.mergefile(f.name, rev, f.mtime)
2013 print statfrmt(merge_status, os.path.join(pathn, f.name))
2014 if merge_status == 'C':
2015 # state changes from delete to conflict
2016 self.to_be_deleted.remove(f.name)
2017 self.write_deletelist()
2019 # XXX: we cannot recover this case because we've no file
2021 self.updatefile(f.name, rev, f.mtime)
2022 print statfrmt('U', os.path.join(pathn, f.name))
2023 elif state == ' ' and self.findfilebyname(f.name).md5 != f.md5:
2024 self.updatefile(f.name, rev, f.mtime)
2025 print statfrmt('U', os.path.join(pathn, f.name))
2027 # checkout service files
2029 get_source_file(self.apiurl, self.prjname, self.name, f.name,
2030 targetfilename=os.path.join(self.absdir, f.name), revision=rev,
2031 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
2032 print statfrmt('A', os.path.join(pathn, f.name))
2033 store_write_string(self.absdir, '_files', fm + '\n')
2035 self.update_local_pacmeta()
2036 self.update_datastructs()
2038 print 'At revision %s.' % self.rev
2040 def run_source_services(self, mode=None, singleservice=None):
2041 curdir = os.getcwd()
2042 os.chdir(self.absdir) # e.g. /usr/lib/obs/service/verify_file fails if not inside the project dir.
2044 if self.filenamelist.count('_service') or self.filenamelist_unvers.count('_service'):
2045 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
2047 si.getProjectGlobalServices(self.apiurl, self.prjname, self.name)
2048 si.execute(self.absdir, mode, singleservice)
2051 def prepare_filelist(self):
2052 """Prepare a list of files, which will be processed by process_filelist
2053 method. This allows easy modifications of a file list in commit
2057 self.todo = self.filenamelist + self.filenamelist_unvers
2061 for f in [f for f in self.todo if not os.path.isdir(f)]:
2063 status = self.status(f)
2068 ret += "%s %s %s\n" % (action, status, f)
2071 # Edit a filelist for package \'%s\'
2073 # l, leave = leave a file as is
2074 # r, remove = remove a file
2075 # a, add = add a file
2077 # If you remove a file from the list, it will be unchanged
2078 # If you remove all files, the commit will be aborted""" % self.name
2082 def edit_filelist(self):
2083 """Opens a package list in editor for editing. This allows easy
2084 modifications of it just by simple text editing
2088 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
2089 f = os.fdopen(fd, 'w')
2090 f.write(self.prepare_filelist())
2092 mtime_orig = os.stat(filename).st_mtime
2095 run_editor(filename)
2096 mtime = os.stat(filename).st_mtime
2097 if mtime_orig < mtime:
2098 filelist = open(filename).readlines()
2102 raise oscerr.UserAbort()
2104 return self.process_filelist(filelist)
2106 def process_filelist(self, filelist):
2107 """Process a filelist - it add/remove or leave files. This depends on
2108 user input. If no file is processed, it raises an ValueError
2112 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
2114 foo = line.split(' ')
2116 action, state, name = (foo[0], ' ', foo[3])
2118 action, state, name = (foo[0], foo[1], foo[2])
2121 action = action.lower()
2124 if action in ('r', 'remove'):
2125 if self.status(name) == '?':
2127 if name in self.todo:
2128 self.todo.remove(name)
2130 self.delete_file(name, True)
2131 elif action in ('a', 'add'):
2132 if self.status(name) != '?':
2133 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
2136 elif action in ('l', 'leave'):
2139 raise ValueError("Unknow action `%s'" % action)
2142 raise ValueError("Empty filelist")
2144 def revert(self, filename):
2145 if not filename in self.filenamelist and not filename in self.to_be_added:
2146 raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % filename)
2147 elif filename in self.skipped:
2148 raise oscerr.OscIOError(None, 'file \'%s\' is marked as skipped and cannot be reverted' % filename)
2149 if filename in self.filenamelist and not os.path.exists(os.path.join(self.storedir, filename)):
2150 raise oscerr.PackageInternalError(self.prjname, self.name, 'file \'%s\' is listed in filenamelist but no storefile exists' % filename)
2151 state = self.status(filename)
2152 if not (state == 'A' or state == '!' and filename in self.to_be_added):
2153 shutil.copyfile(os.path.join(self.storedir, filename), os.path.join(self.absdir, filename))
2155 self.to_be_deleted.remove(filename)
2156 self.write_deletelist()
2158 self.clear_from_conflictlist(filename)
2159 elif state in ('A', 'R') or state == '!' and filename in self.to_be_added:
2160 self.to_be_added.remove(filename)
2161 self.write_addlist()
2164 def init_package(apiurl, project, package, dir, size_limit=None, meta=False, progress_obj=None):
2167 if not os.path.exists(dir):
2169 elif not os.path.isdir(dir):
2170 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
2171 if os.path.exists(os.path.join(dir, store)):
2172 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
2174 os.mkdir(os.path.join(dir, store))
2175 store_write_project(dir, project)
2176 store_write_string(dir, '_package', package + '\n')
2177 store_write_apiurl(dir, apiurl)
2179 store_write_string(dir, '_meta_mode', '')
2181 store_write_string(dir, '_size_limit', str(size_limit) + '\n')
2182 store_write_string(dir, '_files', '<directory />' + '\n')
2183 store_write_string(dir, '_osclib_version', __store_version__ + '\n')
2184 return Package(dir, progress_obj=progress_obj, size_limit=size_limit)
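# A minimal sketch with made-up apiurl, project/package names and target path:
#
#   pac = init_package('https://api.opensuse.org', 'home:user', 'mypkg',
#                      '/tmp/mypkg')
#   # pac is a Package object backed by the freshly created store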
2187 class AbstractState:
2189 Base class which represents state-like objects (<review />, <state />).
2191 def __init__(self, tag):
2194 def get_node_attrs(self):
2195 """return attributes for the tag/element"""
2196 raise NotImplementedError()
2198 def get_node_name(self):
2199 """return tag/element name"""
2202 def get_comment(self):
2203 """return data from <comment /> tag"""
2204 raise NotImplementedError()
2207 """serialize object to XML"""
2208 root = ET.Element(self.get_node_name())
2209 for attr in self.get_node_attrs():
2210 val = getattr(self, attr)
2213 if self.get_comment():
2214 ET.SubElement(root, 'comment').text = self.get_comment()
2218 """return "pretty" XML data"""
2219 root = self.to_xml()
2221 return ET.tostring(root)
2224 class ReviewState(AbstractState):
2225 """Represents the review state in a request"""
2226 def __init__(self, review_node):
2227 if not review_node.get('state'):
2228 raise oscerr.APIError('invalid review node (state attr expected): %s' % \
2229 ET.tostring(review_node))
2230 AbstractState.__init__(self, review_node.tag)
2231 self.state = review_node.get('state')
2232 self.by_user = review_node.get('by_user')
2233 self.by_group = review_node.get('by_group')
2234 self.by_project = review_node.get('by_project')
2235 self.by_package = review_node.get('by_package')
2236 self.who = review_node.get('who')
2237 self.when = review_node.get('when')
2239 if not review_node.find('comment') is None and \
2240 review_node.find('comment').text:
2241 self.comment = review_node.find('comment').text.strip()
2243 def get_node_attrs(self):
2244 return ('state', 'by_user', 'by_group', 'by_project', 'by_package', 'who', 'when')
2246 def get_comment(self):
2250 class RequestState(AbstractState):
2251 """Represents the state of a request"""
2252 def __init__(self, state_node):
2253 if not state_node.get('name'):
2254 raise oscerr.APIError('invalid request state node (name attr expected): %s' % \
2255 ET.tostring(state_node))
2256 AbstractState.__init__(self, state_node.tag)
2257 self.name = state_node.get('name')
2258 self.who = state_node.get('who')
2259 self.when = state_node.get('when')
2261 if not state_node.find('comment') is None and \
2262 state_node.find('comment').text:
2263 self.comment = state_node.find('comment').text.strip()
2265 def get_node_attrs(self):
2266 return ('name', 'who', 'when')
2268 def get_comment(self):
2274 Represents a <action /> element of a Request.
2275 This class is generic so that it can be used for all the different
2276 action types. Note: instances only provide attributes for their specific
2279 r = Action('set_bugowner', tgt_project='foo', person_name='buguser')
2280 # available attributes: r.type (== 'set_bugowner'), r.tgt_project (== 'foo'), r.tgt_package (== None)
2282 <action type="set_bugowner">
2283 <target project="foo" />
2284 <person name="buguser" />
2287 r = Action('delete', tgt_project='foo', tgt_package='bar')
2288 # available attributes: r.type (== 'delete'), r.tgt_project (== 'foo'), r.tgt_package (=='bar')
2290 <action type="delete">
2291 <target package="bar" project="foo" />
2295 # allowed types + the corresponding (allowed) attributes
2296 type_args = {'submit': ('src_project', 'src_package', 'src_rev', 'tgt_project', 'tgt_package', 'opt_sourceupdate',
2297 'acceptinfo_rev', 'acceptinfo_srcmd5', 'acceptinfo_xsrcmd5', 'acceptinfo_osrcmd5',
2298 'acceptinfo_oxsrcmd5', 'opt_updatelink'),
2299 'add_role': ('tgt_project', 'tgt_package', 'person_name', 'person_role', 'group_name', 'group_role'),
2300 'set_bugowner': ('tgt_project', 'tgt_package', 'person_name'), # obsoleted by add_role
2301 'maintenance_release': ('src_project', 'src_package', 'src_rev', 'tgt_project', 'tgt_package', 'person_name'),
2302 'maintenance_incident': ('src_project', 'tgt_project', 'person_name'),
2303 'delete': ('tgt_project', 'tgt_package'),
2304 'change_devel': ('src_project', 'src_package', 'tgt_project', 'tgt_package')}
2305 # attribute prefix to element name map (only needed for abbreviated attributes)
2306 prefix_to_elm = {'src': 'source', 'tgt': 'target', 'opt': 'options'}
2308 def __init__(self, type, **kwargs):
2309 if not type in Action.type_args.keys():
2310 raise oscerr.WrongArgs('invalid action type: \'%s\'' % type)
2312 for i in kwargs.keys():
2313 if not i in Action.type_args[type]:
2314 raise oscerr.WrongArgs('invalid argument: \'%s\'' % i)
2315 # set all type specific attributes
2316 for i in Action.type_args[type]:
2317 if kwargs.has_key(i):
2318 setattr(self, i, kwargs[i])
2320 setattr(self, i, None)
2324 Serialize object to XML.
2325 The xml tag names and attributes are constructed from the instance's attributes.
2327 self.group_name -> tag name is "group", attribute name is "name"
2328 self.src_project -> tag name is "source" (translated via prefix_to_elm dict),
2329 attribute name is "project"
2330 Attributes prefixed with "opt_" need special handling; the resulting xml should
2331 look like this: opt_updatelink -> <options><updatelink>value</updatelink></options>.
2332 Attributes which are "None" will be skipped.
2334 root = ET.Element('action', type=self.type)
2335 for i in Action.type_args[self.type]:
2336 prefix, attr = i.split('_', 1)
2337 val = getattr(self, i)
2340 elm = root.find(Action.prefix_to_elm.get(prefix, prefix))
2342 elm = ET.Element(Action.prefix_to_elm.get(prefix, prefix))
2345 ET.SubElement(elm, attr).text = val
2351 """return "pretty" XML data"""
2352 root = self.to_xml()
2354 return ET.tostring(root)
2357 def from_xml(action_node):
2358 """create action from XML"""
2359 if action_node is None or \
2360 not action_node.get('type') in Action.type_args.keys() or \
2361 not action_node.tag in ('action', 'submit'):
2362 raise oscerr.WrongArgs('invalid argument')
2363 elm_to_prefix = dict([(i[1], i[0]) for i in Action.prefix_to_elm.items()])
2365 for node in action_node:
2366 prefix = elm_to_prefix.get(node.tag, node.tag)
2368 data = [('opt_%s' % opt.tag, opt.text.strip()) for opt in node if opt.text]
2370 data = [('%s_%s' % (prefix, k), v) for k, v in node.items()]
2371 kwargs.update(dict(data))
2372 return Action(action_node.get('type'), **kwargs)
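# A minimal sketch, using made-up project/package names, of constructing,
# serializing and re-parsing an action:
#
#   a = Action('submit', src_project='home:user', src_package='mypkg',
#              tgt_project='openSUSE:Factory', tgt_package='mypkg')
#   xml = ET.tostring(a.to_xml())
#   same = Action.from_xml(ET.fromstring(xml))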
2376 """Represents a request (<request />)"""
2379 self._init_attributes()
2381 def _init_attributes(self):
2382 """initialize attributes with default values"""
2385 self.description = ''
2388 self.statehistory = []
2391 def read(self, root):
2392 """read in a request"""
2393 self._init_attributes()
2394 if not root.get('id'):
2395 raise oscerr.APIError('invalid request: %s\n' % ET.tostring(root))
2396 self.reqid = root.get('id')
2397 if root.find('state') is None:
2398 raise oscerr.APIError('invalid request (state expected): %s\n' % ET.tostring(root))
2399 self.state = RequestState(root.find('state'))
2400 action_nodes = root.findall('action')
2401 if not action_nodes:
2402 # check for old-style requests
2403 for i in root.findall('submit'):
2404 i.set('type', 'submit')
2405 action_nodes.append(i)
2406 for action in action_nodes:
2407 self.actions.append(Action.from_xml(action))
2408 for review in root.findall('review'):
2409 self.reviews.append(ReviewState(review))
2410 for hist_state in root.findall('history'):
2411 self.statehistory.append(RequestState(hist_state))
2412 if not root.find('title') is None:
2413 self.title = root.find('title').text.strip()
2414 if not root.find('description') is None and root.find('description').text:
2415 self.description = root.find('description').text.strip()
2417 def add_action(self, type, **kwargs):
2418 """add a new action to the request"""
2419 self.actions.append(Action(type, **kwargs))
2421 def get_actions(self, *types):
2423 get all actions with a specific type
2424 (if types is empty return all actions)
2428 return [i for i in self.actions if i.type in types]
2430 def get_creator(self):
2431 """return the creator of the request"""
2432 if len(self.statehistory):
2433 return self.statehistory[0].who
2434 return self.state.who
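# A minimal sketch, assuming a valid apiurl and made-up names, of assembling
# and creating a request:
#
#   rq = Request()
#   rq.add_action('submit', src_project='home:user', src_package='mypkg',
#                 tgt_project='openSUSE:Factory', tgt_package='mypkg')
#   rq.description = 'update to version 1.2'
#   rq.create(apiurl)   # POSTs the serialized request to /request?cmd=create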
2437 """serialize object to XML"""
2438 root = ET.Element('request')
2439 if not self.reqid is None:
2440 root.set('id', self.reqid)
2441 for action in self.actions:
2442 root.append(action.to_xml())
2443 if not self.state is None:
2444 root.append(self.state.to_xml())
2445 for review in self.reviews:
2446 root.append(review.to_xml())
2447 for hist in self.statehistory:
2448 root.append(hist.to_xml())
2450 ET.SubElement(root, 'title').text = self.title
2451 if self.description:
2452 ET.SubElement(root, 'description').text = self.description
2456 """return "pretty" XML data"""
2457 root = self.to_xml()
2459 return ET.tostring(root)
2462 def format_review(review, show_srcupdate=False):
2464 format a review depending on the reviewer's type.
2465 A dict which contains the formatted strings is returned.
2468 d = {'state': '%s:' % review.state}
2469 if review.by_package:
2470 d['by'] = '%s/%s' % (review.by_project, review.by_package)
2471 d['type'] = 'Package'
2472 elif review.by_project:
2473 d['by'] = '%s' % review.by_project
2474 d['type'] = 'Project'
2475 elif review.by_group:
2476 d['by'] = '%s' % review.by_group
2479 d['by'] = '%s' % review.by_user
2482 d['by'] += '(%s)' % review.who
2486 def format_action(action, show_srcupdate=False):
2488 format an action depending on the action's type.
2489 A dict which contains the formatted strings is returned.
2491 def prj_pkg_join(prj, pkg):
2494 return '%s/%s' % (prj, pkg)
2496 d = {'type': '%s:' % action.type}
2497 if action.type == 'set_bugowner':
2498 d['source'] = action.person_name
2499 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2500 elif action.type == 'change_devel':
2501 d['source'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2502 d['target'] = 'developed in %s' % prj_pkg_join(action.src_project, action.src_package)
2503 elif action.type == 'maintenance_incident':
2504 d['source'] = '%s ->' % action.src_project
2505 d['target'] = action.tgt_project
2506 elif action.type == 'maintenance_release':
2507 d['source'] = '%s ->' % prj_pkg_join(action.src_project, action.src_package)
2508 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2509 elif action.type == 'submit':
2511 if action.opt_sourceupdate and show_srcupdate:
2512 srcupdate = '(%s)' % action.opt_sourceupdate
2513 d['source'] = '%s%s ->' % (prj_pkg_join(action.src_project, action.src_package), srcupdate)
2514 tgt_package = action.tgt_package
2515 if action.src_package == action.tgt_package:
2517 d['target'] = prj_pkg_join(action.tgt_project, tgt_package)
2518 elif action.type == 'add_role':
2520 if action.person_name and action.person_role:
2521 roles.append('person: %s as %s' % (action.person_name, action.person_role))
2522 if action.group_name and action.group_role:
2523 roles.append('group: %s as %s' % (action.group_name, action.group_role))
2524 d['source'] = ', '.join(roles)
2525 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2526 elif action.type == 'delete':
2528 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2531 def list_view(self):
2532 """return "list view" format"""
2534 lines = ['%6s State:%-10s By:%-12s When:%-19s' % (self.reqid, self.state.name, self.state.who, self.state.when)]
2535 tmpl = ' %(type)-16s %(source)-50s %(target)s'
2536 for action in self.actions:
2537 lines.append(tmpl % Request.format_action(action))
2538 tmpl = ' Review by %(type)-10s is %(state)-10s %(by)-50s'
2539 for review in self.reviews:
2540 lines.append(tmpl % Request.format_review(review))
2541 history = ['%s(%s)' % (hist.name, hist.who) for hist in self.statehistory]
2543 lines.append(' From: %s' % ' -> '.join(history))
2544 if self.description:
2545 lines.append(textwrap.fill(self.description, width=80, initial_indent=' Descr: ',
2546 subsequent_indent=' '))
2547 return '\n'.join(lines)
2550 """return "detailed" format"""
2551 lines = ['Request: #%s\n' % self.reqid]
2552 for action in self.actions:
2553 tmpl = ' %(type)-13s %(source)s %(target)s'
2554 if action.type == 'delete':
2555 # remove 1 whitespace because source is empty
2556 tmpl = ' %(type)-12s %(source)s %(target)s'
2557 lines.append(tmpl % Request.format_action(action, show_srcupdate=True))
2558 lines.append('\n\nMessage:')
2559 if self.description:
2560 lines.append(self.description)
2562 lines.append('<no message>')
2564 lines.append('\nState: %-10s %-12s %s' % (self.state.name, self.state.when, self.state.who))
2565 lines.append('Comment: %s' % (self.state.comment or '<no comment>'))
2568 tmpl = '%(state)-10s %(by)-50s %(when)-12s %(who)-20s %(comment)s'
2570 for review in reversed(self.reviews):
2571 d = {'state': review.state}
2573 d['by'] = "User: " + review.by_user
2575 d['by'] = "Group: " + review.by_group
2576 if review.by_package:
2577 d['by'] = "Package: " + review.by_project + "/" + review.by_package
2578 elif review.by_project:
2579 d['by'] = "Project: " + review.by_project
2580 d['when'] = review.when or ''
2581 d['who'] = review.who or ''
2582 d['comment'] = review.comment or ''
2583 reviews.append(tmpl % d)
2585 lines.append('\nReview: %s' % indent.join(reviews))
2587 tmpl = '%(name)-10s %(when)-12s %(who)s'
2589 for hist in reversed(self.statehistory):
2590 d = {'name': hist.name, 'when': hist.when,
2592 histories.append(tmpl % d)
2594 lines.append('\nHistory: %s' % indent.join(histories))
2596 return '\n'.join(lines)
2598 def __cmp__(self, other):
2599 return cmp(int(self.reqid), int(other.reqid))
2601 def create(self, apiurl):
2602 """create a new request"""
2603 u = makeurl(apiurl, ['request'], query='cmd=create')
2604 f = http_POST(u, data=self.to_str())
2605 root = ET.fromstring(f.read())
2609 """format time as Apr 02 18:19
2611 depending on whether it is in the current year
2615 if time.localtime()[0] == time.localtime(t)[0]:
2617 return time.strftime('%b %d %H:%M',time.localtime(t))
2619 return time.strftime('%b %d %Y',time.localtime(t))
2622 def is_project_dir(d):
2625 return os.path.exists(os.path.join(d, store, '_project')) and not \
2626 os.path.exists(os.path.join(d, store, '_package'))
2629 def is_package_dir(d):
2632 return os.path.exists(os.path.join(d, store, '_project')) and \
2633 os.path.exists(os.path.join(d, store, '_package'))
2635 def parse_disturl(disturl):
2636 """Parse a disturl, returns tuple (apiurl, project, source, repository,
2637 revision), else raises an oscerr.WrongArgs exception
2642 m = DISTURL_RE.match(disturl)
2644 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
2646 apiurl = m.group('apiurl')
2647 if apiurl.split('.')[0] != 'api':
2648 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
2649 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
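# A minimal sketch with a made-up disturl of the shape
# obs://<host>/<project>/<repository>/<revision>-<source>:
#
#   parse_disturl('obs://build.opensuse.org/openSUSE:Factory/standard/'
#                 'd41d8cd98f00b204e9800998ecf8427e-mypkg')
#   # -> ('https://api.opensuse.org', 'openSUSE:Factory', 'mypkg',
#   #     'standard', 'd41d8cd98f00b204e9800998ecf8427e')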
2651 def parse_buildlogurl(buildlogurl):
2652 """Parse a build log url, returns a tuple (apiurl, project, package,
2653 repository, arch), else raises oscerr.WrongArgs exception"""
2655 global BUILDLOGURL_RE
2657 m = BUILDLOGURL_RE.match(buildlogurl)
2659 raise oscerr.WrongArgs('\'%s\' does not look like a url with a build log' % buildlogurl)
2661 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
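# A minimal sketch with made-up values; a build log url roughly has the shape
# <apiurl>/build/<project>/<repository>/<arch>/<package>/_log:
#
#   parse_buildlogurl('https://api.opensuse.org/build/home:user/'
#                     'openSUSE_Factory/x86_64/mypkg/_log')
#   # -> ('https://api.opensuse.org', 'home:user', 'mypkg',
#   #     'openSUSE_Factory', 'x86_64')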
2664 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
2665 This is handy because it allows copy/pasting a project/package combination in this form.
2667 Trailing slashes are removed before the split, because the split would
2668 otherwise give an additional empty string.
2676 def expand_proj_pack(args, idx=0, howmany=0):
2677 """looks for occurance of '.' at the position idx.
2678 If howmany is 2, both proj and pack are expanded together
2679 using the current directory, or none of them, if not possible.
2680 If howmany is 0, proj is expanded if possible, then, if there
2681 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
2682 expanded, if possible.
2683 If howmany is 1, only proj is expanded if possible.
2685 If args[idx] does not exist, an implicit '.' is assumed.
2686 If not enough elements up to idx exist, an error is raised.
2688 See also parseargs(args), slash_split(args), findpacs(args)
2689 All these need unification, somehow.
2692 # print args,idx,howmany
2695 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
2697 if len(args) == idx:
2699 if args[idx+0] == '.':
2700 if howmany == 0 and len(args) > idx+1:
2701 if args[idx+1] == '.':
2703 # remove one dot and make sure to expand both proj and pack
2708 # print args,idx,howmany
2710 args[idx+0] = store_read_project('.')
2713 package = store_read_package('.')
2714 args.insert(idx+1, package)
2718 package = store_read_package('.')
2719 args.insert(idx+1, package)
2723 def findpacs(files, progress_obj=None):
2724 """collect Package objects belonging to the given files
2725 and make sure each Package is returned only once"""
2728 p = filedir_to_pac(f, progress_obj)
2731 if i.name == p.name:
2741 def filedir_to_pac(f, progress_obj=None):
2742 """Takes a working copy path, or a path to a file inside a working copy,
2743 and returns a Package object instance
2745 If the argument was a filename, it is added to the "todo" list of the Package """
2747 if os.path.isdir(f):
2749 p = Package(wd, progress_obj=progress_obj)
2751 wd = os.path.dirname(f) or os.curdir
2752 p = Package(wd, progress_obj=progress_obj)
2753 p.todo = [ os.path.basename(f) ]
2757 def read_filemeta(dir):
2760 msg = '\'%s\' is not a valid working copy.' % dir
2761 filesmeta = os.path.join(dir, store, '_files')
2762 if not is_package_dir(dir):
2763 raise oscerr.NoWorkingCopy(msg)
2764 if not os.path.isfile(filesmeta):
2765 raise oscerr.NoWorkingCopy('%s (%s does not exist)' % (msg, filesmeta))
2768 r = ET.parse(filesmeta)
2769 except SyntaxError, e:
2770 raise oscerr.NoWorkingCopy('%s\nWhen parsing .osc/_files, the following error was encountered:\n%s' % (msg, e))
2773 def store_readlist(dir, name):
2777 if os.path.exists(os.path.join(dir, store, name)):
2778 r = [line.strip() for line in open(os.path.join(dir, store, name), 'r')]
2781 def read_tobeadded(dir):
2782 return store_readlist(dir, '_to_be_added')
2784 def read_tobedeleted(dir):
2785 return store_readlist(dir, '_to_be_deleted')
2787 def read_sizelimit(dir):
2791 fname = os.path.join(dir, store, '_size_limit')
2793 if os.path.exists(fname):
2794 r = open(fname).readline().strip()
2796 if r is None or not r.isdigit():
2800 def read_inconflict(dir):
2801 return store_readlist(dir, '_in_conflict')
2803 def parseargs(list_of_args):
2804 """Convenience method osc's commandline argument parsing.
2806 If called with an empty tuple (or list), return a list containing the current directory.
2807 Otherwise, return a list of the arguments."""
2809 return list(list_of_args)
2814 def statfrmt(statusletter, filename):
2815 return '%s %s' % (statusletter, filename)
2818 def pathjoin(a, *p):
2819 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2820 path = os.path.join(a, *p)
2821 if path.startswith('./'):
2826 def makeurl(baseurl, l, query=[]):
2827 """Given a list of path compoments, construct a complete URL.
2829 Optional parameters for a query string can be given as a list, as a
2830 dictionary, or as an already assembled string.
2831 In case of a dictionary, the parameters will be urlencoded by this
2832 function. In case of a list they will not -- this is for backwards compatibility.
2835 if conf.config['verbose'] > 1:
2836 print 'makeurl:', baseurl, l, query
2838 if type(query) == type(list()):
2839 query = '&'.join(query)
2840 elif type(query) == type(dict()):
2841 query = urlencode(query)
2843 scheme, netloc = urlsplit(baseurl)[0:2]
2844 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
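# A minimal sketch with made-up names; the query may be a list of already
# encoded "key=value" strings or a dict that gets urlencoded:
#
#   makeurl('https://api.opensuse.org', ['source', 'home:user', 'mypkg'],
#           query={'rev': 'latest'})
#   # -> 'https://api.opensuse.org/source/home:user/mypkg?rev=latest'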
2847 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2848 """wrapper around urllib2.urlopen for error handling,
2849 and to support additional (PUT, DELETE) methods"""
2853 if conf.config['http_debug']:
2854 print >>sys.stderr, '\n\n--', method, url
2856 if method == 'POST' and not file and not data:
2857 # adding data to an urllib2 request transforms it into a POST
2860 req = urllib2.Request(url)
2861 api_host_options = {}
2862 if conf.is_known_apiurl(url):
2863 # ok no external request
2864 urllib2.install_opener(conf._build_opener(url))
2865 api_host_options = conf.get_apiurl_api_host_options(url)
2866 for header, value in api_host_options['http_headers']:
2867 req.add_header(header, value)
2869 req.get_method = lambda: method
2871 # POST requests are application/x-www-form-urlencoded by default;
2872 # since we change the request into PUT, we also need to adjust the content type header
2873 if method == 'PUT' or (method == 'POST' and data):
2874 req.add_header('Content-Type', 'application/octet-stream')
2876 if type(headers) == type({}):
2877 for i in headers.keys():
2879 req.add_header(i, headers[i])
2881 if file and not data:
2882 size = os.path.getsize(file)
2884 data = open(file, 'rb').read()
2887 filefd = open(file, 'rb')
2889 if sys.platform[:3] != 'win':
2890 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2892 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2894 except EnvironmentError, e:
2896 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2897 '\non a filesystem which does not support this.' % (e, file))
2898 elif hasattr(e, 'winerror') and e.winerror == 5:
2899 # falling back to the default io
2900 data = open(file, 'rb').read()
2904 if conf.config['debug']: print >>sys.stderr, method, url
2906 old_timeout = socket.getdefaulttimeout()
2907 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2908 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2909 socket.setdefaulttimeout(timeout)
2911 fd = urllib2.urlopen(req, data=data)
2913 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2914 socket.setdefaulttimeout(old_timeout)
2915 if hasattr(conf.cookiejar, 'save'):
2916 conf.cookiejar.save(ignore_discard=True)
2918 if filefd: filefd.close()
2923 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2924 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2925 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2926 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
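# A minimal sketch, assuming a valid apiurl; the wrappers are typically
# combined with makeurl():
#
#   u = makeurl(apiurl, ['source', 'home:user'])
#   f = http_GET(u)
#   root = ET.parse(f).getroot()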
2929 def check_store_version(dir):
2932 versionfile = os.path.join(dir, store, '_osclib_version')
2934 v = open(versionfile).read().strip()
2939 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2940 if os.path.exists(os.path.join(dir, '.svn')):
2941 msg = msg + '\nTry svn instead of osc.'
2942 raise oscerr.NoWorkingCopy(msg)
2944 if v != __store_version__:
2945 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2946 # version is fine, no migration needed
2947 f = open(versionfile, 'w')
2948 f.write(__store_version__ + '\n')
2951 msg = 'The osc metadata of your working copy "%s"' % dir
2952 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2953 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2954 raise oscerr.WorkingCopyWrongVersion(msg)
2957 def meta_get_packagelist(apiurl, prj, deleted=None):
2961 query['deleted'] = 1
2963 u = makeurl(apiurl, ['source', prj], query)
2965 root = ET.parse(f).getroot()
2966 return [ node.get('name') for node in root.findall('entry') ]
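# A minimal sketch, assuming a valid apiurl and a made-up project name:
#
#   for name in meta_get_packagelist(apiurl, 'home:user'):
#       print name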
2969 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2970 """return a list of file names,
2971 or a list of File() instances if verbose=True"""
2977 query['rev'] = revision
2979 query['rev'] = 'latest'
2981 u = makeurl(apiurl, ['source', prj, package], query=query)
2983 root = ET.parse(f).getroot()
2986 return [ node.get('name') for node in root.findall('entry') ]
2990 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2991 rev = root.get('rev')
2992 for node in root.findall('entry'):
2993 f = File(node.get('name'),
2995 int(node.get('size')),
2996 int(node.get('mtime')))
3002 def meta_get_project_list(apiurl, deleted=None):
3005 query['deleted'] = 1
3007 u = makeurl(apiurl, ['source'], query)
3009 root = ET.parse(f).getroot()
3010 return sorted([ node.get('name') for node in root if node.get('name')])
3013 def show_project_meta(apiurl, prj):
3014 url = makeurl(apiurl, ['source', prj, '_meta'])
3016 return f.readlines()
3019 def show_project_conf(apiurl, prj):
3020 url = makeurl(apiurl, ['source', prj, '_config'])
3022 return f.readlines()
3025 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
3026 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
3030 except urllib2.HTTPError, e:
3031 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
3035 def show_package_meta(apiurl, prj, pac, meta=False):
3040 # packages like _pattern and _project do not have a _meta file
3041 if pac.startswith('_pattern') or pac.startswith('_project'):
3044 url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
3047 return f.readlines()
3048 except urllib2.HTTPError, e:
3049 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
3053 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
3055 path.append('source')
3061 path.append('_attribute')
3063 path.append(attribute)
3066 query.append("with_default=1")
3068 query.append("with_project=1")
3069 url = makeurl(apiurl, path, query)
3072 return f.readlines()
3073 except urllib2.HTTPError, e:
3074 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
3078 def show_develproject(apiurl, prj, pac, xml_node=False):
3079 m = show_package_meta(apiurl, prj, pac)
3080 node = ET.fromstring(''.join(m)).find('devel')
3081 if not node is None:
3084 return node.get('project')
3088 def show_package_disabled_repos(apiurl, prj, pac):
3089 m = show_package_meta(apiurl, prj, pac)
3090 #FIXME: doesn't work if all repos of a project are disabled and only some are enabled, since <disable/> is empty
3092 root = ET.fromstring(''.join(m))
3093 elm = root.find('build')
3094 r = [ node.get('repository') for node in elm.findall('disable')]
3100 def show_pattern_metalist(apiurl, prj):
3101 url = makeurl(apiurl, ['source', prj, '_pattern'])
3105 except urllib2.HTTPError, e:
3106 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
3108 r = [ node.get('name') for node in tree.getroot() ]
3113 def show_pattern_meta(apiurl, prj, pattern):
3114 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
3117 return f.readlines()
3118 except urllib2.HTTPError, e:
3119 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
3124 """metafile that can be manipulated and is stored back after manipulation."""
3125 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
3129 self.change_is_required = change_is_required
3130 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
3131 f = os.fdopen(fd, 'w')
3132 f.write(''.join(input))
3134 self.hash_orig = dgst(self.filename)
3137 if self.change_is_required and self.hash_orig == dgst(self.filename):
3138 print 'File unchanged. Not saving.'
3139 os.unlink(self.filename)
3142 print 'Sending meta data...'
3143 # don't do any exception handling... it's up to the caller what to do in case
3145 http_PUT(self.url, file=self.filename)
3146 os.unlink(self.filename)
3152 run_editor(self.filename)
3156 except urllib2.HTTPError, e:
3157 error_help = "%d" % e.code
3158 if e.headers.get('X-Opensuse-Errorcode'):
3159 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
3161 print >>sys.stderr, 'BuildService API error:', error_help
3162 # examine the error - we can't raise an exception because we might want
3165 if '<summary>' in data:
3166 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
3167 ri = raw_input('Try again? ([y/N]): ')
3168 if ri not in ['y', 'Y']:
3174 if os.path.exists(self.filename):
3175 print 'discarding %s' % self.filename
3176 os.unlink(self.filename)
3179 # different types of metadata
3180 metatypes = { 'prj': { 'path': 'source/%s/_meta',
3181 'template': new_project_templ,
3184 'pkg': { 'path' : 'source/%s/%s/_meta',
3185 'template': new_package_templ,
3188 'attribute': { 'path' : 'source/%s/%s/_meta',
3189 'template': new_attribute_templ,
3192 'prjconf': { 'path': 'source/%s/_config',
3196 'user': { 'path': 'person/%s',
3197 'template': new_user_template,
3200 'pattern': { 'path': 'source/%s/_pattern/%s',
3201 'template': new_pattern_template,
3206 def meta_exists(metatype,
3215 apiurl = conf.config['apiurl']
3216 url = make_meta_url(metatype, path_args, apiurl)
3218 data = http_GET(url).readlines()
3219 except urllib2.HTTPError, e:
3220 if e.code == 404 and create_new:
3221 data = metatypes[metatype]['template']
3223 data = StringIO(data % template_args).readlines()
3229 def make_meta_url(metatype, path_args=None, apiurl=None, force=False):
3233 apiurl = conf.config['apiurl']
3234 if metatype not in metatypes.keys():
3235 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
3236 path = metatypes[metatype]['path']
3239 path = path % path_args
3243 query = { 'force': '1' }
3245 return makeurl(apiurl, [path], query)
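# A minimal sketch with made-up names; make_meta_url() fills the per-type path
# template from the metatypes table above:
#
#   make_meta_url('pkg', path_args=('home:user', 'mypkg'),
#                 apiurl='https://api.opensuse.org')
#   # -> 'https://api.opensuse.org/source/home:user/mypkg/_meta'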
3248 def edit_meta(metatype,
3254 change_is_required=False,
3260 apiurl = conf.config['apiurl']
3262 data = meta_exists(metatype,
3265 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
3269 change_is_required = True
3271 url = make_meta_url(metatype, path_args, apiurl, force)
3272 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
3280 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, meta=False):
3283 query['rev'] = revision
3285 query['rev'] = 'latest'
3287 query['linkrev'] = linkrev
3288 elif conf.config['linkcontrol']:
3289 query['linkrev'] = 'base'
3295 query['emptylink'] = 1
3296 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
3299 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False):
3300 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta)
3301 return ET.fromstring(''.join(m)).get('srcmd5')
3304 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False):
3305 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta)
3307 # only source link packages have a <linkinfo> element.
3308 li_node = ET.fromstring(''.join(m)).find('linkinfo')
3316 raise oscerr.LinkExpandError(prj, pac, li.error)
3320 def show_upstream_rev(apiurl, prj, pac, revision=None, expand=False, linkrev=None, meta=False):