1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
# Module-level compiled regexes.
# NOTE(review): this whole excerpt is a numbered listing — the leading "33 "/"34 "
# on each line is an extraction artifact, not part of the code.
# DISTURL_RE parses an OBS "disturl" (bs://apiurl/project/repository/revision-source);
# BUILDLOGURL_RE parses an http(s) build-log URL down to its /_log suffix.
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
# XML template for a freshly created project's _meta, filled via %(name)s / %(user)s.
# NOTE(review): the body and the closing triple-quote are partially elided in this
# excerpt (embedded numbering jumps); no comments are inserted below because every
# following line is inside the string literal.
38 new_project_templ = """\
39 <project name="%(name)s">
41 <title></title> <!-- Short title of NewProject -->
43 <!-- This is for a longer description of the purpose of the project -->
46 <person role="maintainer" userid="%(user)s" />
47 <person role="bugowner" userid="%(user)s" />
48 <!-- remove this block to publish your packages on the mirrors -->
59 <!-- remove this comment to enable one or more build targets
61 <repository name="openSUSE_Factory">
62 <path project="openSUSE:Factory" repository="standard" />
66 <repository name="openSUSE_11.2">
67 <path project="openSUSE:11.2" repository="standard"/>
71 <repository name="openSUSE_11.1">
72 <path project="openSUSE:11.1" repository="standard"/>
76 <repository name="Fedora_12">
77 <path project="Fedora:12" repository="standard" />
81 <repository name="SLE_11">
82 <path project="SUSE:SLE-11" repository="standard" />
# XML template for a freshly created package's _meta, filled via %(name)s / %(user)s.
# NOTE(review): body partially elided; closing triple-quote lies outside this excerpt,
# so no interior comments are added (they would land inside the string).
91 new_package_templ = """\
92 <package name="%(name)s">
94 <title></title> <!-- Title of package -->
97 <!-- for long description -->
100 <!-- following roles are inherited from the parent project
101 <person role="maintainer" userid="%(user)s"/>
102 <person role="bugowner" userid="%(user)s"/>
105 <url>PUT_UPSTREAM_URL_HERE</url>
109 use one of the examples below to disable building of this package
110 on a certain architecture, in a certain repository,
111 or a combination thereof:
113 <disable arch="x86_64"/>
114 <disable repository="SUSE_SLE-10"/>
115 <disable repository="SUSE_SLE-10" arch="x86_64"/>
117 Possible sections where you can use the tags above:
127 Please have a look at:
128 http://en.opensuse.org/Restricted_formats
129 Packages containing formats listed there are NOT allowed to
130 be packaged in the openSUSE Buildservice and will be deleted!
# Further XML/boilerplate templates: attribute meta, user (person) meta, and a
# pattern skeleton. NOTE(review): each template's closing triple-quote is elided
# in this excerpt, so no comments are placed between the template starts — they
# would fall inside the preceding string literal.
137 new_attribute_templ = """\
139 <attribute namespace="" name="">
145 new_user_template = """\
147 <login>%(user)s</login>
148 <email>PUT_EMAIL_ADDRESS_HERE</email>
149 <realname>PUT_REAL_NAME_HERE</realname>
151 <project name="home:%(user)s"/>
167 new_pattern_template = """\
168 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
# Map from OBS build-status strings to the one-character symbols printed by
# "osc results"-style output. Only two entries are visible here; the rest of the
# dict is elided in this excerpt.
174 buildstatus_symbols = {'succeeded': '.',
176 'expansion error': 'U', # obsolete with OBS 2.0
# os.path.samefile is available only under Unix
def os_path_samefile(path1, path2):
    """Return True if path1 and path2 refer to the same file.

    Uses os.path.samefile where the platform provides it (Unix); on
    platforms without it, falls back to comparing the resolved real paths.

    NOTE(review): in the damaged listing the try/except lines were elided,
    leaving the fallback `return` unreachable; this restores the intended
    fallback structure.
    """
    try:
        return os.path.samefile(path1, path2)
    except AttributeError:
        # samefile not available on this platform -> best-effort comparison
        return os.path.realpath(path1) == os.path.realpath(path2)
198 """represent a file, including its metadata"""
199 def __init__(self, name, md5, size, mtime, skipped=False):
204 self.skipped = skipped
212 """Source service content
215 """creates an empty serviceinfo instance"""
218 def read(self, serviceinfo_node, append=False):
219 """read in the source services <services> element passed as
222 if serviceinfo_node == None:
224 if not append or self.services == None:
226 services = serviceinfo_node.findall('service')
228 for service in services:
229 name = service.get('name')
230 mode = service.get('mode', None)
231 data = { 'name' : name, 'mode' : '' }
235 for param in service.findall('param'):
236 option = param.get('name', None)
238 name += " --" + option + " '" + value + "'"
239 data['command'] = name
240 self.services.append(data)
242 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
243 raise oscerr.APIError(msg)
# getProjectGlobalServices: fetches all project-wide services for (project, package)
# via the getprojectservices API command and merges them into self.services
# (read(..., True) appends). HTTP errors are caught (handler body elided).
245 def getProjectGlobalServices(self, apiurl, project, package):
246 # get all project wide services in one file, we don't store it yet
247 u = makeurl(apiurl, ['source', project, package], query='cmd=getprojectservices')
250 root = ET.parse(f).getroot()
251 self.read(root, True)
252 except urllib2.HTTPError, e:
# addVerifyFile: appends a verify_file service entry (file + sha256 checksum) to
# the given <services> node. NOTE(review): `f` is opened without a visible close()
# — presumably closed on an elided line; verify there is no fd leak.
256 def addVerifyFile(self, serviceinfo_node, filename):
259 f = open(filename, 'r')
260 digest = hashlib.sha256(f.read()).hexdigest()
264 s = ET.Element( "service", name="verify_file" )
265 ET.SubElement(s, "param", name="file").text = filename
266 ET.SubElement(s, "param", name="verifier").text = "sha256"
267 ET.SubElement(s, "param", name="checksum").text = digest
# Three helpers that append ready-made <service> elements to a <services> node:
# - addDownloadUrl: splits url_string into protocol/host/path params for the
#   download_url service (host/path extraction lines elided).
# - addGitUrl: tar_scm service with scm=git.
# - addRecompressTar: recompress service converting *.tar to bz2.
273 def addDownloadUrl(self, serviceinfo_node, url_string):
274 from urlparse import urlparse
275 url = urlparse( url_string )
276 protocol = url.scheme
281 s = ET.Element( "service", name="download_url" )
282 ET.SubElement(s, "param", name="protocol").text = protocol
283 ET.SubElement(s, "param", name="host").text = host
284 ET.SubElement(s, "param", name="path").text = path
289 def addGitUrl(self, serviceinfo_node, url_string):
291 s = ET.Element( "service", name="tar_scm" )
292 ET.SubElement(s, "param", name="url").text = url_string
293 ET.SubElement(s, "param", name="scm").text = "git"
297 def addRecompressTar(self, serviceinfo_node):
299 s = ET.Element( "service", name="recompress" )
300 ET.SubElement(s, "param", name="file").text = "*.tar"
301 ET.SubElement(s, "param", name="compression").text = "bz2"
# execute: runs the configured source services for a working-copy dir.
# Deletes previously generated _service:* / _service_* files, then for each
# service (optionally restricted to `singleservice`, honoring disabled/callmode
# filtering) runs /usr/lib/obs/service/<name> with its arguments into a temp dir
# and moves the results back — either under their plain names (local/disabled/
# trylocal/localonly modes) or prefixed "_service:<name>:".
# NOTE(review): the service command line is executed via the shell
# (subprocess.call(c, shell=True)); `call` comes from the _service file, so this
# trusts the service definition — confirm that is the intended trust boundary.
305 def execute(self, dir, callmode = None, singleservice = None):
308 # cleanup existing generated files
309 for filename in os.listdir(dir):
310 if filename.startswith('_service:') or filename.startswith('_service_'):
311 os.unlink(os.path.join(dir, filename))
313 allservices = self.services or []
314 if singleservice and not singleservice in allservices:
315 # set array to the manual specified singleservice, if it is not part of _service file
316 data = { 'name' : singleservice, 'command' : singleservice, 'mode' : '' }
320 for service in allservices:
321 if singleservice and service['name'] != singleservice:
323 if service['mode'] == "disabled" and callmode != "disabled":
325 if service['mode'] != "disabled" and callmode == "disabled":
327 call = service['command']
328 temp_dir = tempfile.mkdtemp()
329 name = call.split(None, 1)[0]
330 if not os.path.exists("/usr/lib/obs/service/"+name):
331 raise oscerr.PackageNotInstalled("obs-service-"+name)
332 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
333 if conf.config['verbose'] > 1:
334 print "Run source service:", c
335 ret = subprocess.call(c, shell=True)
337 print "ERROR: service call failed: " + c
338 # FIXME: addDownloadUrlService calls si.execute after
339 # updating _services.
340 print " (your _services file may be corrupt now)"
342 if service['mode'] == "disabled" or service['mode'] == "trylocal" or service['mode'] == "localonly" or callmode == "local":
343 for filename in os.listdir(temp_dir):
344 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, filename) )
346 for filename in os.listdir(temp_dir):
347 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, "_service:"+name+":"+filename) )
351 """linkinfo metadata (which is part of the xml representing a directory
354 """creates an empty linkinfo instance"""
364 def read(self, linkinfo_node):
365 """read in the linkinfo metadata from the <linkinfo> element passed as
367 If the passed element is None, the method does nothing.
369 if linkinfo_node == None:
371 self.project = linkinfo_node.get('project')
372 self.package = linkinfo_node.get('package')
373 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
374 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
375 self.srcmd5 = linkinfo_node.get('srcmd5')
376 self.error = linkinfo_node.get('error')
377 self.rev = linkinfo_node.get('rev')
378 self.baserev = linkinfo_node.get('baserev')
381 """returns True if the linkinfo is not empty, otherwise False"""
382 if self.xsrcmd5 or self.lsrcmd5:
386 def isexpanded(self):
387 """returns True if the package is an expanded link"""
388 if self.lsrcmd5 and not self.xsrcmd5:
393 """returns True if the link is in error state (could not be applied)"""
399 """return an informatory string representation"""
400 if self.islink() and not self.isexpanded():
401 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
402 % (self.project, self.package, self.xsrcmd5, self.rev)
403 elif self.islink() and self.isexpanded():
405 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
406 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
408 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
409 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
# http://effbot.org/zone/element-lib.htm#prettyprint
def xmlindent(elem, level=0):
    """In-place pretty-print helper: insert newline/indent whitespace into an
    ElementTree element's .text and .tail so serialization is human-readable.

    NOTE(review): the assignment lines were elided in the damaged listing
    (only the conditions survived); this restores the canonical effbot
    prettyprint structure that those conditions belong to.
    """
    i = "\n" + level * "  "
    if len(elem):
        # container element: indent its text and recurse into children
        if not elem.text or not elem.text.strip():
            elem.text = i + "  "
        for e in elem:
            xmlindent(e, level + 1)
            if not e.tail or not e.tail.strip():
                e.tail = i + "  "
        # last child closes one level up
        if not e.tail or not e.tail.strip():
            e.tail = i
    else:
        # leaf: only fix the tail (and never touch the root's tail)
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i
431 """represent a project directory, holding packages"""
432 REQ_STOREFILES = ('_project', '_apiurl')
433 if conf.config['do_package_tracking']:
434 REQ_STOREFILES += ('_packages',)
435 def __init__(self, dir, getPackageList=True, progress_obj=None, wc_check=True):
438 self.absdir = os.path.abspath(dir)
439 self.progress_obj = progress_obj
441 self.name = store_read_project(self.dir)
442 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
446 dirty_files = self.wc_check()
448 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
449 'Please run \'osc repairwc %s\' and check the state\n' \
450 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
451 raise oscerr.WorkingCopyInconsistent(self.name, None, dirty_files, msg)
454 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
456 self.pacs_available = []
458 if conf.config['do_package_tracking']:
459 self.pac_root = self.read_packages().getroot()
460 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
461 self.pacs_excluded = [ i for i in os.listdir(self.dir)
462 for j in conf.config['exclude_glob']
463 if fnmatch.fnmatch(i, j) ]
464 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
465 # store all broken packages (e.g. packages which where removed by a non-osc cmd)
466 # in the self.pacs_broken list
467 self.pacs_broken = []
468 for p in self.pacs_have:
469 if not os.path.isdir(os.path.join(self.absdir, p)):
470 # all states will be replaced with the '!'-state
471 # (except it is already marked as deleted ('D'-state))
472 self.pacs_broken.append(p)
474 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
476 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
# Fragment of Project.wc_check (its def line is elided): collects into
# dirty_files every required store file missing from .osc.
481 for fname in Project.REQ_STOREFILES:
482 if not os.path.exists(os.path.join(self.absdir, store, fname)):
483 dirty_files.append(fname)
# wc_repair: restores a usable _apiurl store file. Without a passed apiurl and
# with the file missing, repair is impossible -> WorkingCopyInconsistent.
# Otherwise the url is validated (conf.parse_apisrv_url), written, and re-read.
486 def wc_repair(self, apiurl=None):
488 if not os.path.exists(os.path.join(self.dir, store, '_apiurl')) or apiurl:
490 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
491 'no \'apiurl\' was passed to wc_repair'
492 # hmm should we raise oscerr.WrongArgs?
493 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
495 conf.parse_apisrv_url(None, apiurl)
496 store_write_apiurl(self.dir, apiurl)
497 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
# checkout_missing_pacs: checks out every upstream package not yet present in
# the working copy. With package tracking, an unversioned local file/dir of the
# same name is a hard error (PackageExists) rather than being overwritten.
499 def checkout_missing_pacs(self, expand_link=False):
500 for pac in self.pacs_missing:
502 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
503 # pac is not under version control but a local file/dir exists
504 msg = 'can\'t add package \'%s\': Object already exists' % pac
505 raise oscerr.PackageExists(self.name, pac, msg)
507 print 'checking out new package %s' % pac
508 checkout_package(self.apiurl, self.name, pac, \
509 pathname=getTransActPath(os.path.join(self.dir, pac)), \
510 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
# status: returns the one-character state of a tracked package, reconciling the
# recorded state with whether the directory actually exists (e.g. an 'A'/' '
# entry whose dir vanished is reported differently — those return lines are
# elided here). Unknown names raise OscIOError.
512 def status(self, pac):
513 exists = os.path.exists(os.path.join(self.absdir, pac))
514 st = self.get_state(pac)
515 if st is None and exists:
518 raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % pac)
519 elif st in ('A', ' ') and not exists:
521 elif st == 'D' and not exists:
# get_status: returns (state, package) pairs for all tracked packages, skipping
# any state listed in exclude_states; unversioned entries are appended as '?'
# unless '?' itself is excluded.
526 def get_status(self, *exclude_states):
528 for pac in self.pacs_have:
529 st = self.status(pac)
530 if not st in exclude_states:
531 res.append((st, pac))
532 if not '?' in exclude_states:
533 res.extend([('?', pac) for pac in self.pacs_unvers])
# get_pacobj: best-effort Package construction for a tracked name; returns a
# fallback (elided, presumably None — TODO confirm) for '?'/'!' states, deleted-
# and-gone dirs, or when Package() raises OscIOError.
536 def get_pacobj(self, pac, *pac_args, **pac_kwargs):
538 st = self.status(pac)
539 if st in ('?', '!') or st == 'D' and not os.path.exists(os.path.join(self.dir, pac)):
541 return Package(os.path.join(self.dir, pac), *pac_args, **pac_kwargs)
542 except oscerr.OscIOError:
# set_state: update (or create) the <package> entry's state in the in-memory
# _packages tree.
545 def set_state(self, pac, state):
546 node = self.get_package_node(pac)
548 self.new_package_entry(pac, state)
550 node.set('state', state)
# get_package_node / del_package_node: linear search of the _packages tree by
# package name (return/None fall-through lines elided).
552 def get_package_node(self, pac):
553 for node in self.pac_root.findall('package'):
554 if pac == node.get('name'):
558 def del_package_node(self, pac):
559 for node in self.pac_root.findall('package'):
560 if pac == node.get('name'):
561 self.pac_root.remove(node)
# get_state: state attribute of the package's entry, via get_package_node.
563 def get_state(self, pac):
564 node = self.get_package_node(pac)
566 return node.get('state')
# new_package_entry: append a fresh <package name=... state=...> element.
570 def new_package_entry(self, name, state):
571 ET.SubElement(self.pac_root, 'package', name=name, state=state)
# read_packages: returns the parsed _packages ElementTree. If the file is
# missing/empty, migrates an old-style checkout: scans local dirs, keeps those
# that are real package working copies known to the server, writes an initial
# _packages file, and re-parses it.
573 def read_packages(self):
576 packages_file = os.path.join(self.absdir, store, '_packages')
577 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
578 return ET.parse(packages_file)
580 # scan project for existing packages and migrate them
582 for data in os.listdir(self.dir):
583 pac_dir = os.path.join(self.absdir, data)
584 # we cannot use self.pacs_available because we cannot guarantee that the package list
585 # was fetched from the server
586 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
587 and Package(pac_dir).name == data:
588 cur_pacs.append(ET.Element('package', name=data, state=' '))
589 store_write_initial_packages(self.absdir, self.name, cur_pacs)
590 return ET.parse(os.path.join(self.absdir, store, '_packages'))
# write_packages: pretty-print the in-memory tree and persist it to _packages.
592 def write_packages(self):
593 xmlindent(self.pac_root)
594 store_write_string(self.absdir, '_packages', ET.tostring(self.pac_root))
# addPackage: puts a package under version control ('A' state). Names matching
# any exclude_glob pattern are rejected; an existing live entry raises
# PackageExists; a missing or deleted ('D') entry becomes 'A' and the name is
# dropped from the unversioned list.
596 def addPackage(self, pac):
598 for i in conf.config['exclude_glob']:
599 if fnmatch.fnmatch(pac, i):
600 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
601 raise oscerr.OscIOError(None, msg)
602 state = self.get_state(pac)
603 if state == None or state == 'D':
604 self.new_package_entry(pac, 'A')
605 self.write_packages()
606 # sometimes the new pac doesn't exist in the list because
607 # it would take too much time to update all data structs regularly
608 if pac in self.pacs_unvers:
609 self.pacs_unvers.remove(pac)
611 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
# delPackage: marks a tracked package (a Package object) for deletion. For
# clean (' '/'D') packages it removes local files (refusing when modified/
# conflicted/added/unknown files exist unless force), records them on the
# package's deletelist, and sets project state 'D'. The later branches (their
# guarding elif lines are elided) handle force-deletion of a dirty package and
# the not-under-version-control / unsupported-state fallbacks.
613 def delPackage(self, pac, force = False):
614 state = self.get_state(pac.name)
616 if state == ' ' or state == 'D':
618 for filename in pac.filenamelist + pac.filenamelist_unvers:
619 filestate = pac.status(filename)
620 if filestate == 'M' or filestate == 'C' or \
621 filestate == 'A' or filestate == '?':
624 del_files.append(filename)
625 if can_delete or force:
626 for filename in del_files:
627 pac.delete_localfile(filename)
628 if pac.status(filename) != '?':
629 # this is not really necessary
630 pac.put_on_deletelist(filename)
631 print statfrmt('D', getTransActPath(os.path.join(pac.dir, filename)))
632 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
633 pac.write_deletelist()
634 self.set_state(pac.name, 'D')
635 self.write_packages()
637 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
640 delete_dir(pac.absdir)
641 self.del_package_node(pac.name)
642 self.write_packages()
643 print statfrmt('D', pac.name)
645 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
647 print 'package is not under version control'
649 print 'unsupported state'
# update: updates either the named packages (pacs) or the whole project:
# removes packages deleted upstream (unless locally 'A'), re-checks-out broken
# packages, expands/unexpands links as requested, updates each remaining
# package to the chosen rev, and finally checks out packages new upstream.
# NOTE(review): on the per-package branch below, progress_obj= is passed to
# os.path.join instead of Package(...) — os.path.join takes no such keyword, so
# this line would raise TypeError if reached; looks like a bug. TODO confirm
# against upstream osc.
651 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
654 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
656 # we need to make sure that the _packages file will be written (even if an exception
659 # update complete project
660 # packages which no longer exists upstream
661 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
663 for pac in upstream_del:
664 p = Package(os.path.join(self.dir, pac))
665 self.delPackage(p, force = True)
666 delete_storedir(p.storedir)
671 self.pac_root.remove(self.get_package_node(p.name))
672 self.pacs_have.remove(pac)
674 for pac in self.pacs_have:
675 state = self.get_state(pac)
676 if pac in self.pacs_broken:
677 if self.get_state(pac) != 'A':
678 checkout_package(self.apiurl, self.name, pac,
679 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
680 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
683 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
685 if expand_link and p.islink() and not p.isexpanded():
688 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
690 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
693 rev = p.linkinfo.xsrcmd5
694 print 'Expanding to rev', rev
695 elif unexpand_link and p.islink() and p.isexpanded():
696 rev = p.linkinfo.lsrcmd5
697 print 'Unexpanding to rev', rev
698 elif p.islink() and p.isexpanded():
700 print 'Updating %s' % p.name
701 p.update(rev, service_files)
705 # TODO: Package::update has to fixed to behave like svn does
706 if pac in self.pacs_broken:
707 checkout_package(self.apiurl, self.name, pac,
708 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
709 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
711 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
712 elif state == 'A' and pac in self.pacs_available:
713 # file/dir called pac already exists and is under version control
714 msg = 'can\'t add package \'%s\': Object already exists' % pac
715 raise oscerr.PackageExists(self.name, pac, msg)
720 print 'unexpected state.. package \'%s\'' % pac
722 self.checkout_missing_pacs(expand_link=not unexpand_link)
724 self.write_packages()
# validate_pacs: runs the given validators over packages. With no explicit pacs
# it validates all tracked packages, first failing on any broken (missing, not
# 'D') package; for explicit pacs the same broken check is applied per name.
# Uses '.' as the Package dir when the target is the cwd (elided branch).
726 def validate_pacs(self, validators, verbose_validation=False, *pacs):
728 for pac in self.pacs_broken:
729 if self.get_state(pac) != 'D':
730 msg = 'validation failed: package \'%s\' is missing' % pac
731 raise oscerr.PackageMissing(self.name, pac, msg)
732 pacs = self.pacs_have
734 if pac in self.pacs_broken and self.get_state(pac) != 'D':
735 msg = 'validation failed: package \'%s\' is missing' % pac
736 raise oscerr.PackageMissing(self.name, pac, msg)
737 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
740 p = Package(os.path.join(self.dir, pac))
741 p.validate(validators, verbose_validation)
# commit: commits the named packages (dispatching per state: 'A' -> new
# package, 'D' -> delete, ' '/'M' -> normal commit, unversioned/broken ->
# message, external dir -> commitExtPackage) or, with no pacs, the whole
# project after refusing on broken ('!') packages.
# NOTE(review): `files = {}` is a mutable default argument — harmless only if
# never mutated; the visible code only reads it (has_key). Python 2 idioms
# (has_key, print statements) throughout.
743 def commit(self, pacs = (), msg = '', files = {}, validators_dir = None, verbose_validation = False):
748 if files.has_key(pac):
750 state = self.get_state(pac)
752 self.commitNewPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
754 self.commitDelPackage(pac)
756 # display the correct dir when sending the changes
757 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
760 p = Package(os.path.join(self.dir, pac))
762 p.commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
763 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
764 print 'osc: \'%s\' is not under version control' % pac
765 elif pac in self.pacs_broken:
766 print 'osc: \'%s\' package not found' % pac
768 self.commitExtPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
770 self.write_packages()
772 # if we have packages marked as '!' we cannot commit
773 for pac in self.pacs_broken:
774 if self.get_state(pac) != 'D':
775 msg = 'commit failed: package \'%s\' is missing' % pac
776 raise oscerr.PackageMissing(self.name, pac, msg)
778 for pac in self.pacs_have:
779 state = self.get_state(pac)
782 Package(os.path.join(self.dir, pac)).commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
784 self.commitDelPackage(pac)
786 self.commitNewPackage(pac, msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
788 self.write_packages()
# commitNewPackage: if the package is unknown on the server, creates its _meta
# (edit_meta with the current apiurl user) and then commits the local package;
# on success the tracked state goes from 'A' to ' '.
790 def commitNewPackage(self, pac, msg = '', files = [], validators_dir = None, verbose_validation = False):
791 """creates and commits a new package if it does not exist on the server"""
792 if pac in self.pacs_available:
793 print 'package \'%s\' already exists' % pac
795 user = conf.get_apiurl_usr(self.apiurl)
796 edit_meta(metatype='pkg',
797 path_args=(quote_plus(self.name), quote_plus(pac)),
802 # display the correct dir when sending the changes
804 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
808 p = Package(os.path.join(self.dir, pac))
810 print statfrmt('Sending', os.path.normpath(p.dir))
811 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
812 self.set_state(pac, ' ')
# commitDelPackage: removes the package both locally (store dir, then the
# package dir itself on elided lines) and on the server (delete_package),
# finally dropping it from the _packages tree.
815 def commitDelPackage(self, pac):
816 """deletes a package on the server and in the working copy"""
818 # display the correct dir when sending the changes
819 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
822 pac_dir = os.path.join(self.dir, pac)
823 p = Package(os.path.join(self.dir, pac))
824 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
825 delete_storedir(p.storedir)
831 pac_dir = os.path.join(self.dir, pac)
832 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
833 print statfrmt('Deleting', getTransActPath(pac_dir))
834 delete_package(self.apiurl, self.name, pac)
835 self.del_package_node(pac)
# commitExtPackage: commits a package working copy that belongs to a different
# project/apiurl than this Project. Reads project/package/apiurl from the
# package's own store, creates its server-side _meta if absent, then commits.
837 def commitExtPackage(self, pac, msg, files = [], validators_dir=None, verbose_validation=False):
838 """commits a package from an external project"""
839 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
842 pac_path = os.path.join(self.dir, pac)
844 project = store_read_project(pac_path)
845 package = store_read_package(pac_path)
846 apiurl = store_read_apiurl(pac_path, defaulturl=False)
847 if not meta_exists(metatype='pkg',
848 path_args=(quote_plus(project), quote_plus(package)),
849 template_args=None, create_new=False, apiurl=apiurl):
850 user = conf.get_apiurl_usr(self.apiurl)
851 edit_meta(metatype='pkg',
852 path_args=(quote_plus(project), quote_plus(package)),
853 template_args=({'name': pac, 'user': user}), apiurl=apiurl)
854 p = Package(pac_path)
856 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
# Fragment of Project.__str__ (def line elided): builds a banner-style summary
# of the project's dirs and have/missing package lists.
860 r.append('*****************************************************')
861 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
862 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
863 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
864 r.append('*****************************************************')
# init_project: turns `dir` into a fresh osc project working copy. Creates the
# directory if needed, refuses non-directories and already-initialized copies,
# creates the .osc store with project name and apiurl (plus an empty _packages
# file when package tracking is on — guard line elided), and returns the
# Project object.
868 def init_project(apiurl, dir, project, package_tracking=True, getPackageList=True, progress_obj=None, wc_check=True):
871 if not os.path.exists(dir):
872 # use makedirs (checkout_no_colon config option might be enabled)
874 elif not os.path.isdir(dir):
875 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
876 if os.path.exists(os.path.join(dir, store)):
877 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
879 os.mkdir(os.path.join(dir, store))
881 store_write_project(dir, project)
882 store_write_apiurl(dir, apiurl)
884 store_write_initial_packages(dir, project, [])
885 return Project(dir, getPackageList, progress_obj, wc_check)
889 """represent a package (its directory) and read/keep/write its metadata"""
891 # should _meta be a required file?
892 REQ_STOREFILES = ('_project', '_package', '_apiurl', '_files', '_osclib_version')
893 OPT_STOREFILES = ('_to_be_added', '_to_be_deleted', '_in_conflict', '_in_update',
894 '_in_commit', '_meta', '_meta_mode', '_frozenlink', '_pulled', '_linkrepair',
895 '_size_limit', '_commit_msg')
897 def __init__(self, workingdir, progress_obj=None, size_limit=None, wc_check=True):
900 self.dir = workingdir
901 self.absdir = os.path.abspath(self.dir)
902 self.storedir = os.path.join(self.absdir, store)
903 self.progress_obj = progress_obj
904 self.size_limit = size_limit
905 if size_limit and size_limit == 0:
906 self.size_limit = None
908 check_store_version(self.dir)
910 self.prjname = store_read_project(self.dir)
911 self.name = store_read_package(self.dir)
912 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
914 self.update_datastructs()
917 dirty_files = self.wc_check()
919 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
920 'Please run \'osc repairwc %s\' (Note this might _remove_\n' \
921 'files from the .osc/ dir). Please check the state\n' \
922 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
923 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, dirty_files, msg)
# Fragment of Package.wc_check (its def line is elided). Flags as dirty:
# tracked files missing from the store (unless skipped), missing required
# store files, unexpected or skipped-but-present files inside the store, and
# stale entries on the to_be_deleted / in_conflict lists.
929 for fname in self.filenamelist:
930 if not os.path.exists(os.path.join(self.storedir, fname)) and not fname in self.skipped:
931 dirty_files.append(fname)
932 for fname in Package.REQ_STOREFILES:
933 if not os.path.isfile(os.path.join(self.storedir, fname)):
934 dirty_files.append(fname)
935 for fname in os.listdir(self.storedir):
936 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
937 fname.startswith('_build'):
939 elif fname in self.filenamelist and fname in self.skipped:
940 dirty_files.append(fname)
941 elif not fname in self.filenamelist:
942 dirty_files.append(fname)
943 for fname in self.to_be_deleted[:]:
944 if not fname in self.filenamelist:
945 dirty_files.append(fname)
946 for fname in self.in_conflict[:]:
947 if not fname in self.filenamelist:
948 dirty_files.append(fname)
# wc_repair: counterpart to wc_check. Restores _apiurl (failing without a
# usable apiurl), re-downloads store copies for any tracked file missing from
# .osc, deletes files that do not belong in the store, and prunes stale
# entries from the to_be_deleted / in_conflict lists.
951 def wc_repair(self, apiurl=None):
952 if not os.path.exists(os.path.join(self.storedir, '_apiurl')) or apiurl:
954 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
955 'no \'apiurl\' was passed to wc_repair'
956 # hmm should we raise oscerr.WrongArgs?
957 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
959 conf.parse_apisrv_url(None, apiurl)
960 store_write_apiurl(self.dir, apiurl)
961 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
962 # all files which are present in the filelist have to exist in the storedir
963 for f in self.filelist:
964 # XXX: should we also check the md5?
965 if not os.path.exists(os.path.join(self.storedir, f.name)) and not f.name in self.skipped:
966 # if get_source_file fails we're screwed up...
967 get_source_file(self.apiurl, self.prjname, self.name, f.name,
968 targetfilename=os.path.join(self.storedir, f.name), revision=self.rev,
970 for fname in os.listdir(self.storedir):
971 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
972 fname.startswith('_build'):
974 elif not fname in self.filenamelist or fname in self.skipped:
975 # this file does not belong to the storedir so remove it
976 os.unlink(os.path.join(self.storedir, fname))
977 for fname in self.to_be_deleted[:]:
978 if not fname in self.filenamelist:
979 self.to_be_deleted.remove(fname)
980 self.write_deletelist()
981 for fname in self.in_conflict[:]:
982 if not fname in self.filenamelist:
983 self.in_conflict.remove(fname)
984 self.write_conflictlist()
# Fragment of Package.info (def line elided): fills info_templ with the
# package's identifying data for display.
987 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
988 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
# addfile: schedules a local file for addition. Re-adding a file previously
# marked deleted just clears the delete mark; a file already tracked or already
# on the addlist raises PackageFileConflict; otherwise it goes on the addlist
# and an 'A' status line is printed.
991 def addfile(self, n):
992 if not os.path.exists(os.path.join(self.absdir, n)):
993 raise oscerr.OscIOError(None, 'error: file \'%s\' does not exist' % n)
994 if n in self.to_be_deleted:
995 self.to_be_deleted.remove(n)
996 # self.delete_storefile(n)
997 self.write_deletelist()
998 elif n in self.filenamelist or n in self.to_be_added:
999 raise oscerr.PackageFileConflict(self.prjname, self.name, n, 'osc: warning: \'%s\' is already under version control' % n)
1000 # shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
1002 pathname = os.path.join(self.dir, n)
1005 self.to_be_added.append(n)
1006 self.write_addlist()
1007 print statfrmt('A', pathname)
# delete_file: returns (deleted?, state). Refuses (False, state) for
# '?','A','M','R','C' states unless force; skipped files are simply unlinked
# and reported as (exists, 'S'). Otherwise the local file is removed, the
# addlist/conflictlist entries are cleaned up, and — except for added/unknown
# files — the name is put on the deletelist.
1009 def delete_file(self, n, force=False):
1010 """deletes a file if possible and marks the file as deleted"""
1013 state = self.status(n)
1014 except IOError, ioe:
1017 if state in ['?', 'A', 'M', 'R', 'C'] and not force:
1018 return (False, state)
1019 # special handling for skipped files: if file exists, simply delete it
1021 exists = os.path.exists(os.path.join(self.dir, n))
1022 self.delete_localfile(n)
1023 return (exists, 'S')
1025 self.delete_localfile(n)
1026 was_added = n in self.to_be_added
1027 if state in ('A', 'R') or state == '!' and was_added:
1028 self.to_be_added.remove(n)
1029 self.write_addlist()
1031 # don't remove "merge files" (*.r, *.mine...)
1032 # that's why we don't use clear_from_conflictlist
1033 self.in_conflict.remove(n)
1034 self.write_conflictlist()
1035 if not state in ('A', '?') and not (state == '!' and was_added):
1036 self.put_on_deletelist(n)
1037 self.write_deletelist()
1038 return (True, state)
# Small bookkeeping helpers. delete_storefile / delete_localfile unlink from
# the store dir / working dir respectively (the except clauses that swallow a
# missing file are elided here). The put_on_* helpers append a name to the
# corresponding in-memory list only if not already present; persisting is done
# separately by the write_*list methods.
1040 def delete_storefile(self, n):
1041 try: os.unlink(os.path.join(self.storedir, n))
1044 def delete_localfile(self, n):
1045 try: os.unlink(os.path.join(self.dir, n))
1048 def put_on_deletelist(self, n):
1049 if n not in self.to_be_deleted:
1050 self.to_be_deleted.append(n)
1052 def put_on_conflictlist(self, n):
1053 if n not in self.in_conflict:
1054 self.in_conflict.append(n)
1056 def put_on_addlist(self, n):
1057 if n not in self.to_be_added:
1058 self.to_be_added.append(n)
# clear_from_conflictlist: resolves a conflict marker for file n — removes the
# merge artifacts (n.mine and either n.new/n.old for linkrepair/pulled copies
# or n.r<rev>), drops n from in_conflict, and rewrites the conflict list.
# Missing artifacts are tolerated (the guarding try/exists lines are elided).
1060 def clear_from_conflictlist(self, n):
1061 """delete an entry from the file, and remove the file if it would be empty"""
1062 if n in self.in_conflict:
1064 filename = os.path.join(self.dir, n)
1065 storefilename = os.path.join(self.storedir, n)
1066 myfilename = os.path.join(self.dir, n + '.mine')
1067 if self.islinkrepair() or self.ispulled():
1068 upfilename = os.path.join(self.dir, n + '.new')
1070 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1073 os.unlink(myfilename)
1074 # the working copy may be updated, so the .r* ending may be obsolete...
1075 # then we don't care
1076 os.unlink(upfilename)
1077 if self.islinkrepair() or self.ispulled():
1078 os.unlink(os.path.join(self.dir, n + '.old'))
1082 self.in_conflict.remove(n)
1084 self.write_conflictlist()
1086 # XXX: this isn't used at all
1087 def write_meta_mode(self):
1088 # XXX: the "elif" is somehow a contradiction (with current and the old implementation
1089 # it's not possible to "leave" the metamode again) (except if you modify pac.meta
1090 # which is really ugly:) )
1092 store_write_string(self.absdir, '_meta_mode', '')
1093 elif self.ismetamode():
1094 os.unlink(os.path.join(self.storedir, '_meta_mode'))
1096 def write_sizelimit(self):
1097 if self.size_limit and self.size_limit <= 0:
1099 os.unlink(os.path.join(self.storedir, '_size_limit'))
1103 store_write_string(self.absdir, '_size_limit', str(self.size_limit) + '\n')
    def write_addlist(self):
        """Persist the list of files scheduled for addition ('_to_be_added')."""
        self.__write_storelist('_to_be_added', self.to_be_added)
    def write_deletelist(self):
        """Persist the list of files scheduled for deletion ('_to_be_deleted')."""
        self.__write_storelist('_to_be_deleted', self.to_be_deleted)
1111 def delete_source_file(self, n):
1112 """delete local a source file"""
1113 self.delete_localfile(n)
1114 self.delete_storefile(n)
    def delete_remote_source_file(self, n):
        """delete a remote source file (e.g. from the server)"""
        # rev=upload: operate on the current upload revision
        query = 'rev=upload'
        u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
        # NOTE(review): the HTTP DELETE call on 'u' is elided in this excerpt
    def put_source_file(self, n, copy_only=False):
        """Upload file *n* to the server (skipped when copy_only) and refresh its store copy.

        NOTE(review): the surrounding try/finally and two if-headers are
        elided in this excerpt.
        """
        # '_in_commit' acts as a transaction marker directory inside the store
        cdir = os.path.join(self.storedir, '_in_commit')
            if not os.path.isdir(cdir):
            query = 'rev=repository'
            tmpfile = os.path.join(cdir, n)
            shutil.copyfile(os.path.join(self.dir, n), tmpfile)
            # escaping '+' in the URL path (note: not in the URL query string) is
            # only a workaround for ruby on rails, which swallows it otherwise
                u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
                http_PUT(u, file = os.path.join(self.dir, n))
            os.rename(tmpfile, os.path.join(self.storedir, n))
            if os.path.isdir(cdir):
        if n in self.to_be_added:
            self.to_be_added.remove(n)
    def __generate_commitlist(self, todo_send):
        """Build the <directory> commit filelist XML from a {filename: md5} mapping."""
        root = ET.Element('directory')
        keys = todo_send.keys()
        # NOTE(review): the sort/loop header and return are elided in this excerpt
            ET.SubElement(root, 'entry', name=i, md5=todo_send[i])
    def __send_commitlog(self, msg, local_filelist):
        """send the commitlog and the local filelist to the server"""
        # NOTE(review): the 'comment' query entry, an 'if self.ispulled():'
        # header and the final return are elided in this excerpt.
        query = {'cmd'    : 'commitfilelist',
                 'user'   : conf.get_apiurl_usr(self.apiurl),
        if self.islink() and self.isexpanded():
            query['keeplink'] = '1'
            if conf.config['linkcontrol'] or self.isfrozen():
                query['linkrev'] = self.linkinfo.srcmd5
                query['repairlink'] = '1'
                query['linkrev'] = self.get_pulled_srcmd5()
        if self.islinkrepair():
            query['repairlink'] = '1'
        u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
        f = http_POST(u, data=ET.tostring(local_filelist))
        root = ET.parse(f).getroot()
    def __get_todo_send(self, server_filelist):
        """parse todo from a previous __send_commitlog call"""
        # NOTE(review): the todo-list initialisation, the 'error is None'
        # early return, an 'if name is None:' header and the final return
        # are elided in this excerpt.
        error = server_filelist.get('error')
        elif error != 'missing':
            raise oscerr.PackageInternalError(self.prjname, self.name,
                '__get_todo_send: unexpected \'error\' attr: \'%s\'' % error)
        # server answered with the entries it is still missing
        for n in server_filelist.findall('entry'):
            name = n.get('name')
                raise oscerr.APIError('missing \'name\' attribute:\n%s\n' % ET.tostring(server_filelist))
            todo.append(n.get('name'))
    def validate(self, validators_dir, verbose_validation=False):
        """Run every regular-file validator script in *validators_dir* on this package.

        NOTE(review): a few original lines (early return, continue, the
        else branch and the wait-for-exit check) are elided in this excerpt.
        """
        # packages starting with '_' (meta-like packages) are not validated
        if validators_dir is None or self.name.startswith('_'):
        for validator in sorted(os.listdir(validators_dir)):
            if validator.startswith('.'):
            fn = os.path.join(validators_dir, validator)
            mode = os.stat(fn).st_mode
            if stat.S_ISREG(mode):
                if verbose_validation:
                    print 'osc runs source validator: %s' % fn
                    p = subprocess.Popen([fn, '--verbose'], close_fds=True)
                    p = subprocess.Popen([fn], close_fds=True)
                    raise oscerr.ExtRuntimeError('ERROR: source_validator failed:\n%s' % p.stdout, validator)
    def commit(self, msg='', validators_dir=None, verbose_validation=False):
        """Commit the working copy changes to the server.

        NOTE(review): this excerpt elides a number of original lines
        (container initialisations, several if/elif/else headers, early
        returns, retry counter handling); comments cover only the visible
        code.
        """
        # commit only if the upstream revision is the same as the working copy's
        upstream_rev = self.latest_rev()
        if self.rev != upstream_rev:
            raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
        if not validators_dir is None:
            self.validate(validators_dir, verbose_validation)
            self.todo = [i for i in self.to_be_added if not i in self.filenamelist] + self.filenamelist
        pathn = getTransActPath(self.dir)
        # classify every tracked/added file into send/delete buckets
        for filename in self.filenamelist + [i for i in self.to_be_added if not i in self.filenamelist]:
            if filename.startswith('_service:') or filename.startswith('_service_'):
            st = self.status(filename)
                print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
            elif filename in self.todo:
                if st in ('A', 'R', 'M'):
                    todo_send[filename] = dgst(os.path.join(self.absdir, filename))
                    real_send.append(filename)
                    print statfrmt('Sending', os.path.join(pathn, filename))
                elif st in (' ', '!', 'S'):
                    if st == '!' and filename in self.to_be_added:
                        print 'file \'%s\' is marked as \'A\' but does not exist' % filename
                    f = self.findfilebyname(filename)
                        raise oscerr.PackageInternalError(self.prjname, self.name,
                            'error: file \'%s\' with state \'%s\' is not known by meta' \
                    todo_send[filename] = f.md5
                    todo_delete.append(filename)
                    print statfrmt('Deleting', os.path.join(pathn, filename))
            elif st in ('R', 'M', 'D', ' ', '!', 'S'):
                # ignore missing new file (it's not part of the current commit)
                if st == '!' and filename in self.to_be_added:
                f = self.findfilebyname(filename)
                    raise oscerr.PackageInternalError(self.prjname, self.name,
                        'error: file \'%s\' with state \'%s\' is not known by meta' \
                todo_send[filename] = f.md5
        if not real_send and not todo_delete and not self.islinkrepair() and not self.ispulled():
            print 'nothing to do for package %s' % self.name
        print 'Transmitting file data ',
        filelist = self.__generate_commitlist(todo_send)
        sfilelist = self.__send_commitlog(msg, filelist)
        send = self.__get_todo_send(sfilelist)
        real_send = [i for i in real_send if not i in send]
        # abort after 3 tries
        while len(send) and tries:
            for filename in send[:]:
                sys.stdout.write('.')
                self.put_source_file(filename)
                send.remove(filename)
            sfilelist = self.__send_commitlog(msg, filelist)
            send = self.__get_todo_send(sfilelist)
            raise oscerr.PackageInternalError(self.prjname, self.name,
                'server does not accept filelist:\n%s\nmissing:\n%s\n' \
                % (ET.tostring(filelist), ET.tostring(sfilelist)))
        # these files already exist on the server
        # just copy them into the storedir
        for filename in real_send:
            self.put_source_file(filename, copy_only=True)
        self.rev = sfilelist.get('rev')
        print 'Committed revision %s.' % self.rev
            os.unlink(os.path.join(self.storedir, '_pulled'))
        if self.islinkrepair():
            os.unlink(os.path.join(self.storedir, '_linkrepair'))
            self.linkrepair = False
            # XXX: mark package as invalid?
            print 'The source link has been repaired. This directory can now be removed.'
        if self.islink() and self.isexpanded():
            li.read(sfilelist.find('linkinfo'))
            if li.xsrcmd5 is None:
                raise oscerr.APIError('linkinfo has no xsrcmd5 attr:\n%s\n' % ET.tostring(sfilelist))
            sfilelist = ET.fromstring(self.get_files_meta(revision=li.xsrcmd5))
            for i in sfilelist.findall('entry'):
                if i.get('name') in self.skipped:
                    i.set('skipped', 'true')
            store_write_string(self.absdir, '_files', ET.tostring(sfilelist) + '\n')
        for filename in todo_delete:
            self.to_be_deleted.remove(filename)
            self.delete_storefile(filename)
        self.write_deletelist()
        self.write_addlist()
        self.update_datastructs()
        print_request_list(self.apiurl, self.prjname, self.name)
    def __write_storelist(self, name, data):
        """Write list *data* to store file *name*, or delete the file when the list is empty."""
        # NOTE(review): the if/try/except/else lines are elided in this excerpt
            os.unlink(os.path.join(self.storedir, name))
            store_write_string(self.absdir, name, '%s\n' % '\n'.join(data))
    def write_conflictlist(self):
        """Persist the list of conflicted files ('_in_conflict')."""
        self.__write_storelist('_in_conflict', self.in_conflict)
    def updatefile(self, n, revision, mtime=None):
        """Fetch file *n* at *revision* into the store and copy it into the working copy.

        NOTE(review): the else branch and the final cleanup lines are elided
        in this excerpt.
        """
        filename = os.path.join(self.dir, n)
        storefilename = os.path.join(self.storedir, n)
        origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
        origfile = os.path.join(self.storedir, '_in_update', n)
        if os.path.isfile(filename):
            # keep a copy of the current wc file so a broken update can be resumed
            shutil.copyfile(filename, origfile_tmp)
            os.rename(origfile_tmp, origfile)
        get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=storefilename,
                revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
        shutil.copyfile(storefilename, filename)
        if not origfile is None:
    def mergefile(self, n, revision, mtime=None):
        """Merge local changes of file *n* with the given remote revision using diff3.

        NOTE(review): several original lines (try/else headers, unlinks and
        the return statements) are elided in this excerpt.
        """
        filename = os.path.join(self.dir, n)
        storefilename = os.path.join(self.storedir, n)
        myfilename = os.path.join(self.dir, n + '.mine')
        upfilename = os.path.join(self.dir, n + '.r' + self.rev)
        origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
        origfile = os.path.join(self.storedir, '_in_update', n)
        # keep a recovery copy, then move the wc file aside as '.mine'
        shutil.copyfile(filename, origfile_tmp)
        os.rename(origfile_tmp, origfile)
        os.rename(filename, myfilename)
        get_source_file(self.apiurl, self.prjname, self.name, n,
                        revision=revision, targetfilename=upfilename,
                        progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
        if binary_file(myfilename) or binary_file(upfilename):
            # binary content: no merge attempt, upstream version wins and the file is marked in conflict
            shutil.copyfile(upfilename, filename)
            shutil.copyfile(upfilename, storefilename)
            self.in_conflict.append(n)
            self.write_conflictlist()
            # diff3 OPTIONS... MINE OLDER YOURS
            merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
            # we would rather use the subprocess module, but it is not available before 2.4
            ret = subprocess.call(merge_cmd, shell=True)
            #   "An exit status of 0 means `diff3' was successful, 1 means some
            #   conflicts were found, and 2 means trouble."
                # merge was successful... clean up
                shutil.copyfile(upfilename, storefilename)
                os.unlink(upfilename)
                os.unlink(myfilename)
                # unsuccessful merge
                shutil.copyfile(upfilename, storefilename)
                self.in_conflict.append(n)
                self.write_conflictlist()
                raise oscerr.ExtRuntimeError('diff3 failed with exit code: %s' % ret, merge_cmd)
    def update_local_filesmeta(self, revision=None):
        """
        Update the local _files file in the store.
        It is replaced with the version pulled from upstream.
        """
        meta = self.get_files_meta(revision=revision)
        store_write_string(self.absdir, '_files', meta + '\n')
1404 def get_files_meta(self, revision='latest', skip_service=True):
1405 fm = show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, meta=self.meta)
1406 # look for "too large" files according to size limit and mark them
1407 root = ET.fromstring(fm)
1408 for e in root.findall('entry'):
1409 size = e.get('size')
1410 if size and self.size_limit and int(size) > self.size_limit \
1411 or skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_')):
1412 e.set('skipped', 'true')
1413 return ET.tostring(root)
    def update_datastructs(self):
        """
        Update the internal data structures if the local _files
        file has changed (e.g. update_local_filesmeta() has been
        called).
        """
        # NOTE(review): a few original lines (an import, list initialisations,
        # the try/except around File construction, loop break) are elided in
        # this excerpt.
        files_tree = read_filemeta(self.dir)
        files_tree_root = files_tree.getroot()
        self.rev = files_tree_root.get('rev')
        self.srcmd5 = files_tree_root.get('srcmd5')
        self.linkinfo = Linkinfo()
        self.linkinfo.read(files_tree_root.find('linkinfo'))
        self.filenamelist = []
        for node in files_tree_root.findall('entry'):
                f = File(node.get('name'),
                         int(node.get('size')),
                         int(node.get('mtime')))
                if node.get('skipped'):
                    self.skipped.append(f.name)
                # okay, a very old version of _files, which didn't contain any metadata yet...
                f = File(node.get('name'), '', 0, 0)
            self.filelist.append(f)
            self.filenamelist.append(f.name)
        self.to_be_added = read_tobeadded(self.absdir)
        self.to_be_deleted = read_tobedeleted(self.absdir)
        self.in_conflict = read_inconflict(self.absdir)
        self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
        self.size_limit = read_sizelimit(self.dir)
        self.meta = self.ismetamode()
        # gather unversioned files, but ignore some stuff
        for i in os.listdir(self.dir):
            for j in conf.config['exclude_glob']:
                if fnmatch.fnmatch(i, j):
                    self.excluded.append(i)
        self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
                                     if i not in self.excluded
                                     if i not in self.filenamelist ]
        # NOTE(review): the 'def islink(self):' header is elided in this excerpt
        """tells us if the package is a link (has 'linkinfo').
        A package with linkinfo is a package which links to another package.
        Returns True if the package is a link, otherwise False."""
        return self.linkinfo.islink()
1473 def isexpanded(self):
1474 """tells us if the package is a link which is expanded.
1475 Returns True if the package is expanded, otherwise False."""
1476 return self.linkinfo.isexpanded()
1478 def islinkrepair(self):
1479 """tells us if we are repairing a broken source link."""
1480 return self.linkrepair
        # NOTE(review): the 'def ispulled(self):' header is elided in this excerpt
        """tells us if we have pulled a link."""
        return os.path.isfile(os.path.join(self.storedir, '_pulled'))
        # NOTE(review): the 'def isfrozen(self):' header is elided in this excerpt
        """tells us if the link is frozen."""
        return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1490 def ismetamode(self):
1491 """tells us if the package is in meta mode"""
1492 return os.path.isfile(os.path.join(self.storedir, '_meta_mode'))
    def get_pulled_srcmd5(self):
        """Return the srcmd5 recorded in the store's '_pulled' file (last line wins)."""
        # NOTE(review): the initialisation and the return statement are
        # elided in this excerpt.
        for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
            pulledrev = line.strip()
    def haslinkerror(self):
        """
        Returns True if the link is broken otherwise False.
        If the package is not a link it returns False.
        """
        return self.linkinfo.haserror()
    def linkerror(self):
        """
        Returns an error message if the link is broken otherwise None.
        If the package is not a link it returns None.
        """
        return self.linkinfo.error
    def update_local_pacmeta(self):
        """
        Update the local _meta file in the store.
        It is replaced with the version pulled from upstream.
        """
        meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
        store_write_string(self.absdir, '_meta', meta + '\n')
    def findfilebyname(self, n):
        """Return the File object named *n* from the filelist."""
        # NOTE(review): the match test and return are elided in this excerpt
        for i in self.filelist:
    def get_status(self, excluded=False, *exclude_states):
        """Return (state, filename) tuples for the wc, skipping *exclude_states*.

        NOTE(review): a few original lines (global/result initialisation,
        the 'if excluded:' header, the return) are elided in this excerpt.
        """
        # consider tracked, to-be-added, and unversioned non-directory entries
        todo = self.filenamelist + self.to_be_added + \
            [i for i in self.filenamelist_unvers if not os.path.isdir(os.path.join(self.absdir, i))]
            todo.extend([i for i in self.excluded if i != store])
        for fname in sorted(todo):
            st = self.status(fname)
            if not st in exclude_states:
                res.append((st, fname))
    def status(self, n):
        """
         file  storefile  file present  STATUS
        exists  exists      in _files

          x       -            -        'A' and listed in _to_be_added
          x       x            -        'R' and listed in _to_be_added
          x       x            x        ' ' if digest differs: 'M'
                                            and if in conflicts file: 'C'
          -       x            x        'D' and listed in _to_be_deleted
          x       x            x        'D' and listed in _to_be_deleted (e.g. if deleted file was modified)
          x       x            x        'C' and listed in _in_conflict
          x       -            x        'S' and listed in self.skipped
          -       -            x        'S' and listed in self.skipped
        """
        # NOTE(review): the per-branch state assignments ('D', 'C', 'S', ...)
        # and the final return are elided in this excerpt.
        known_by_meta = False
        exists_in_store = False
        if n in self.filenamelist:
            known_by_meta = True
        if os.path.exists(os.path.join(self.absdir, n)):
        if os.path.exists(os.path.join(self.storedir, n)):
            exists_in_store = True
        if n in self.to_be_deleted:
        elif n in self.in_conflict:
        elif n in self.skipped:
        elif n in self.to_be_added and exists and exists_in_store:
        elif n in self.to_be_added and exists:
        elif exists and exists_in_store and known_by_meta:
            if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
        elif n in self.to_be_added and not exists:
        elif not exists and exists_in_store and known_by_meta and not n in self.to_be_deleted:
        elif exists and not exists_in_store and not known_by_meta:
        elif not exists_in_store and known_by_meta:
            # XXX: this codepath shouldn't be reached (we restore the storefile
            # in update_datastructs)
            raise oscerr.PackageInternalError(self.prjname, self.name,
                'error: file \'%s\' is known by meta but no storefile exists.\n'
                'This might be caused by an old wc format. Please backup your current\n'
                'wc and checkout the package again. Afterwards copy all files (except the\n'
                '.osc/ dir) into the new package wc.' % n)
            # this case shouldn't happen (except there was a typo in the filename etc.)
            raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % n)
    def get_diff(self, revision=None, ignoreUnversioned=False):
        """Yield diffs for the working copy, optionally against *revision*.

        NOTE(review): numerous original lines (list initialisations,
        if/else/try/finally headers, loop headers) are elided in this
        excerpt; comments describe only the visible code.
        """
        diff_hdr = 'Index: %s\n'
        diff_hdr += '===================================================================\n'
        def diff_add_delete(fname, add, revision):
            # builds a pseudo-diff for a fully added or fully deleted file
            diff.append(diff_hdr % fname)
                diff.append('--- %s\t(revision 0)\n' % fname)
                if revision and not fname in self.to_be_added:
                    rev = 'working copy'
                diff.append('+++ %s\t(%s)\n' % (fname, rev))
                fname = os.path.join(self.absdir, fname)
                diff.append('--- %s\t(revision %s)\n' % (fname, revision or self.rev))
                diff.append('+++ %s\t(working copy)\n' % fname)
                fname = os.path.join(self.storedir, fname)
                if revision is not None and not add:
                    (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
                    get_source_file(self.apiurl, self.prjname, self.name, origname, tmpfile, revision)
                if binary_file(fname):
                    diff.append('Binary file \'%s\' %s.\n' % (origname, what))
                ltmpl = '@@ -0,0 +1,%d @@\n'
                    ltmpl = '@@ -1,%d +0,0 @@\n'
                lines = [tmpl % i for i in open(fname, 'r').readlines()]
                    diff.append(ltmpl % len(lines))
                if not lines[-1].endswith('\n'):
                    lines.append('\n\\ No newline at end of file\n')
                if tmpfile is not None:
        if revision is None:
            todo = self.todo or [i for i in self.filenamelist if not i in self.to_be_added]+self.to_be_added
                if fname in self.to_be_added and self.status(fname) == 'A':
                elif fname in self.to_be_deleted:
                    deleted.append(fname)
                elif fname in self.filenamelist:
                    kept.append(self.findfilebyname(fname))
                elif fname in self.to_be_added and self.status(fname) == '!':
                    raise oscerr.OscIOError(None, 'file \'%s\' is marked as \'A\' but does not exist\n'\
                        '(either add the missing file or revert it)' % fname)
                elif not ignoreUnversioned:
                    raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % fname)
            fm = self.get_files_meta(revision=revision)
            root = ET.fromstring(fm)
            rfiles = self.__get_files(root)
            # swap added and deleted
            kept, deleted, added, services = self.__get_rev_changes(rfiles)
            added = [f.name for f in added]
            added.extend([f for f in self.to_be_added if not f in kept])
            deleted = [f.name for f in deleted]
            deleted.extend(self.to_be_deleted)
#        print kept, added, deleted
            state = self.status(f.name)
            if state in ('S', '?', '!'):
            elif state == ' ' and revision is None:
            elif revision and self.findfilebyname(f.name).md5 == f.md5 and state != 'M':
            yield [diff_hdr % f.name]
            if revision is None:
                yield get_source_file_diff(self.absdir, f.name, self.rev)
                    (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
                    get_source_file(self.apiurl, self.prjname, self.name, f.name, tmpfile, revision)
                    diff = get_source_file_diff(self.absdir, f.name, revision,
                        os.path.basename(tmpfile), os.path.dirname(tmpfile), f.name)
                if tmpfile is not None:
            yield diff_add_delete(f, True, revision)
            yield diff_add_delete(f, False, revision)
    def merge(self, otherpac):
        """Merge *otherpac*'s todo list into this package's todo list."""
        self.todo += otherpac.todo
1737 '\n '.join(self.filenamelist),
    def read_meta_from_spec(self, spec = None):
        """Fill self.summary/url/descr from a spec file (*spec* or auto-detected).

        NOTE(review): several original lines (the explicit-spec branch,
        per-file prints and sys.exit calls) are elided in this excerpt.
        """
            # scan for spec files
            speclist = glob.glob(os.path.join(self.dir, '*.spec'))
            if len(speclist) == 1:
                specfile = speclist[0]
            elif len(speclist) > 1:
                print 'the following specfiles were found:'
                for filename in speclist:
                print 'please specify one with --specfile'
                print 'no specfile was found - please specify one ' \
        data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
        self.summary = data.get('Summary', '')
        self.url = data.get('Url', '')
        self.descr = data.get('%description', '')
    def update_package_meta(self, force=False):
        """
        for the updatepacmetafromspec subcommand
            argument force supress the confirm question
        """
        # NOTE(review): some original lines (url.text assignment, the
        # prompt else-branch and the mf.sync()/edit()/discard() calls)
        # are elided in this excerpt.
        m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
        root = ET.fromstring(m)
        root.find('title').text = self.summary
        root.find('description').text = ''.join(self.descr)
        url = root.find('url')
            url = ET.SubElement(root, 'url')
        u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
        mf = metafile(u, ET.tostring(root))
            print '*' * 36, 'old', '*' * 36
            print '*' * 36, 'new', '*' * 36
            print ET.tostring(root)
            repl = raw_input('Write? (y/N/e) ')
    def mark_frozen(self):
        """Create the '_frozenlink' marker and warn the user that the link is broken."""
        store_write_string(self.absdir, '_frozenlink', '')
        # NOTE(review): a bare 'print' (empty line) is elided in this excerpt
        print "The link in this package is currently broken. Checking"
        print "out the last working version instead; please use 'osc pull'"
        print "to repair the link."
1815 def unmark_frozen(self):
1816 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1817 os.unlink(os.path.join(self.storedir, '_frozenlink'))
    def latest_rev(self):
        """Return the latest upstream revision (or xsrcmd5 for links) for this package.

        NOTE(review): the else/try/except headers, the mark_frozen call and
        the final return are elided in this excerpt.
        """
        if self.islinkrepair():
            upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1, meta=self.meta)
        elif self.islink() and self.isexpanded():
            if self.isfrozen() or self.ispulled():
                upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
                    upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, meta=self.meta)
                        upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
                        upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base", meta=self.meta)
            upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name, meta=self.meta)
    def __get_files(self, fmeta_root):
        """Parse a _files <directory> XML root into a list of File objects."""
        # NOTE(review): the list initialisation and the return statement are
        # elided in this excerpt.
        if fmeta_root.get('rev') is None and len(fmeta_root.findall('entry')) > 0:
            raise oscerr.APIError('missing rev attribute in _files:\n%s' % ''.join(ET.tostring(fmeta_root)))
        for i in fmeta_root.findall('entry'):
            skipped = i.get('skipped') is not None
            f.append(File(i.get('name'), i.get('md5'),
                     int(i.get('size')), int(i.get('mtime')), skipped))
    def __get_rev_changes(self, revfiles):
        """Split *revfiles* against the wc state into (kept, added, deleted, services).

        NOTE(review): the list initialisations, the outer loop header,
        several branch bodies and appends are elided in this excerpt.
        """
            revfilenames.append(f.name)
            # treat skipped like deleted files
                if f.name.startswith('_service:'):
            # treat skipped like added files
            # problem: this overwrites existing files during the update
            # (because skipped files aren't in self.filenamelist_unvers)
            if f.name in self.filenamelist and not f.name in self.skipped:
        # anything in the wc filelist that is missing upstream counts as deleted
        for f in self.filelist:
            if not f.name in revfilenames:
        return kept, added, deleted, services
    def update(self, rev = None, service_files = False, size_limit = None):
        """Update the working copy to *rev*, resuming a previously broken update if needed.

        NOTE(review): numerous original lines (an import, loop headers,
        else branches, unlink/close calls) are elided in this excerpt;
        comments describe only the visible code.
        """
        # size_limit is only temporary for this update
        old_size_limit = self.size_limit
        if not size_limit is None:
            self.size_limit = int(size_limit)
        if os.path.isfile(os.path.join(self.storedir, '_in_update', '_files')):
            print 'resuming broken update...'
            root = ET.parse(os.path.join(self.storedir, '_in_update', '_files')).getroot()
            rfiles = self.__get_files(root)
            kept, added, deleted, services = self.__get_rev_changes(rfiles)
            # check if we aborted in the middle of a file update
            broken_file = os.listdir(os.path.join(self.storedir, '_in_update'))
            broken_file.remove('_files')
            if len(broken_file) == 1:
                origfile = os.path.join(self.storedir, '_in_update', broken_file[0])
                wcfile = os.path.join(self.absdir, broken_file[0])
                origfile_md5 = dgst(origfile)
                origfile_meta = self.findfilebyname(broken_file[0])
                if origfile.endswith('.copy'):
                    # ok it seems we aborted at some point during the copy process
                    # (copy process == copy wcfile to the _in_update dir). remove file+continue
                elif self.findfilebyname(broken_file[0]) is None:
                    # should we remove this file from _in_update? if we don't
                    # the user has no chance to continue without removing the file manually
                    raise oscerr.PackageInternalError(self.prjname, self.name,
                        '\'%s\' is not known by meta but exists in \'_in_update\' dir')
                elif os.path.isfile(wcfile) and dgst(wcfile) != origfile_md5:
                    (fd, tmpfile) = tempfile.mkstemp(dir=self.absdir, prefix=broken_file[0]+'.')
                    os.rename(wcfile, tmpfile)
                    os.rename(origfile, wcfile)
                    print 'warning: it seems you modified \'%s\' after the broken ' \
                          'update. Restored original file and saved modified version ' \
                          'to \'%s\'.' % (wcfile, tmpfile)
                elif not os.path.isfile(wcfile):
                    # this is strange... because it existed before the update. restore it
                    os.rename(origfile, wcfile)
                    # everything seems to be ok
            elif len(broken_file) > 1:
                raise oscerr.PackageInternalError(self.prjname, self.name, 'too many files in \'_in_update\' dir')
                if os.path.exists(os.path.join(self.storedir, f.name)):
                    if dgst(os.path.join(self.storedir, f.name)) == f.md5:
            if not service_files:
            self.__update(kept, added, deleted, services, ET.tostring(root), root.get('rev'))
            os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
            os.rmdir(os.path.join(self.storedir, '_in_update'))
        # ok everything is ok (hopefully)...
        fm = self.get_files_meta(revision=rev)
        root = ET.fromstring(fm)
        rfiles = self.__get_files(root)
        store_write_string(self.absdir, '_files', fm + '\n', subdir='_in_update')
        kept, added, deleted, services = self.__get_rev_changes(rfiles)
        if not service_files:
        self.__update(kept, added, deleted, services, fm, root.get('rev'))
        os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
        if os.path.isdir(os.path.join(self.storedir, '_in_update')):
            os.rmdir(os.path.join(self.storedir, '_in_update'))
        self.size_limit = old_size_limit
    def __update(self, kept, added, deleted, services, fm, rev):
        """Apply a computed change set (kept/added/deleted/services) to the working copy.

        NOTE(review): some original lines (loop headers, several elif/else
        branches) are elided in this excerpt; comments cover only the
        visible code.
        """
        pathn = getTransActPath(self.dir)
        # check for conflicts with existing files
            if f.name in self.filenamelist_unvers:
                raise oscerr.PackageFileConflict(self.prjname, self.name, f.name,
                    'failed to add file \'%s\' file/dir with the same name already exists' % f.name)
        # ok, the update can't fail due to existing files
            self.updatefile(f.name, rev, f.mtime)
            print statfrmt('A', os.path.join(pathn, f.name))
            # if the storefile doesn't exist we're resuming an aborted update:
            # the file was already deleted but we cannot know this
            # OR we're processing a _service: file (simply keep the file)
            if os.path.isfile(os.path.join(self.storedir, f.name)) and self.status(f.name) != 'M':
#            if self.status(f.name) != 'M':
                self.delete_localfile(f.name)
            self.delete_storefile(f.name)
            print statfrmt('D', os.path.join(pathn, f.name))
            if f.name in self.to_be_deleted:
                self.to_be_deleted.remove(f.name)
                self.write_deletelist()
            state = self.status(f.name)
#            print f.name, state
            if state == 'M' and self.findfilebyname(f.name).md5 == f.md5:
                # remote file didn't change
                # try to merge changes
                merge_status = self.mergefile(f.name, rev, f.mtime)
                print statfrmt(merge_status, os.path.join(pathn, f.name))
                self.updatefile(f.name, rev, f.mtime)
                print 'Restored \'%s\'' % os.path.join(pathn, f.name)
                get_source_file(self.apiurl, self.prjname, self.name, f.name,
                    targetfilename=os.path.join(self.storedir, f.name), revision=rev,
                    progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
                print 'skipping \'%s\' (this is due to conflicts)' % f.name
            elif state == 'D' and self.findfilebyname(f.name).md5 != f.md5:
                # XXX: in the worst case we might end up with f.name being
                # in _to_be_deleted and in _in_conflict... this needs to be checked
                if os.path.exists(os.path.join(self.absdir, f.name)):
                    merge_status = self.mergefile(f.name, rev, f.mtime)
                    print statfrmt(merge_status, os.path.join(pathn, f.name))
                    if merge_status == 'C':
                        # state changes from delete to conflict
                        self.to_be_deleted.remove(f.name)
                        self.write_deletelist()
                    # XXX: we cannot recover this case because we've no file
                    self.updatefile(f.name, rev, f.mtime)
                    print statfrmt('U', os.path.join(pathn, f.name))
            elif state == ' ' and self.findfilebyname(f.name).md5 != f.md5:
                self.updatefile(f.name, rev, f.mtime)
                print statfrmt('U', os.path.join(pathn, f.name))
        # checkout service files
            get_source_file(self.apiurl, self.prjname, self.name, f.name,
                targetfilename=os.path.join(self.absdir, f.name), revision=rev,
                progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
            print statfrmt('A', os.path.join(pathn, f.name))
        store_write_string(self.absdir, '_files', fm + '\n')
            self.update_local_pacmeta()
        self.update_datastructs()
        print 'At revision %s.' % self.rev
    def run_source_services(self, mode=None, singleservice=None):
        """Execute the package's source services (from its '_service' file).

        NOTE(review): the Serviceinfo construction/read and the chdir back
        to the original directory are elided in this excerpt.
        """
        curdir = os.getcwd()
        os.chdir(self.absdir) # e.g. /usr/lib/obs/service/verify_file fails if not inside the project dir.
        if self.filenamelist.count('_service') or self.filenamelist_unvers.count('_service'):
            service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
        si.getProjectGlobalServices(self.apiurl, self.prjname, self.name)
        si.execute(self.absdir, mode, singleservice)
    def prepare_filelist(self):
        """Prepare a list of files, which will be processed by process_filelist
        method. This allows easy modifications of a file list in commit
        method."""
        # NOTE(review): several original lines (todo sorting, 'ret'/'action'
        # initialisation, status branches and the return) are elided in this
        # excerpt. The block below is a runtime string literal, kept verbatim.
            self.todo = self.filenamelist + self.filenamelist_unvers
        for f in [f for f in self.todo if not os.path.isdir(f)]:
            status = self.status(f)
            ret += "%s %s %s\n" % (action, status, f)
# Edit a filelist for package \'%s\'
# l, leave = leave a file as is
# r, remove = remove a file
# a, add = add a file
# If you remove file from a list, it will be unchanged
# If you remove all, commit will be aborted""" % self.name
    def edit_filelist(self):
        """Opens a package list in editor for editing. This allows easy
        modifications of it just by simple text editing
        """
        # NOTE(review): a few original lines (tempfile import, f.close(),
        # the edit-loop header and break/else handling) are elided in this
        # excerpt.
        (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
        f = os.fdopen(fd, 'w')
        f.write(self.prepare_filelist())
        mtime_orig = os.stat(filename).st_mtime
            run_editor(filename)
            mtime = os.stat(filename).st_mtime
            if mtime_orig < mtime:
                filelist = open(filename).readlines()
                raise oscerr.UserAbort()
        return self.process_filelist(filelist)
    def process_filelist(self, filelist):
        """Process a filelist - it add/remove or leave files. This depends on
        user input. If no file is processed, it raises an ValueError
        """
        # NOTE(review): several original lines (the loop flag, len(foo)
        # branch headers, os.unlink, else branches and addfile) are elided
        # in this excerpt.
        for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
            # NOTE(review): the 'or' in the filter above also keeps non-empty
            # comment lines; presumably handled by an elided branch — verify
            # against the full source.
            foo = line.split(' ')
                action, state, name = (foo[0], ' ', foo[3])
                action, state, name = (foo[0], foo[1], foo[2])
            action = action.lower()
            if action in ('r', 'remove'):
                if self.status(name) == '?':
                    if name in self.todo:
                        self.todo.remove(name)
                    self.delete_file(name, True)
            elif action in ('a', 'add'):
                if self.status(name) != '?':
                    print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
            elif action in ('l', 'leave'):
                raise ValueError("Unknow action `%s'" % action)
            raise ValueError("Empty filelist")
    def revert(self, filename):
        """Revert *filename* to its pristine store copy and clear its pending state.

        NOTE(review): two branch headers (state 'D' and state 'C') are
        elided in this excerpt.
        """
        if not filename in self.filenamelist and not filename in self.to_be_added:
            raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % filename)
        elif filename in self.skipped:
            raise oscerr.OscIOError(None, 'file \'%s\' is marked as skipped and cannot be reverted' % filename)
        if filename in self.filenamelist and not os.path.exists(os.path.join(self.storedir, filename)):
            raise oscerr.PackageInternalError('file \'%s\' is listed in filenamelist but no storefile exists' % filename)
        state = self.status(filename)
        # files newly added ('A' / missing '!') have no pristine copy to restore
        if not (state == 'A' or state == '!' and filename in self.to_be_added):
            shutil.copyfile(os.path.join(self.storedir, filename), os.path.join(self.absdir, filename))
            self.to_be_deleted.remove(filename)
            self.write_deletelist()
            self.clear_from_conflictlist(filename)
        elif state in ('A', 'R') or state == '!' and filename in self.to_be_added:
            self.to_be_added.remove(filename)
            self.write_addlist()
# Turn `dir` into a fresh osc package working copy: create the .osc store,
# write project/package/apiurl and the empty filelist metadata, and return a
# Package object for it.  Raises OscIOError if `dir` is not a directory or
# is already an initialized working copy.
2149 def init_package(apiurl, project, package, dir, size_limit=None, meta=False, progress_obj=None):
2152 if not os.path.exists(dir):
2154 elif not os.path.isdir(dir):
2155 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
2156 if os.path.exists(os.path.join(dir, store)):
2157 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
2159 os.mkdir(os.path.join(dir, store))
2160 store_write_project(dir, project)
2161 store_write_string(dir, '_package', package + '\n')
2162 store_write_apiurl(dir, apiurl)
# presumably only written when meta/size_limit are set (guards elided here)
2164 store_write_string(dir, '_meta_mode', '')
2166 store_write_string(dir, '_size_limit', str(size_limit) + '\n')
# start with an empty server filelist
2167 store_write_string(dir, '_files', '<directory />' + '\n')
2168 store_write_string(dir, '_osclib_version', __store_version__ + '\n')
2169 return Package(dir, progress_obj=progress_obj, size_limit=size_limit)
# Abstract base for XML-backed state elements (<review />, <state />).
# Subclasses supply get_node_attrs()/get_comment(); this base provides the
# XML serialization.
2172 class AbstractState:
2174 Base class which represents state-like objects (<review />, <state />).
2176 def __init__(self, tag):
# return the attribute names to serialize — must be overridden
2179 def get_node_attrs(self):
2180 """return attributes for the tag/element"""
2181 raise NotImplementedError()
2183 def get_node_name(self):
2184 """return tag/element name"""
2187 def get_comment(self):
2188 """return data from <comment /> tag"""
2189 raise NotImplementedError()
# build an ElementTree element: one XML attribute per non-empty instance
# attribute, plus an optional <comment> child
2192 """serialize object to XML"""
2193 root = ET.Element(self.get_node_name())
2194 for attr in self.get_node_attrs():
2195 val = getattr(self, attr)
2198 if self.get_comment():
2199 ET.SubElement(root, 'comment').text = self.get_comment()
2203 """return "pretty" XML data"""
2204 root = self.to_xml()
2206 return ET.tostring(root)
# Concrete AbstractState for a <review /> element of a request; mirrors the
# node's attributes (state, by_*, who, when) and its <comment> text.
2209 class ReviewState(AbstractState):
2210 """Represents the review state in a request"""
2211 def __init__(self, review_node):
2212 if not review_node.get('state'):
2213 raise oscerr.APIError('invalid review node (state attr expected): %s' % \
2214 ET.tostring(review_node))
2215 AbstractState.__init__(self, review_node.tag)
2216 self.state = review_node.get('state')
# exactly one of the by_* attributes is normally set by the server
2217 self.by_user = review_node.get('by_user')
2218 self.by_group = review_node.get('by_group')
2219 self.by_project = review_node.get('by_project')
2220 self.by_package = review_node.get('by_package')
2221 self.who = review_node.get('who')
2222 self.when = review_node.get('when')
# only take the comment if the element exists and has non-empty text
2224 if not review_node.find('comment') is None and \
2225 review_node.find('comment').text:
2226 self.comment = review_node.find('comment').text.strip()
2228 def get_node_attrs(self):
2229 return ('state', 'by_user', 'by_group', 'by_project', 'by_package', 'who', 'when')
2231 def get_comment(self):
# Concrete AbstractState for the <state /> element of a request
# (name/who/when attributes plus optional <comment>).
2235 class RequestState(AbstractState):
2236 """Represents the state of a request"""
2237 def __init__(self, state_node):
2238 if not state_node.get('name'):
2239 raise oscerr.APIError('invalid request state node (name attr expected): %s' % \
2240 ET.tostring(state_node))
2241 AbstractState.__init__(self, state_node.tag)
2242 self.name = state_node.get('name')
2243 self.who = state_node.get('who')
2244 self.when = state_node.get('when')
# comment only when the element exists with non-empty text
2246 if not state_node.find('comment') is None and \
2247 state_node.find('comment').text:
2248 self.comment = state_node.find('comment').text.strip()
2250 def get_node_attrs(self):
2251 return ('name', 'who', 'when')
2253 def get_comment(self):
2259 Represents a <action /> element of a Request.
2260 This class is quite common so that it can be used for all different
2261 action types. Note: instances only provide attributes for their specific
2264 r = Action('set_bugowner', tgt_project='foo', person_name='buguser')
2265 # available attributes: r.type (== 'set_bugowner'), r.tgt_project (== 'foo'), r.tgt_package (== None)
2267 <action type="set_bugowner">
2268 <target project="foo" />
2269 <person name="buguser" />
2272 r = Action('delete', tgt_project='foo', tgt_package='bar')
2273 # available attributes: r.type (== 'delete'), r.tgt_project (== 'foo'), r.tgt_package (=='bar')
2275 <action type="delete">
2276 <target package="bar" project="foo" />
# Class-level schema: maps each allowed action type to the kwargs/attributes
# an Action of that type may carry.  Attribute names are "<prefix>_<attr>";
# prefix_to_elm translates abbreviated prefixes to XML element names.
2280 # allowed types + the corresponding (allowed) attributes
2281 type_args = {'submit': ('src_project', 'src_package', 'src_rev', 'tgt_project', 'tgt_package', 'opt_sourceupdate',
2282 'acceptinfo_rev', 'acceptinfo_srcmd5', 'acceptinfo_xsrcmd5', 'acceptinfo_osrcmd5',
2283 'acceptinfo_oxsrcmd5', 'opt_updatelink'),
2284 'add_role': ('tgt_project', 'tgt_package', 'person_name', 'person_role', 'group_name', 'group_role'),
2285 'set_bugowner': ('tgt_project', 'tgt_package', 'person_name'), # obsoleted by add_role
2286 'maintenance_release': ('src_project', 'src_package', 'src_rev', 'tgt_project', 'tgt_package', 'person_name'),
2287 'maintenance_incident': ('src_project', 'tgt_project', 'person_name'),
2288 'delete': ('tgt_project', 'tgt_package'),
2289 'change_devel': ('src_project', 'src_package', 'tgt_project', 'tgt_package')}
2290 # attribute prefix to element name map (only needed for abbreviated attributes)
2291 prefix_to_elm = {'src': 'source', 'tgt': 'target', 'opt': 'options'}
# Validate the action type and kwargs against type_args, then set every
# allowed attribute (missing ones default to None so callers can always
# read them without hasattr checks).
2293 def __init__(self, type, **kwargs):
2294 if not type in Action.type_args.keys():
2295 raise oscerr.WrongArgs('invalid action type: \'%s\'' % type)
2297 for i in kwargs.keys():
2298 if not i in Action.type_args[type]:
2299 raise oscerr.WrongArgs('invalid argument: \'%s\'' % i)
2300 # set all type specific attributes
2301 for i in Action.type_args[type]:
2302 if kwargs.has_key(i):
2303 setattr(self, i, kwargs[i])
2305 setattr(self, i, None)
2309 Serialize object to XML.
2310 The xml tag names and attributes are constructed from the instance's attributes.
2312 self.group_name -> tag name is "group", attribute name is "name"
2313 self.src_project -> tag name is "source" (translated via prefix_to_elm dict),
2314 attribute name is "project"
2315 Attributes prefixed with "opt_" need a special handling, the resulting xml should
2316 look like this: opt_updatelink -> <options><updatelink>value</updatelink></options>.
2317 Attributes which are "None" will be skipped.
2319 root = ET.Element('action', type=self.type)
2320 for i in Action.type_args[self.type]:
2321 prefix, attr = i.split('_', 1)
2322 val = getattr(self, i)
2325 elm = root.find(Action.prefix_to_elm.get(prefix, prefix))
2327 elm = ET.Element(Action.prefix_to_elm.get(prefix, prefix))
2330 ET.SubElement(elm, attr).text = val
2336 """return "pretty" XML data"""
2337 root = self.to_xml()
2339 return ET.tostring(root)
# Build an Action from an <action/> (or legacy <submit/>) node by mapping
# each child element's attributes back to "<prefix>_<name>" kwargs;
# <options> children become opt_* kwargs.  (Static factory — decorator is
# elided in this listing.)
2342 def from_xml(action_node):
2343 """create action from XML"""
2344 if action_node is None or \
2345 not action_node.get('type') in Action.type_args.keys() or \
2346 not action_node.tag in ('action', 'submit'):
2347 raise oscerr.WrongArgs('invalid argument')
# invert prefix_to_elm: element name -> attribute prefix
2348 elm_to_prefix = dict([(i[1], i[0]) for i in Action.prefix_to_elm.items()])
2350 for node in action_node:
2351 prefix = elm_to_prefix.get(node.tag, node.tag)
# <options> children carry their value as element text, not attributes
2353 data = [('opt_%s' % opt.tag, opt.text.strip()) for opt in node if opt.text]
2355 data = [('%s_%s' % (prefix, k), v) for k, v in node.items()]
2356 kwargs.update(dict(data))
2357 return Action(action_node.get('type'), **kwargs)
2361 """Represents a request (<request />)"""
2364 self._init_attributes()
# Reset all request attributes to empty defaults; called from __init__ and
# again from read() so a Request object can be reused.
2366 def _init_attributes(self):
2367 """initialize attributes with default values"""
2370 self.description = ''
2373 self.statehistory = []
# Populate this Request from a parsed <request/> XML root: id, state,
# actions (including legacy <submit/> children), reviews, state history,
# title and description.  Raises APIError on malformed input.
2376 def read(self, root):
2377 """read in a request"""
2378 self._init_attributes()
2379 if not root.get('id'):
2380 raise oscerr.APIError('invalid request: %s\n' % ET.tostring(root))
2381 self.reqid = root.get('id')
2382 if root.find('state') is None:
2383 raise oscerr.APIError('invalid request (state expected): %s\n' % ET.tostring(root))
2384 self.state = RequestState(root.find('state'))
2385 action_nodes = root.findall('action')
2386 if not action_nodes:
2387 # check for old-style requests
2388 for i in root.findall('submit'):
# tag legacy nodes so Action.from_xml accepts them
2389 i.set('type', 'submit')
2390 action_nodes.append(i)
2391 for action in action_nodes:
2392 self.actions.append(Action.from_xml(action))
2393 for review in root.findall('review'):
2394 self.reviews.append(ReviewState(review))
2395 for hist_state in root.findall('history'):
2396 self.statehistory.append(RequestState(hist_state))
2397 if not root.find('title') is None:
2398 self.title = root.find('title').text.strip()
2399 if not root.find('description') is None and root.find('description').text:
2400 self.description = root.find('description').text.strip()
# Append a new Action of the given type; kwargs are validated by Action.
2402 def add_action(self, type, **kwargs):
2403 """add a new action to the request"""
2404 self.actions.append(Action(type, **kwargs))
# Filter actions by type; with no types given, all actions are returned
# (that branch is elided in this listing).
2406 def get_actions(self, *types):
2408 get all actions with a specific type
2409 (if types is empty return all actions)
2413 return [i for i in self.actions if i.type in types]
# The creator is whoever triggered the first recorded state; fall back to
# the current state's author when there is no history yet.
2415 def get_creator(self):
2416 """return the creator of the request"""
2417 if len(self.statehistory):
2418 return self.statehistory[0].who
2419 return self.state.who
2422 """serialize object to XML"""
2423 root = ET.Element('request')
2424 if not self.reqid is None:
2425 root.set('id', self.reqid)
2426 for action in self.actions:
2427 root.append(action.to_xml())
2428 if not self.state is None:
2429 root.append(self.state.to_xml())
2430 for review in self.reviews:
2431 root.append(review.to_xml())
2432 for hist in self.statehistory:
2433 root.append(hist.to_xml())
2435 ET.SubElement(root, 'title').text = self.title
2436 if self.description:
2437 ET.SubElement(root, 'description').text = self.description
2441 """return "pretty" XML data"""
2442 root = self.to_xml()
2444 return ET.tostring(root)
# Render a ReviewState into display strings: picks the most specific of
# by_package/by_project/by_group/by_user and annotates who acted.
# (Static helper — decorator is elided in this listing.)
2447 def format_review(review, show_srcupdate=False):
2449 format a review depending on the reviewer's type.
2450 A dict which contains the formatted str's is returned.
2453 d = {'state': '%s:' % review.state}
2454 if review.by_package:
2455 d['by'] = '%s/%s' % (review.by_project, review.by_package)
2456 d['type'] = 'Package'
2457 elif review.by_project:
2458 d['by'] = '%s' % review.by_project
2459 d['type'] = 'Project'
2460 elif review.by_group:
2461 d['by'] = '%s' % review.by_group
2464 d['by'] = '%s' % review.by_user
# append the acting user when known (guard elided in this listing)
2467 d['by'] += '(%s)' % review.who
# Render an Action into {'type', 'source', 'target'} display strings, one
# branch per action type.  (Static helper — decorator is elided.)
2471 def format_action(action, show_srcupdate=False):
2473 format an action depending on the action's type.
2474 A dict which contains the formatted str's is returned.
# join "prj/pkg", degrading gracefully when pkg is missing (elided branch)
2476 def prj_pkg_join(prj, pkg):
2479 return '%s/%s' % (prj, pkg)
2481 d = {'type': '%s:' % action.type}
2482 if action.type == 'set_bugowner':
2483 d['source'] = action.person_name
2484 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2485 elif action.type == 'change_devel':
# for change_devel the "source" column shows the target package
2486 d['source'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2487 d['target'] = 'developed in %s' % prj_pkg_join(action.src_project, action.src_package)
2488 elif action.type == 'maintenance_incident':
2489 d['source'] = '%s ->' % action.src_project
2490 d['target'] = action.tgt_project
2491 elif action.type == 'maintenance_release':
2492 d['source'] = '%s ->' % prj_pkg_join(action.src_project, action.src_package)
2493 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2494 elif action.type == 'submit':
# optionally show the sourceupdate option, e.g. "(cleanup)"
2496 if action.opt_sourceupdate and show_srcupdate:
2497 srcupdate = '(%s)' % action.opt_sourceupdate
2498 d['source'] = '%s%s ->' % (prj_pkg_join(action.src_project, action.src_package), srcupdate)
2499 tgt_package = action.tgt_package
2500 if action.src_package == action.tgt_package:
2502 d['target'] = prj_pkg_join(action.tgt_project, tgt_package)
2503 elif action.type == 'add_role':
2505 if action.person_name and action.person_role:
2506 roles.append('person: %s as %s' % (action.person_name, action.person_role))
2507 if action.group_name and action.group_role:
2508 roles.append('group: %s as %s' % (action.group_name, action.group_role))
2509 d['source'] = ', '.join(roles)
2510 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2511 elif action.type == 'delete':
# delete has no source side; only the target is shown
2513 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
# One-request-per-paragraph summary used by "osc request list": header line,
# then one line per action, per review, the state history chain, and a
# wrapped description.
2516 def list_view(self):
2517 """return "list view" format"""
2519 lines = ['%6s State:%-10s By:%-12s When:%-19s' % (self.reqid, self.state.name, self.state.who, self.state.when)]
2520 tmpl = ' %(type)-16s %(source)-50s %(target)s'
2521 for action in self.actions:
2522 lines.append(tmpl % Request.format_action(action))
2523 tmpl = ' Review by %(type)-10s is %(state)-10s %(by)-50s'
2524 for review in self.reviews:
2525 lines.append(tmpl % Request.format_review(review))
2526 history = ['%s(%s)' % (hist.name, hist.who) for hist in self.statehistory]
2528 lines.append(' From: %s' % ' -> '.join(history))
2529 if self.description:
2530 lines.append(textwrap.fill(self.description, width=80, initial_indent=' Descr: ',
2531 subsequent_indent=' '))
2532 return '\n'.join(lines)
2535 """return "detailed" format"""
2536 lines = ['Request: #%s\n' % self.reqid]
2537 for action in self.actions:
2538 tmpl = ' %(type)-13s %(source)s %(target)s'
2539 if action.type == 'delete':
2540 # remove 1 whitespace because source is empty
2541 tmpl = ' %(type)-12s %(source)s %(target)s'
2542 lines.append(tmpl % Request.format_action(action, show_srcupdate=True))
2543 lines.append('\n\nMessage:')
2544 if self.description:
2545 lines.append(self.description)
2547 lines.append('<no message>')
2549 lines.append('\nState: %-10s %-12s %s' % (self.state.name, self.state.when, self.state.who))
2550 lines.append('Comment: %s' % (self.state.comment or '<no comment>'))
2553 tmpl = '%(state)-10s %(by)-50s %(when)-12s %(who)-20s %(comment)s'
2555 for review in reversed(self.reviews):
2556 d = {'state': review.state}
2558 d['by'] = "User: " + review.by_user
2560 d['by'] = "Group: " + review.by_group
2561 if review.by_package:
2562 d['by'] = "Package: " + review.by_project + "/" + review.by_package
2563 elif review.by_project:
2564 d['by'] = "Project: " + review.by_project
2565 d['when'] = review.when or ''
2566 d['who'] = review.who or ''
2567 d['comment'] = review.comment or ''
2568 reviews.append(tmpl % d)
2570 lines.append('\nReview: %s' % indent.join(reviews))
2572 tmpl = '%(name)-10s %(when)-12s %(who)s'
2574 for hist in reversed(self.statehistory):
2575 d = {'name': hist.name, 'when': hist.when,
2577 histories.append(tmpl % d)
2579 lines.append('\nHistory: %s' % indent.join(histories))
2581 return '\n'.join(lines)
# Order requests numerically by id (Python 2 rich-comparison fallback).
2583 def __cmp__(self, other):
2584 return cmp(int(self.reqid), int(other.reqid))
# POST this request's XML to the server's create command; the server's
# response (with the assigned id) is parsed back (assignment lines elided).
2586 def create(self, apiurl):
2587 """create a new request"""
2588 u = makeurl(apiurl, ['request'], query='cmd=create')
2589 f = http_POST(u, data=self.to_str())
2590 root = ET.fromstring(f.read())
2594 """format time as Apr 02 18:19
2596 depending on whether it is in the current year
2600 if time.localtime()[0] == time.localtime(t)[0]:
2602 return time.strftime('%b %d %H:%M',time.localtime(t))
2604 return time.strftime('%b %d %Y',time.localtime(t))
# True if `d` is an osc *project* working copy: has .osc/_project but no
# .osc/_package (which would make it a package working copy).
2607 def is_project_dir(d):
2610 return os.path.exists(os.path.join(d, store, '_project')) and not \
2611 os.path.exists(os.path.join(d, store, '_package'))
# True if `d` is an osc *package* working copy: both .osc/_project and
# .osc/_package exist.
2614 def is_package_dir(d):
2617 return os.path.exists(os.path.join(d, store, '_project')) and \
2618 os.path.exists(os.path.join(d, store, '_package'))
# Split an OBS DISTURL (obs://host/prj/repo/rev-src) via DISTURL_RE into
# (apiurl, project, source, repository, revision); hosts not starting with
# "api" are rewritten to the conventional https://api.<domain> form.
2620 def parse_disturl(disturl):
2621 """Parse a disturl, returns tuple (apiurl, project, source, repository,
2622 revision), else raises an oscerr.WrongArgs exception
2627 m = DISTURL_RE.match(disturl)
2629 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
2631 apiurl = m.group('apiurl')
2632 if apiurl.split('.')[0] != 'api':
# replace the first host label with "api" and force https
2633 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
2634 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
# Split a .../build/<prj>/<repo>/<arch>/<pkg>/_log URL via BUILDLOGURL_RE
# into (apiurl, project, package, repository, arch).
2636 def parse_buildlogurl(buildlogurl):
2637 """Parse a build log url, returns a tuple (apiurl, project, package,
2638 repository, arch), else raises oscerr.WrongArgs exception"""
2640 global BUILDLOGURL_RE
2642 m = BUILDLOGURL_RE.match(buildlogurl)
2644 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
2646 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
2649 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
2650 This is handy to allow copy/paste a project/package combination in this form.
2652 Trailing slashes are removed before the split, because the split would
2653 otherwise give an additional empty string.
# Expand '.' placeholders in a (project, package) argument list using the
# current working copy's stored project/package.  `howmany` controls whether
# only the project (1), both together (2), or best-effort both (0) get
# expanded.  NOTE(review): several branches are elided in this listing.
2661 def expand_proj_pack(args, idx=0, howmany=0):
2662 """looks for occurance of '.' at the position idx.
2663 If howmany is 2, both proj and pack are expanded together
2664 using the current directory, or none of them, if not possible.
2665 If howmany is 0, proj is expanded if possible, then, if there
2666 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
2667 expanded, if possible.
2668 If howmany is 1, only proj is expanded if possible.
2670 If args[idx] does not exists, an implicit '.' is assumed.
2671 if not enough elements up to idx exist, an error is raised.
2673 See also parseargs(args), slash_split(args), findpacs(args)
2674 All these need unification, somehow.
2677 # print args,idx,howmany
2680 raise oscerr.WrongArgs('not enough argument, expected at least %d' % idx)
2682 if len(args) == idx:
2684 if args[idx+0] == '.':
2685 if howmany == 0 and len(args) > idx+1:
2686 if args[idx+1] == '.':
2688 # remove one dot and make sure to expand both proj and pack
2693 # print args,idx,howmany
# replace the '.' with the project stored in the cwd's working copy
2695 args[idx+0] = store_read_project('.')
2698 package = store_read_package('.')
2699 args.insert(idx+1, package)
2703 package = store_read_package('.')
2704 args.insert(idx+1, package)
# Map each path to its Package via filedir_to_pac and deduplicate by package
# name (merge/append logic is elided in this listing).
2708 def findpacs(files, progress_obj=None):
2709 """collect Package objects belonging to the given files
2710 and make sure each Package is returned only once"""
2713 p = filedir_to_pac(f, progress_obj)
2716 if i.name == p.name:
# Resolve a path to a Package: a directory becomes the Package itself,
# a file becomes its containing Package with the file queued on p.todo.
2726 def filedir_to_pac(f, progress_obj=None):
2727 """Takes a working copy path, or a path to a file inside a working copy,
2728 and returns a Package object instance
2730 If the argument was a filename, add it onto the "todo" list of the Package """
2732 if os.path.isdir(f):
2734 p = Package(wd, progress_obj=progress_obj)
# plain file: its directory is the working copy root
2736 wd = os.path.dirname(f) or os.curdir
2737 p = Package(wd, progress_obj=progress_obj)
2738 p.todo = [ os.path.basename(f) ]
# Parse .osc/_files of a package working copy into an ElementTree, creating
# a default file when it is missing; raises NoWorkingCopy on bad input.
2742 def read_filemeta(dir):
2745 msg = '\'%s\' is not a valid working copy.\n' % dir
2746 if not is_package_dir(dir):
2747 raise oscerr.NoWorkingCopy(msg)
2749 filesmeta = os.path.join(dir, store, '_files')
2750 if not os.path.isfile(filesmeta):
2751 print >>sys.stderr, "Warning: file _files is missing, creating a default one"
# NOTE(review): two suspected bugs here — (a) '<directory \>' contains a
# literal backslash and is not valid XML ('<directory />' is used elsewhere,
# e.g. init_package); (b) other callers pass the wc root to
# store_write_string, not os.path.join(dir, store). Confirm against upstream.
2752 store_write_string(os.path.join(dir, store), '_files', '<directory \>')
2755 r = ET.parse(filesmeta)
2756 except SyntaxError, e:
2757 raise oscerr.NoWorkingCopy(msg +
2758 'When parsing .osc/_files, the following error was encountered:\n'
# Read .osc/<name> as a list of stripped lines; missing file yields the
# default (return statement elided in this listing).
2762 def store_readlist(dir, name):
2766 if os.path.exists(os.path.join(dir, store, name)):
2767 r = [line.strip() for line in open(os.path.join(dir, store, name), 'r')]
# Files scheduled for addition in this working copy.
2770 def read_tobeadded(dir):
2771 return store_readlist(dir, '_to_be_added')
# Files scheduled for deletion in this working copy.
2773 def read_tobedeleted(dir):
2774 return store_readlist(dir, '_to_be_deleted')
# Read the per-working-copy download size limit from .osc/_size_limit;
# non-numeric or missing values fall through (return lines elided).
2776 def read_sizelimit(dir):
2780 fname = os.path.join(dir, store, '_size_limit')
2782 if os.path.exists(fname):
2783 r = open(fname).readline().strip()
2785 if r is None or not r.isdigit():
# Files currently marked as conflicted in this working copy.
2789 def read_inconflict(dir):
2790 return store_readlist(dir, '_in_conflict')
# Normalize CLI args: empty input means "current directory" (that branch is
# elided here); otherwise just copy the args into a fresh list.
2792 def parseargs(list_of_args):
2793 """Convenience method osc's commandline argument parsing.
2795 If called with an empty tuple (or list), return a list containing the current directory.
2796 Otherwise, return a list of the arguments."""
2798 return list(list_of_args)
def statfrmt(statusletter, filename):
    """Return one status-output line: the status letter and the file name,
    separated by a single space (as printed by 'osc status')."""
    columns = ('%s' % statusletter, '%s' % filename)
    return ' '.join(columns)
# os.path.join plus stripping of a leading './' (return line elided here).
2807 def pathjoin(a, *p):
2808 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2809 path = os.path.join(a, *p)
2810 if path.startswith('./'):
# Assemble an API URL from base + path components + query.  A dict query is
# urlencoded; a list is joined with '&' unencoded (kept for backwards
# compatibility); a string passes through untouched.
2815 def makeurl(baseurl, l, query=[]):
2816 """Given a list of path compoments, construct a complete URL.
2818 Optional parameters for a query string can be given as a list, as a
2819 dictionary, or as an already assembled string.
2820 In case of a dictionary, the parameters will be urlencoded by this
2821 function. In case of a list not -- this is to be backwards compatible.
2824 if conf.config['verbose'] > 1:
2825 print 'makeurl:', baseurl, l, query
# note: query=[] default is never mutated here, only rebound — harmless
2827 if type(query) == type(list()):
2828 query = '&'.join(query)
2829 elif type(query) == type(dict()):
2830 query = urlencode(query)
# keep scheme+host from baseurl, replace path/query
2832 scheme, netloc = urlsplit(baseurl)[0:2]
2833 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
# Central HTTP entry point: builds a urllib2.Request with the method forced
# via get_method, installs the configured opener/auth for known API hosts,
# uploads `file` bodies via mmap when possible, and works around a
# m2crypto timeout limitation by toggling the global socket timeout.
# NOTE(review): numerous lines are elided in this listing (retry/finally
# structure, small-file threshold) — comments below describe only what is
# visible.
2836 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2837 """wrapper around urllib2.urlopen for error handling,
2838 and to support additional (PUT, DELETE) methods"""
2842 if conf.config['http_debug']:
2843 print >>sys.stderr, '\n\n--', method, url
2845 if method == 'POST' and not file and not data:
2846 # adding data to an urllib2 request transforms it into a POST
2849 req = urllib2.Request(url)
2850 api_host_options = {}
2851 if conf.is_known_apiurl(url):
2852 # ok no external request
2853 urllib2.install_opener(conf._build_opener(url))
2854 api_host_options = conf.get_apiurl_api_host_options(url)
2855 for header, value in api_host_options['http_headers']:
2856 req.add_header(header, value)
# urllib2 only knows GET/POST; override the method name explicitly
2858 req.get_method = lambda: method
2860 # POST requests are application/x-www-form-urlencoded per default
2861 # since we change the request into PUT, we also need to adjust the content type header
2862 if method == 'PUT' or (method == 'POST' and data):
2863 req.add_header('Content-Type', 'application/octet-stream')
2865 if type(headers) == type({}):
2866 for i in headers.keys():
2868 req.add_header(i, headers[i])
2870 if file and not data:
2871 size = os.path.getsize(file)
# small files are read whole; larger ones are mmapped (threshold elided)
2873 data = open(file, 'rb').read()
2876 filefd = open(file, 'rb')
2878 if sys.platform[:3] != 'win':
2879 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2881 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2883 except EnvironmentError, e:
2885 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2886 '\non a filesystem which does not support this.' % (e, file))
2887 elif hasattr(e, 'winerror') and e.winerror == 5:
2888 # falling back to the default io
2889 data = open(file, 'rb').read()
2893 if conf.config['debug']: print >>sys.stderr, method, url
2895 old_timeout = socket.getdefaulttimeout()
2896 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2897 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2898 socket.setdefaulttimeout(timeout)
2900 fd = urllib2.urlopen(req, data=data)
# restore the global timeout regardless of how the request went
2902 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2903 socket.setdefaulttimeout(old_timeout)
2904 if hasattr(conf.cookiejar, 'save'):
2905 conf.cookiejar.save(ignore_discard=True)
2907 if filefd: filefd.close()
# Thin per-method convenience wrappers around http_request.
2912 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2913 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2914 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2915 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
# Verify .osc/_osclib_version matches __store_version__; silently migrate
# known-compatible old versions by rewriting the file, otherwise raise.
2918 def check_store_version(dir):
2921 versionfile = os.path.join(dir, store, '_osclib_version')
2923 v = open(versionfile).read().strip()
# no/unreadable version file: this isn't an osc wc at all
2928 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2929 if os.path.exists(os.path.join(dir, '.svn')):
2930 msg = msg + '\nTry svn instead of osc.'
2931 raise oscerr.NoWorkingCopy(msg)
2933 if v != __store_version__:
2934 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2935 # version is fine, no migration needed
2936 f = open(versionfile, 'w')
2937 f.write(__store_version__ + '\n')
2940 msg = 'The osc metadata of your working copy "%s"' % dir
2941 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2942 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2943 raise oscerr.WorkingCopyWrongVersion, msg
# List package names in a project via GET /source/<prj>; optionally include
# deleted packages (query setup partially elided here).
2946 def meta_get_packagelist(apiurl, prj, deleted=None):
2950 query['deleted'] = 1
2952 u = makeurl(apiurl, ['source', prj], query)
2954 root = ET.parse(f).getroot()
2955 return [ node.get('name') for node in root.findall('entry') ]
# List files of a package via GET /source/<prj>/<pkg>.  Plain mode returns
# names; verbose mode builds File() objects carrying md5/size/mtime
# (File construction partially elided here).
2958 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2959 """return a list of file names,
2960 or a list File() instances if verbose=True"""
2966 query['rev'] = revision
# without an explicit revision, always ask for the latest
2968 query['rev'] = 'latest'
2970 u = makeurl(apiurl, ['source', prj, package], query=query)
2972 root = ET.parse(f).getroot()
2975 return [ node.get('name') for node in root.findall('entry') ]
2979 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2980 rev = root.get('rev')
2981 for node in root.findall('entry'):
2982 f = File(node.get('name'),
2984 int(node.get('size')),
2985 int(node.get('mtime')))
# List all project names on the server via GET /source (sorted); optionally
# include deleted projects (query setup partially elided here).
2991 def meta_get_project_list(apiurl, deleted=None):
2994 query['deleted'] = 1
2996 u = makeurl(apiurl, ['source'], query)
2998 root = ET.parse(f).getroot()
2999 return sorted([ node.get('name') for node in root if node.get('name')])
# Fetch a project's _meta XML as a list of raw lines (GET call elided here).
3002 def show_project_meta(apiurl, prj):
3003 url = makeurl(apiurl, ['source', prj, '_meta'])
3005 return f.readlines()
# Fetch a project's build configuration (_config) as raw lines.
3008 def show_project_conf(apiurl, prj):
3009 url = makeurl(apiurl, ['source', prj, '_config'])
3011 return f.readlines()
# Fetch the _reason a package was (re)built for a repo/arch; decorates HTTP
# errors with a readable message before re-raising (elided).
3014 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
3015 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
3019 except urllib2.HTTPError, e:
3020 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
# Fetch a package's _meta XML as raw lines; pseudo-packages (_pattern,
# _project) have no _meta, so their handling short-circuits (elided branch).
3024 def show_package_meta(apiurl, prj, pac, meta=False):
3029 # packages like _pattern and _project do not have a _meta file
3030 if pac.startswith('_pattern') or pac.startswith('_project'):
3033 url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
3036 return f.readlines()
3037 except urllib2.HTTPError, e:
3038 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
# Fetch _attribute data for a project/package(/subpackage), optionally with
# defaults and project-level values.  Path assembly is heavily elided in
# this listing.
3042 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
3044 path.append('source')
3050 path.append('_attribute')
3052 path.append(attribute)
3055 query.append("with_default=1")
3057 query.append("with_project=1")
3058 url = makeurl(apiurl, path, query)
3061 return f.readlines()
3062 except urllib2.HTTPError, e:
3063 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
# Return the devel project recorded in a package's _meta <devel> element
# (xml_node branch / no-devel fallback elided here).
3067 def show_develproject(apiurl, prj, pac, xml_node=False):
3068 m = show_package_meta(apiurl, prj, pac)
3069 node = ET.fromstring(''.join(m)).find('devel')
3070 if not node is None:
3073 return node.get('project')
# Collect repository names listed as <disable repository="..."/> under the
# package's <build> element.
3077 def show_package_disabled_repos(apiurl, prj, pac):
3078 m = show_package_meta(apiurl, prj, pac)
3079 #FIXME: don't work if all repos of a project are disabled and only some are enabled since <disable/> is empty
3081 root = ET.fromstring(''.join(m))
3082 elm = root.find('build')
3083 r = [ node.get('repository') for node in elm.findall('disable')]
# List the pattern names of a project (GET /source/<prj>/_pattern); HTTP
# errors are decorated with context before re-raising (elided).
3089 def show_pattern_metalist(apiurl, prj):
3090 url = makeurl(apiurl, ['source', prj, '_pattern'])
3094 except urllib2.HTTPError, e:
3095 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
3097 r = [ node.get('name') for node in tree.getroot() ]
# Fetch a single pattern's XML as raw lines, with contextual error message
# on HTTP failure.
3102 def show_pattern_meta(apiurl, prj, pattern):
3103 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
3106 return f.readlines()
3107 except urllib2.HTTPError, e:
3108 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
3113 """metafile that can be manipulated and is stored back after manipulation."""
# Write the fetched metadata into a temp file the user can edit, and record
# its digest so sync() can detect whether anything changed.
3114 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
3118 self.change_is_required = change_is_required
3119 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
3120 f = os.fdopen(fd, 'w')
3121 f.write(''.join(input))
# digest of the pristine content, compared against after editing
3123 self.hash_orig = dgst(self.filename)
3126 if self.change_is_required and self.hash_orig == dgst(self.filename):
3127 print 'File unchanged. Not saving.'
3128 os.unlink(self.filename)
3131 print 'Sending meta data...'
3132 # don't do any exception handling... it's up to the caller what to do in case
3134 http_PUT(self.url, file=self.filename)
3135 os.unlink(self.filename)
3141 run_editor(self.filename)
3145 except urllib2.HTTPError, e:
3146 error_help = "%d" % e.code
3147 if e.headers.get('X-Opensuse-Errorcode'):
3148 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
3150 print >>sys.stderr, 'BuildService API error:', error_help
3151 # examine the error - we can't raise an exception because we might want
3154 if '<summary>' in data:
3155 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
3156 ri = raw_input('Try again? ([y/N]): ')
3157 if ri not in ['y', 'Y']:
3163 if os.path.exists(self.filename):
3164 print 'discarding %s' % self.filename
3165 os.unlink(self.filename)
# Registry of editable metadata kinds: maps a short type name to its API
# path template and the XML template used when creating a new object
# (file_ext entries and the per-type closing braces are elided here).
3168 # different types of metadata
3169 metatypes = { 'prj': { 'path': 'source/%s/_meta',
3170 'template': new_project_templ,
3173 'pkg': { 'path' : 'source/%s/%s/_meta',
3174 'template': new_package_templ,
3177 'attribute': { 'path' : 'source/%s/%s/_meta',
3178 'template': new_attribute_templ,
3181 'prjconf': { 'path': 'source/%s/_config',
3185 'user': { 'path': 'person/%s',
3186 'template': new_user_template,
3189 'pattern': { 'path': 'source/%s/_pattern/%s',
3190 'template': new_pattern_template,
# Fetch existing metadata of the given type; on 404 with create_new set,
# fall back to the type's template filled with template_args.
# (Signature parameters and parts of the body are elided in this listing.)
3195 def meta_exists(metatype,
3204 apiurl = conf.config['apiurl']
3205 url = make_meta_url(metatype, path_args, apiurl)
3207 data = http_GET(url).readlines()
3208 except urllib2.HTTPError, e:
3209 if e.code == 404 and create_new:
3210 data = metatypes[metatype]['template']
3212 data = StringIO(data % template_args).readlines()
# Build the API URL for a metadata type by filling its path template with
# path_args; force=True adds force=1 to the query.
3218 def make_meta_url(metatype, path_args=None, apiurl=None, force=False):
3222 apiurl = conf.config['apiurl']
3223 if metatype not in metatypes.keys():
3224 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
3225 path = metatypes[metatype]['path']
3228 path = path % path_args
3232 query = { 'force': '1' }
3234 return makeurl(apiurl, [path], query)
# High-level "edit metadata" flow: fetch (or template-create) the data,
# wrap it in a metafile and hand it to the editor/sync loop.
# (Signature parameters and parts of the body are elided in this listing.)
3237 def edit_meta(metatype,
3243 change_is_required=False,
3249 apiurl = conf.config['apiurl']
3251 data = meta_exists(metatype,
# prjconf always exists on the server, so a 404 there means unknown project
3254 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
3258 change_is_required = True
3260 url = make_meta_url(metatype, path_args, apiurl, force)
3261 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
# Fetch the file-list XML of a package revision, honoring link expansion,
# explicit linkrev (or the configured linkcontrol 'base'), link repair and
# meta mode (several query toggles are elided in this listing).
3269 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, meta=False):
3272 query['rev'] = revision
3274 query['rev'] = 'latest'
3276 query['linkrev'] = linkrev
3277 elif conf.config['linkcontrol']:
3278 query['linkrev'] = 'base'
3284 query['emptylink'] = 1
3285 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
# The srcmd5 attribute of the package's file-list root element.
3288 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False):
3289 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta)
3290 return ET.fromstring(''.join(m)).get('srcmd5')
# Expanded srcmd5 for source-link packages: inspect the <linkinfo> element
# and raise LinkExpandError when the link failed to expand
# (Linkinfo handling lines are elided in this listing).
3293 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False):
3294 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta)
3296 # only source link packages have a <linkinfo> element.
3297 li_node = ET.fromstring(''.join(m)).find('linkinfo')
3305 raise oscerr.LinkExpandError(prj, pac, li.error)
# The rev attribute of the package's file-list root element.
3309 def show_upstream_rev(apiurl, prj, pac, revision=None, expand=False, linkrev=None, meta=False):
3310 m = show_files_meta(apiurl, prj, pac, revision=revision, expand=expand, linkrev=linkrev, meta=meta)
3311 return ET.fromstring(''.join(m)).get('rev')
3314 def read_meta_from_spec(specfile, *args):
3315 import codecs, locale, re
3317 Read tags and sections from spec file. To read out
3318 a tag the passed argument mustn't end with a colon. To
3319 read out a section the passed argument must start with
3321 This method returns a dictionary which contains the