1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.130git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
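# Illustrative examples of what these patterns match (hypothetical values,
# shown only to document the capture groups):
#   DISTURL_RE:     obs://build.opensuse.org/openSUSE:Factory/standard/<srcmd5>-osc
#                   -> bs, apiurl, project, repository, revision, source
#   BUILDLOGURL_RE: https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/osc/_log
#                   -> apiurl, project, repository, arch, package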
38 new_project_templ = """\
39 <project name="%(name)s">
41 <title></title> <!-- Short title of NewProject -->
43 <!-- This is for a longer description of the purpose of the project -->
46 <person role="maintainer" userid="%(user)s" />
47 <person role="bugowner" userid="%(user)s" />
48 <!-- remove this block to publish your packages on the mirrors -->
59 <!-- remove this comment to enable one or more build targets
61 <repository name="openSUSE_Factory">
62 <path project="openSUSE:Factory" repository="standard" />
66 <repository name="openSUSE_11.2">
67 <path project="openSUSE:11.2" repository="standard"/>
71 <repository name="openSUSE_11.1">
72 <path project="openSUSE:11.1" repository="standard"/>
76 <repository name="Fedora_12">
77 <path project="Fedora:12" repository="standard" />
81 <repository name="SLE_11">
82 <path project="SUSE:SLE-11" repository="standard" />
91 new_package_templ = """\
92 <package name="%(name)s">
94 <title></title> <!-- Title of package -->
97 <!-- for long description -->
100 <person role="maintainer" userid="%(user)s"/>
101 <person role="bugowner" userid="%(user)s"/>
103 <url>PUT_UPSTREAM_URL_HERE</url>
107 use one of the examples below to disable building of this package
108 on a certain architecture, in a certain repository,
109 or a combination thereof:
111 <disable arch="x86_64"/>
112 <disable repository="SUSE_SLE-10"/>
113 <disable repository="SUSE_SLE-10" arch="x86_64"/>
115 Possible sections where you can use the tags above:
125 Please have a look at:
126 http://en.opensuse.org/Restricted_formats
127 Packages containing formats listed there are NOT allowed to
128 be packaged in the openSUSE Buildservice and will be deleted!
135 new_attribute_templ = """\
137 <attribute namespace="" name="">
143 new_user_template = """\
145 <login>%(user)s</login>
146 <email>PUT_EMAIL_ADDRESS_HERE</email>
147 <realname>PUT_REAL_NAME_HERE</realname>
149 <project name="home:%(user)s"/>
165 new_pattern_template = """\
166 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
172 buildstatus_symbols = {'succeeded': '.',
174 'expansion error': 'U', # obsolete with OBS 2.0
188 # os.path.samefile is available only under Unix
189 def os_path_samefile(path1, path2):
191 return os.path.samefile(path1, path2)
193 return os.path.realpath(path1) == os.path.realpath(path2)
196 """represent a file, including its metadata"""
197 def __init__(self, name, md5, size, mtime, skipped=False):
202 self.skipped = skipped
210 """Source service content
213 """creates an empty serviceinfo instance"""
216 def read(self, serviceinfo_node, append=False):
217 """read in the source services <services> element passed as
220 if serviceinfo_node == None:
224 services = serviceinfo_node.findall('service')
226 for service in services:
227 name = service.get('name')
228 mode = service.get('mode', None)
229 data = { 'name' : name, 'mode' : '' }
233 for param in service.findall('param'):
234 option = param.get('name', None)
236 name += " --" + option + " '" + value + "'"
237 data['command'] = name
238 self.services.append(data)
240 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
241 raise oscerr.APIError(msg)
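# For reference, a hypothetical _service file that read() accepts would look like:
#   <services>
#     <service name="download_url" mode="disabled">
#       <param name="host">download.example.org</param>
#       <param name="path">/pub/foo-1.0.tar.bz2</param>
#     </service>
#   </services>
# Each <service> element becomes a dict with 'name', 'mode' and 'command', where
# every <param> is appended to the command as "--<param name> '<value>'".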
243 def getProjectGlobalServices(self, apiurl, project, package):
244 # get all project-wide services in one file; we don't store it yet
245 u = makeurl(apiurl, ['source', project, package], query='cmd=getprojectservices')
248 root = ET.parse(f).getroot()
249 self.read(root, True)
250 except urllib2.HTTPError, e:
254 def addVerifyFile(self, serviceinfo_node, filename):
257 f = open(filename, 'r')
258 digest = hashlib.sha256(f.read()).hexdigest()
262 s = ET.Element( "service", name="verify_file" )
263 ET.SubElement(s, "param", name="file").text = filename
264 ET.SubElement(s, "param", name="verifier").text = "sha256"
265 ET.SubElement(s, "param", name="checksum").text = digest
271 def addDownloadUrl(self, serviceinfo_node, url_string):
272 from urlparse import urlparse
273 url = urlparse( url_string )
274 protocol = url.scheme
279 s = ET.Element( "service", name="download_url" )
280 ET.SubElement(s, "param", name="protocol").text = protocol
281 ET.SubElement(s, "param", name="host").text = host
282 ET.SubElement(s, "param", name="path").text = path
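# With a made-up url_string such as "http://download.example.org/pub/foo-1.0.tar.bz2"
# the fragment appended to the <services> document would look roughly like:
#   <service name="download_url">
#     <param name="protocol">http</param>
#     <param name="host">download.example.org</param>
#     <param name="path">/pub/foo-1.0.tar.bz2</param>
#   </service>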
287 def addGitUrl(self, serviceinfo_node, url_string):
289 s = ET.Element( "service", name="tar_scm" )
290 ET.SubElement(s, "param", name="url").text = url_string
291 ET.SubElement(s, "param", name="scm").text = "git"
295 def addRecompressTar(self, serviceinfo_node):
297 s = ET.Element( "service", name="recompress" )
298 ET.SubElement(s, "param", name="file").text = "*.tar"
299 ET.SubElement(s, "param", name="compression").text = "bz2"
303 def execute(self, dir, callmode = None, singleservice = None):
306 # cleanup existing generated files
307 for filename in os.listdir(dir):
308 if filename.startswith('_service:') or filename.startswith('_service_'):
309 os.unlink(os.path.join(dir, filename))
311 allservices = self.services or []
312 if singleservice and not singleservice in [s['name'] for s in allservices]:
313 # fall back to the manually specified singleservice if it is not part of the _service file
314 data = { 'name' : singleservice, 'command' : singleservice, 'mode' : '' }
318 for service in allservices:
319 if singleservice and service['name'] != singleservice:
321 if service['mode'] == "disabled" and callmode != "disabled":
323 if service['mode'] != "disabled" and callmode == "disabled":
325 call = service['command']
326 temp_dir = tempfile.mkdtemp()
327 name = call.split(None, 1)[0]
328 if not os.path.exists("/usr/lib/obs/service/"+name):
329 msg = "ERROR: service is not installed!\n"
330 msg += "Maybe try this: zypper in obs-service-" + name
331 raise oscerr.APIError(msg)
332 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
333 if conf.config['verbose'] > 1:
334 print "Run source service:", c
335 ret = subprocess.call(c, shell=True)
337 print "ERROR: service call failed: " + c
338 # FIXME: addDownloadUrlService calls si.execute after
339 # updating _services.
340 print " (your _services file may be corrupt now)"
342 if service['mode'] == "trylocal" or service['mode'] == "localonly" or callmode == "local":
343 for filename in os.listdir(temp_dir):
344 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, filename) )
346 for filename in os.listdir(temp_dir):
347 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, "_service:"+name+":"+filename) )
351 """linkinfo metadata (which is part of the xml representing a directory
354 """creates an empty linkinfo instance"""
364 def read(self, linkinfo_node):
365 """read in the linkinfo metadata from the <linkinfo> element passed as
367 If the passed element is None, the method does nothing.
369 if linkinfo_node == None:
371 self.project = linkinfo_node.get('project')
372 self.package = linkinfo_node.get('package')
373 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
374 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
375 self.srcmd5 = linkinfo_node.get('srcmd5')
376 self.error = linkinfo_node.get('error')
377 self.rev = linkinfo_node.get('rev')
378 self.baserev = linkinfo_node.get('baserev')
381 """returns True if the linkinfo is not empty, otherwise False"""
382 if self.xsrcmd5 or self.lsrcmd5:
386 def isexpanded(self):
387 """returns True if the package is an expanded link"""
388 if self.lsrcmd5 and not self.xsrcmd5:
393 """returns True if the link is in error state (could not be applied)"""
399 """return an informative string representation"""
400 if self.islink() and not self.isexpanded():
401 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
402 % (self.project, self.package, self.xsrcmd5, self.rev)
403 elif self.islink() and self.isexpanded():
405 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
406 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
408 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
409 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
414 # http://effbot.org/zone/element-lib.htm#prettyprint
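# For example (illustrative): given a tree equivalent to
#   <project name="foo"><title></title></project>
# xmlindent() fills in the .text/.tail whitespace in place so that it
# serializes roughly as
#   <project name="foo">
#     <title></title>
#   </project>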
415 def xmlindent(elem, level=0):
418 if not elem.text or not elem.text.strip():
421 xmlindent(e, level+1)
422 if not e.tail or not e.tail.strip():
424 if not e.tail or not e.tail.strip():
427 if level and (not elem.tail or not elem.tail.strip()):
431 """represent a project directory, holding packages"""
432 REQ_STOREFILES = ('_project', '_apiurl')
433 if conf.config['do_package_tracking']:
434 REQ_STOREFILES += ('_packages',)
435 def __init__(self, dir, getPackageList=True, progress_obj=None, wc_check=True):
438 self.absdir = os.path.abspath(dir)
439 self.progress_obj = progress_obj
441 self.name = store_read_project(self.dir)
442 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
446 dirty_files = self.wc_check()
448 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
449 'Please run \'osc repairwc %s\' and check the state\n' \
450 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
451 raise oscerr.WorkingCopyInconsistent(self.name, None, dirty_files, msg)
454 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
456 self.pacs_available = []
458 if conf.config['do_package_tracking']:
459 self.pac_root = self.read_packages().getroot()
460 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
461 self.pacs_excluded = [ i for i in os.listdir(self.dir)
462 for j in conf.config['exclude_glob']
463 if fnmatch.fnmatch(i, j) ]
464 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
465 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
466 # in the self.pacs_broken list
467 self.pacs_broken = []
468 for p in self.pacs_have:
469 if not os.path.isdir(os.path.join(self.absdir, p)):
470 # all states will be replaced with the '!'-state
471 # (unless it is already marked as deleted ('D'-state))
472 self.pacs_broken.append(p)
474 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
476 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
481 for fname in Project.REQ_STOREFILES:
482 if not os.path.exists(os.path.join(self.absdir, store, fname)):
483 dirty_files.append(fname)
486 def wc_repair(self, apiurl=None):
488 if not os.path.exists(os.path.join(self.dir, store, '_apiurl')) or apiurl:
490 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
491 'no \'apiurl\' was passed to wc_repair'
492 # hmm should we raise oscerr.WrongArgs?
493 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
495 conf.parse_apisrv_url(None, apiurl)
496 store_write_apiurl(self.dir, apiurl)
497 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
499 def checkout_missing_pacs(self, expand_link=False):
500 for pac in self.pacs_missing:
502 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
503 # pac is not under version control but a local file/dir exists
504 msg = 'can\'t add package \'%s\': Object already exists' % pac
505 raise oscerr.PackageExists(self.name, pac, msg)
507 print 'checking out new package %s' % pac
508 checkout_package(self.apiurl, self.name, pac, \
509 pathname=getTransActPath(os.path.join(self.dir, pac)), \
510 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
512 def status(self, pac):
513 exists = os.path.exists(os.path.join(self.absdir, pac))
514 st = self.get_state(pac)
515 if st is None and exists:
518 raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % pac)
519 elif st in ('A', ' ') and not exists:
521 elif st == 'D' and not exists:
526 def get_status(self, *exclude_states):
528 for pac in self.pacs_have:
529 st = self.status(pac)
530 if not st in exclude_states:
531 res.append((st, pac))
532 if not '?' in exclude_states:
533 res.extend([('?', pac) for pac in self.pacs_unvers])
536 def get_pacobj(self, pac, *pac_args, **pac_kwargs):
538 st = self.status(pac)
539 if st in ('?', '!') or st == 'D' and not os.path.exists(os.path.join(self.dir, pac)):
541 return Package(os.path.join(self.dir, pac), *pac_args, **pac_kwargs)
542 except oscerr.OscIOError:
545 def set_state(self, pac, state):
546 node = self.get_package_node(pac)
548 self.new_package_entry(pac, state)
550 node.set('state', state)
552 def get_package_node(self, pac):
553 for node in self.pac_root.findall('package'):
554 if pac == node.get('name'):
558 def del_package_node(self, pac):
559 for node in self.pac_root.findall('package'):
560 if pac == node.get('name'):
561 self.pac_root.remove(node)
563 def get_state(self, pac):
564 node = self.get_package_node(pac)
566 return node.get('state')
570 def new_package_entry(self, name, state):
571 ET.SubElement(self.pac_root, 'package', name=name, state=state)
573 def read_packages(self):
576 packages_file = os.path.join(self.absdir, store, '_packages')
577 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
578 return ET.parse(packages_file)
580 # scan project for existing packages and migrate them
582 for data in os.listdir(self.dir):
583 pac_dir = os.path.join(self.absdir, data)
584 # we cannot use self.pacs_available because we cannot guarantee that the package list
585 # was fetched from the server
586 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
587 and Package(pac_dir).name == data:
588 cur_pacs.append(ET.Element('package', name=data, state=' '))
589 store_write_initial_packages(self.absdir, self.name, cur_pacs)
590 return ET.parse(os.path.join(self.absdir, store, '_packages'))
592 def write_packages(self):
593 xmlindent(self.pac_root)
594 store_write_string(self.absdir, '_packages', ET.tostring(self.pac_root))
596 def addPackage(self, pac):
598 for i in conf.config['exclude_glob']:
599 if fnmatch.fnmatch(pac, i):
600 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
601 raise oscerr.OscIOError(None, msg)
602 state = self.get_state(pac)
603 if state == None or state == 'D':
604 self.new_package_entry(pac, 'A')
605 self.write_packages()
606 # sometimes the new pac doesn't exist in the list because
607 # it would take too much time to update all data structs regularly
608 if pac in self.pacs_unvers:
609 self.pacs_unvers.remove(pac)
611 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
613 def delPackage(self, pac, force = False):
614 state = self.get_state(pac.name)
616 if state == ' ' or state == 'D':
618 for filename in pac.filenamelist + pac.filenamelist_unvers:
619 filestate = pac.status(filename)
620 if filestate == 'M' or filestate == 'C' or \
621 filestate == 'A' or filestate == '?':
624 del_files.append(filename)
625 if can_delete or force:
626 for filename in del_files:
627 pac.delete_localfile(filename)
628 if pac.status(filename) != '?':
629 pac.delete_storefile(filename)
630 # this is not really necessary
631 pac.put_on_deletelist(filename)
632 print statfrmt('D', getTransActPath(os.path.join(pac.dir, filename)))
633 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
634 pac.write_deletelist()
635 self.set_state(pac.name, 'D')
636 self.write_packages()
638 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
641 delete_dir(pac.absdir)
642 self.del_package_node(pac.name)
643 self.write_packages()
644 print statfrmt('D', pac.name)
646 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
648 print 'package is not under version control'
650 print 'unsupported state'
652 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
655 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
657 # we need to make sure that the _packages file will be written (even if an exception
660 # update complete project
661 # packages which no longer exist upstream
662 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
664 for pac in upstream_del:
665 p = Package(os.path.join(self.dir, pac))
666 self.delPackage(p, force = True)
667 delete_storedir(p.storedir)
672 self.pac_root.remove(self.get_package_node(p.name))
673 self.pacs_have.remove(pac)
675 for pac in self.pacs_have:
676 state = self.get_state(pac)
677 if pac in self.pacs_broken:
678 if self.get_state(pac) != 'A':
679 checkout_package(self.apiurl, self.name, pac,
680 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
681 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
684 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
686 if expand_link and p.islink() and not p.isexpanded():
689 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
691 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
694 rev = p.linkinfo.xsrcmd5
695 print 'Expanding to rev', rev
696 elif unexpand_link and p.islink() and p.isexpanded():
697 rev = p.linkinfo.lsrcmd5
698 print 'Unexpanding to rev', rev
699 elif p.islink() and p.isexpanded():
701 print 'Updating %s' % p.name
702 p.update(rev, service_files)
706 # TODO: Package::update has to be fixed to behave like svn does
707 if pac in self.pacs_broken:
708 checkout_package(self.apiurl, self.name, pac,
709 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
710 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
712 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
713 elif state == 'A' and pac in self.pacs_available:
714 # file/dir called pac already exists and is under version control
715 msg = 'can\'t add package \'%s\': Object already exists' % pac
716 raise oscerr.PackageExists(self.name, pac, msg)
721 print 'unexpected state.. package \'%s\'' % pac
723 self.checkout_missing_pacs(expand_link=not unexpand_link)
725 self.write_packages()
727 def validate_pacs(self, validators, verbose_validation=False, *pacs):
729 for pac in self.pacs_broken:
730 if self.get_state(pac) != 'D':
731 msg = 'validation failed: package \'%s\' is missing' % pac
732 raise oscerr.PackageMissing(self.name, pac, msg)
733 pacs = self.pacs_have
735 if pac in self.pacs_broken and self.get_state(pac) != 'D':
736 msg = 'validation failed: package \'%s\' is missing' % pac
737 raise oscerr.PackageMissing(self.name, pac, msg)
738 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
741 p = Package(os.path.join(self.dir, pac))
742 p.validate(validators, verbose_validation)
744 def commit(self, pacs = (), msg = '', files = {}, validators_dir = None, verbose_validation = False):
749 if files.has_key(pac):
751 state = self.get_state(pac)
753 self.commitNewPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
755 self.commitDelPackage(pac)
757 # display the correct dir when sending the changes
758 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
761 p = Package(os.path.join(self.dir, pac))
763 p.commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
764 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
765 print 'osc: \'%s\' is not under version control' % pac
766 elif pac in self.pacs_broken:
767 print 'osc: \'%s\' package not found' % pac
769 self.commitExtPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
771 self.write_packages()
773 # if we have packages marked as '!' we cannot commit
774 for pac in self.pacs_broken:
775 if self.get_state(pac) != 'D':
776 msg = 'commit failed: package \'%s\' is missing' % pac
777 raise oscerr.PackageMissing(self.name, pac, msg)
779 for pac in self.pacs_have:
780 state = self.get_state(pac)
783 Package(os.path.join(self.dir, pac)).commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
785 self.commitDelPackage(pac)
787 self.commitNewPackage(pac, msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
789 self.write_packages()
791 def commitNewPackage(self, pac, msg = '', files = [], validators_dir = None, verbose_validation = False):
792 """creates and commits a new package if it does not exist on the server"""
793 if pac in self.pacs_available:
794 print 'package \'%s\' already exists' % pac
796 user = conf.get_apiurl_usr(self.apiurl)
797 edit_meta(metatype='pkg',
798 path_args=(quote_plus(self.name), quote_plus(pac)),
803 # display the correct dir when sending the changes
805 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
809 p = Package(os.path.join(self.dir, pac))
811 print statfrmt('Sending', os.path.normpath(p.dir))
812 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
813 self.set_state(pac, ' ')
816 def commitDelPackage(self, pac):
817 """deletes a package on the server and in the working copy"""
819 # display the correct dir when sending the changes
820 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
823 pac_dir = os.path.join(self.dir, pac)
824 p = Package(os.path.join(self.dir, pac))
825 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
826 delete_storedir(p.storedir)
832 pac_dir = os.path.join(self.dir, pac)
833 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
834 print statfrmt('Deleting', getTransActPath(pac_dir))
835 delete_package(self.apiurl, self.name, pac)
836 self.del_package_node(pac)
838 def commitExtPackage(self, pac, msg, files = [], validators_dir=None, verbose_validation=False):
839 """commits a package from an external project"""
840 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
843 pac_path = os.path.join(self.dir, pac)
845 project = store_read_project(pac_path)
846 package = store_read_package(pac_path)
847 apiurl = store_read_apiurl(pac_path, defaulturl=False)
848 if not meta_exists(metatype='pkg',
849 path_args=(quote_plus(project), quote_plus(package)),
850 template_args=None, create_new=False, apiurl=apiurl):
851 user = conf.get_apiurl_usr(self.apiurl)
852 edit_meta(metatype='pkg',
853 path_args=(quote_plus(project), quote_plus(package)),
854 template_args=({'name': pac, 'user': user}), apiurl=apiurl)
855 p = Package(pac_path)
857 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
861 r.append('*****************************************************')
862 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
863 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
864 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
865 r.append('*****************************************************')
869 def init_project(apiurl, dir, project, package_tracking=True, getPackageList=True, progress_obj=None, wc_check=True):
872 if not os.path.exists(dir):
873 # use makedirs (checkout_no_colon config option might be enabled)
875 elif not os.path.isdir(dir):
876 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
877 if os.path.exists(os.path.join(dir, store)):
878 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
880 os.mkdir(os.path.join(dir, store))
882 store_write_project(dir, project)
883 store_write_apiurl(dir, apiurl)
885 store_write_initial_packages(dir, project, [])
886 return Project(dir, getPackageList, progress_obj, wc_check)
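# A project working copy initialized by init_project() ends up looking roughly
# like this (assuming the usual '.osc' store directory):
#   <dir>/
#       .osc/_project    project name
#       .osc/_apiurl     API server URL
#       .osc/_packages   only written when package tracking is enabled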
890 """represent a package (its directory) and read/keep/write its metadata"""
892 # should _meta be a required file?
893 REQ_STOREFILES = ('_project', '_package', '_apiurl', '_files', '_osclib_version')
894 OPT_STOREFILES = ('_to_be_added', '_to_be_deleted', '_in_conflict', '_in_update',
895 '_in_commit', '_meta', '_meta_mode', '_frozenlink', '_pulled', '_linkrepair',
896 '_size_limit', '_commit_msg')
898 def __init__(self, workingdir, progress_obj=None, size_limit=None, wc_check=True):
901 self.dir = workingdir
902 self.absdir = os.path.abspath(self.dir)
903 self.storedir = os.path.join(self.absdir, store)
904 self.progress_obj = progress_obj
905 self.size_limit = size_limit
906 if size_limit == 0:
907 self.size_limit = None
909 check_store_version(self.dir)
911 self.prjname = store_read_project(self.dir)
912 self.name = store_read_package(self.dir)
913 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
915 self.update_datastructs()
918 dirty_files = self.wc_check()
920 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
921 'Please run \'osc repairwc %s\' (Note this might _remove_\n' \
922 'files from the .osc/ dir). Please check the state\n' \
923 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
924 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, dirty_files, msg)
930 for fname in self.filenamelist:
931 if not os.path.exists(os.path.join(self.storedir, fname)) and not fname in self.skipped:
932 dirty_files.append(fname)
933 for fname in Package.REQ_STOREFILES:
934 if not os.path.isfile(os.path.join(self.storedir, fname)):
935 dirty_files.append(fname)
936 for fname in os.listdir(self.storedir):
937 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
938 fname.startswith('_build'):
940 elif fname in self.filenamelist and fname in self.skipped:
941 dirty_files.append(fname)
942 elif not fname in self.filenamelist:
943 dirty_files.append(fname)
944 for fname in self.to_be_deleted[:]:
945 if not fname in self.filenamelist:
946 dirty_files.append(fname)
947 for fname in self.in_conflict[:]:
948 if not fname in self.filenamelist:
949 dirty_files.append(fname)
952 def wc_repair(self, apiurl=None):
953 if not os.path.exists(os.path.join(self.storedir, '_apiurl')) or apiurl:
955 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
956 'no \'apiurl\' was passed to wc_repair'
957 # hmm should we raise oscerr.WrongArgs?
958 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
960 conf.parse_apisrv_url(None, apiurl)
961 store_write_apiurl(self.dir, apiurl)
962 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
963 # all files which are present in the filelist have to exist in the storedir
964 for f in self.filelist:
965 # XXX: should we also check the md5?
966 if not os.path.exists(os.path.join(self.storedir, f.name)) and not f.name in self.skipped:
967 # if get_source_file fails we're screwed up...
968 get_source_file(self.apiurl, self.prjname, self.name, f.name,
969 targetfilename=os.path.join(self.storedir, f.name), revision=self.rev,
971 for fname in os.listdir(self.storedir):
972 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
973 fname.startswith('_build'):
975 elif not fname in self.filenamelist or fname in self.skipped:
976 # this file does not belong to the storedir so remove it
977 os.unlink(os.path.join(self.storedir, fname))
978 for fname in self.to_be_deleted[:]:
979 if not fname in self.filenamelist:
980 self.to_be_deleted.remove(fname)
981 self.write_deletelist()
982 for fname in self.in_conflict[:]:
983 if not fname in self.filenamelist:
984 self.in_conflict.remove(fname)
985 self.write_conflictlist()
988 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
989 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
992 def addfile(self, n):
993 if not os.path.exists(os.path.join(self.absdir, n)):
994 raise oscerr.OscIOError(None, 'error: file \'%s\' does not exist' % n)
995 if n in self.to_be_deleted:
996 self.to_be_deleted.remove(n)
997 # self.delete_storefile(n)
998 self.write_deletelist()
999 elif n in self.filenamelist or n in self.to_be_added:
1000 raise oscerr.PackageFileConflict(self.prjname, self.name, n, 'osc: warning: \'%s\' is already under version control' % n)
1001 # shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
1003 pathname = os.path.join(self.dir, n)
1006 self.to_be_added.append(n)
1007 self.write_addlist()
1008 print statfrmt('A', pathname)
1010 def delete_file(self, n, force=False):
1011 """deletes a file if possible and marks the file as deleted"""
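# returns a (deleted, state) tuple: 'deleted' roughly indicates whether a
# working copy file was removed, 'state' is the status the file had beforehand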
1014 state = self.status(n)
1015 except IOError, ioe:
1018 if state in ['?', 'A', 'M', 'R', 'C'] and not force:
1019 return (False, state)
1020 # special handling for skipped files: if file exists, simply delete it
1022 exists = os.path.exists(os.path.join(self.dir, n))
1023 self.delete_localfile(n)
1024 return (exists, 'S')
1026 self.delete_localfile(n)
1027 was_added = n in self.to_be_added
1028 if state in ('A', 'R') or state == '!' and was_added:
1029 self.to_be_added.remove(n)
1030 self.write_addlist()
1032 # don't remove "merge files" (*.r, *.mine...)
1033 # that's why we don't use clear_from_conflictlist
1034 self.in_conflict.remove(n)
1035 self.write_conflictlist()
1036 if not state in ('A', '?') and not (state == '!' and was_added):
1037 self.put_on_deletelist(n)
1038 self.write_deletelist()
1039 return (True, state)
1041 def delete_storefile(self, n):
1042 try: os.unlink(os.path.join(self.storedir, n))
1045 def delete_localfile(self, n):
1046 try: os.unlink(os.path.join(self.dir, n))
1049 def put_on_deletelist(self, n):
1050 if n not in self.to_be_deleted:
1051 self.to_be_deleted.append(n)
1053 def put_on_conflictlist(self, n):
1054 if n not in self.in_conflict:
1055 self.in_conflict.append(n)
1057 def put_on_addlist(self, n):
1058 if n not in self.to_be_added:
1059 self.to_be_added.append(n)
1061 def clear_from_conflictlist(self, n):
1062 """delete an entry from the file, and remove the file if it would be empty"""
1063 if n in self.in_conflict:
1065 filename = os.path.join(self.dir, n)
1066 storefilename = os.path.join(self.storedir, n)
1067 myfilename = os.path.join(self.dir, n + '.mine')
1068 if self.islinkrepair() or self.ispulled():
1069 upfilename = os.path.join(self.dir, n + '.new')
1071 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1074 os.unlink(myfilename)
1075 # the working copy may have been updated since the conflict arose, so the
1076 # .r* file may no longer exist; in that case we don't care
1077 os.unlink(upfilename)
1078 if self.islinkrepair() or self.ispulled():
1079 os.unlink(os.path.join(self.dir, n + '.old'))
1083 self.in_conflict.remove(n)
1085 self.write_conflictlist()
1087 # XXX: this isn't used at all
1088 def write_meta_mode(self):
1089 # XXX: the "elif" is somewhat of a contradiction (with the current and the old
1090 # implementation it's not possible to "leave" meta mode again, except by
1091 # modifying pac.meta directly, which is really ugly)
1093 store_write_string(self.absdir, '_meta_mode', '')
1094 elif self.ismetamode():
1095 os.unlink(os.path.join(self.storedir, '_meta_mode'))
1097 def write_sizelimit(self):
1098 if self.size_limit and self.size_limit <= 0:
1100 os.unlink(os.path.join(self.storedir, '_size_limit'))
1104 store_write_string(self.absdir, '_size_limit', str(self.size_limit) + '\n')
1106 def write_addlist(self):
1107 self.__write_storelist('_to_be_added', self.to_be_added)
1109 def write_deletelist(self):
1110 self.__write_storelist('_to_be_deleted', self.to_be_deleted)
1112 def delete_source_file(self, n):
1113 """delete a local source file"""
1114 self.delete_localfile(n)
1115 self.delete_storefile(n)
1117 def delete_remote_source_file(self, n):
1118 """delete a remote source file (i.e. on the server)"""
1119 query = 'rev=upload'
1120 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1123 def put_source_file(self, n, copy_only=False):
1124 cdir = os.path.join(self.storedir, '_in_commit')
1126 if not os.path.isdir(cdir):
1128 query = 'rev=repository'
1129 tmpfile = os.path.join(cdir, n)
1130 shutil.copyfile(os.path.join(self.dir, n), tmpfile)
1131 # escaping '+' in the URL path (note: not in the URL query string) is
1132 # only a workaround for ruby on rails, which swallows it otherwise
1134 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1135 http_PUT(u, file = os.path.join(self.dir, n))
1136 os.rename(tmpfile, os.path.join(self.storedir, n))
1138 if os.path.isdir(cdir):
1140 if n in self.to_be_added:
1141 self.to_be_added.remove(n)
1143 def __generate_commitlist(self, todo_send):
1144 root = ET.Element('directory')
1145 keys = todo_send.keys()
1148 ET.SubElement(root, 'entry', name=i, md5=todo_send[i])
1151 def __send_commitlog(self, msg, local_filelist):
1152 """send the commitlog and the local filelist to the server"""
1153 query = {'cmd' : 'commitfilelist',
1154 'user' : conf.get_apiurl_usr(self.apiurl),
1156 if self.islink() and self.isexpanded():
1157 query['keeplink'] = '1'
1158 if conf.config['linkcontrol'] or self.isfrozen():
1159 query['linkrev'] = self.linkinfo.srcmd5
1161 query['repairlink'] = '1'
1162 query['linkrev'] = self.get_pulled_srcmd5()
1163 if self.islinkrepair():
1164 query['repairlink'] = '1'
1165 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
1166 f = http_POST(u, data=ET.tostring(local_filelist))
1167 root = ET.parse(f).getroot()
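# The POSTed body is the <directory> document built by __generate_commitlist(),
# e.g. (hypothetical entry):
#   <directory>
#     <entry name="foo.spec" md5="0123456789abcdef0123456789abcdef"/>
#   </directory>
# The server answers with a <directory> of its own; __get_todo_send() below
# inspects it for the files the server reports as missing (error="missing").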
1170 def __get_todo_send(self, server_filelist):
1171 """parse todo from a previous __send_commitlog call"""
1172 error = server_filelist.get('error')
1175 elif error != 'missing':
1176 raise oscerr.PackageInternalError(self.prjname, self.name,
1177 '__get_todo_send: unexpected \'error\' attr: \'%s\'' % error)
1179 for n in server_filelist.findall('entry'):
1180 name = n.get('name')
1182 raise oscerr.APIError('missing \'name\' attribute:\n%s\n' % ET.tostring(server_filelist))
1183 todo.append(n.get('name'))
1186 def validate(self, validators_dir, verbose_validation=False):
1189 if validators_dir is None or self.name.startswith('_'):
1191 for validator in sorted(os.listdir(validators_dir)):
1192 if validator.startswith('.'):
1194 fn = os.path.join(validators_dir, validator)
1195 mode = os.stat(fn).st_mode
1196 if stat.S_ISREG(mode):
1197 if verbose_validation:
1198 print 'osc runs source validator: %s' % fn
1199 p = subprocess.Popen([fn, '--verbose'], close_fds=True)
1201 p = subprocess.Popen([fn], close_fds=True)
1203 raise oscerr.ExtRuntimeError('ERROR: source_validator failed:\n%s' % p.stdout, validator)
1205 def commit(self, msg='', validators_dir=None, verbose_validation=False):
1206 # commit only if the upstream revision is the same as the working copy's
1207 upstream_rev = self.latest_rev()
1208 if self.rev != upstream_rev:
1209 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
1211 if not validators_dir is None:
1212 self.validate(validators_dir, verbose_validation)
1215 self.todo = [i for i in self.to_be_added if not i in self.filenamelist] + self.filenamelist
1217 pathn = getTransActPath(self.dir)
1222 for filename in self.filenamelist + [i for i in self.to_be_added if not i in self.filenamelist]:
1223 if filename.startswith('_service:') or filename.startswith('_service_'):
1225 st = self.status(filename)
1227 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
1229 elif filename in self.todo:
1230 if st in ('A', 'R', 'M'):
1231 todo_send[filename] = dgst(os.path.join(self.absdir, filename))
1232 real_send.append(filename)
1233 print statfrmt('Sending', os.path.join(pathn, filename))
1234 elif st in (' ', '!', 'S'):
1235 if st == '!' and filename in self.to_be_added:
1236 print 'file \'%s\' is marked as \'A\' but does not exist' % filename
1238 f = self.findfilebyname(filename)
1240 raise oscerr.PackageInternalError(self.prjname, self.name,
1241 'error: file \'%s\' with state \'%s\' is not known by meta' \
1243 todo_send[filename] = f.md5
1245 todo_delete.append(filename)
1246 print statfrmt('Deleting', os.path.join(pathn, filename))
1247 elif st in ('R', 'M', 'D', ' ', '!', 'S'):
1248 # ignore missing new file (it's not part of the current commit)
1249 if st == '!' and filename in self.to_be_added:
1251 f = self.findfilebyname(filename)
1253 raise oscerr.PackageInternalError(self.prjname, self.name,
1254 'error: file \'%s\' with state \'%s\' is not known by meta' \
1256 todo_send[filename] = f.md5
1258 if not real_send and not todo_delete and not self.islinkrepair() and not self.ispulled():
1259 print 'nothing to do for package %s' % self.name
1262 print 'Transmitting file data ',
1263 filelist = self.__generate_commitlist(todo_send)
1264 sfilelist = self.__send_commitlog(msg, filelist)
1265 send = self.__get_todo_send(sfilelist)
1266 real_send = [i for i in real_send if not i in send]
1267 # abort after 3 tries
1269 while len(send) and tries:
1270 for filename in send[:]:
1271 sys.stdout.write('.')
1273 self.put_source_file(filename)
1274 send.remove(filename)
1276 sfilelist = self.__send_commitlog(msg, filelist)
1277 send = self.__get_todo_send(sfilelist)
1279 raise oscerr.PackageInternalError(self.prjname, self.name,
1280 'server does not accept filelist:\n%s\nmissing:\n%s\n' \
1281 % (ET.tostring(filelist), ET.tostring(sfilelist)))
1282 # these files already exist on the server
1283 # just copy them into the storedir
1284 for filename in real_send:
1285 self.put_source_file(filename, copy_only=True)
1287 self.rev = sfilelist.get('rev')
1289 print 'Committed revision %s.' % self.rev
1292 os.unlink(os.path.join(self.storedir, '_pulled'))
1293 if self.islinkrepair():
1294 os.unlink(os.path.join(self.storedir, '_linkrepair'))
1295 self.linkrepair = False
1296 # XXX: mark package as invalid?
1297 print 'The source link has been repaired. This directory can now be removed.'
1299 if self.islink() and self.isexpanded():
1301 li.read(sfilelist.find('linkinfo'))
1302 if li.xsrcmd5 is None:
1303 raise oscerr.APIError('linkinfo has no xsrcmd5 attr:\n%s\n' % ET.tostring(sfilelist))
1304 sfilelist = ET.fromstring(self.get_files_meta(revision=li.xsrcmd5))
1305 for i in sfilelist.findall('entry'):
1306 if i.get('name') in self.skipped:
1307 i.set('skipped', 'true')
1308 store_write_string(self.absdir, '_files', ET.tostring(sfilelist) + '\n')
1309 for filename in todo_delete:
1310 self.to_be_deleted.remove(filename)
1311 self.delete_storefile(filename)
1312 self.write_deletelist()
1313 self.write_addlist()
1314 self.update_datastructs()
1316 print_request_list(self.apiurl, self.prjname, self.name)
1318 def __write_storelist(self, name, data):
1321 os.unlink(os.path.join(self.storedir, name))
1325 store_write_string(self.absdir, name, '%s\n' % '\n'.join(data))
1327 def write_conflictlist(self):
1328 self.__write_storelist('_in_conflict', self.in_conflict)
1330 def updatefile(self, n, revision, mtime=None):
1331 filename = os.path.join(self.dir, n)
1332 storefilename = os.path.join(self.storedir, n)
1333 origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
1334 origfile = os.path.join(self.storedir, '_in_update', n)
1335 if os.path.isfile(filename):
1336 shutil.copyfile(filename, origfile_tmp)
1337 os.rename(origfile_tmp, origfile)
1341 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=storefilename,
1342 revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1344 shutil.copyfile(storefilename, filename)
1345 if not origfile is None:
1348 def mergefile(self, n, revision, mtime=None):
1349 filename = os.path.join(self.dir, n)
1350 storefilename = os.path.join(self.storedir, n)
1351 myfilename = os.path.join(self.dir, n + '.mine')
1352 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1353 origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
1354 origfile = os.path.join(self.storedir, '_in_update', n)
1355 shutil.copyfile(filename, origfile_tmp)
1356 os.rename(origfile_tmp, origfile)
1357 os.rename(filename, myfilename)
1359 get_source_file(self.apiurl, self.prjname, self.name, n,
1360 revision=revision, targetfilename=upfilename,
1361 progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1363 if binary_file(myfilename) or binary_file(upfilename):
1365 shutil.copyfile(upfilename, filename)
1366 shutil.copyfile(upfilename, storefilename)
1368 self.in_conflict.append(n)
1369 self.write_conflictlist()
1373 # diff3 OPTIONS... MINE OLDER YOURS
1374 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1375 # we would rather use the subprocess module, but it is not available before 2.4
1376 ret = subprocess.call(merge_cmd, shell=True)
1378 # "An exit status of 0 means `diff3' was successful, 1 means some
1379 # conflicts were found, and 2 means trouble."
1381 # merge was successful... clean up
1382 shutil.copyfile(upfilename, storefilename)
1383 os.unlink(upfilename)
1384 os.unlink(myfilename)
1388 # unsuccessful merge
1389 shutil.copyfile(upfilename, storefilename)
1391 self.in_conflict.append(n)
1392 self.write_conflictlist()
1395 raise oscerr.ExtRuntimeError('diff3 failed with exit code: %s' % ret, merge_cmd)
1397 def update_local_filesmeta(self, revision=None):
1399 Update the local _files file in the store.
1400 It is replaced with the version pulled from upstream.
1402 meta = self.get_files_meta(revision=revision)
1403 store_write_string(self.absdir, '_files', meta + '\n')
1405 def get_files_meta(self, revision='latest', skip_service=True):
1406 fm = show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, meta=self.meta)
1407 # look for "too large" files according to size limit and mark them
1408 root = ET.fromstring(fm)
1409 for e in root.findall('entry'):
1410 size = e.get('size')
1411 if size and self.size_limit and int(size) > self.size_limit \
1412 or skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_')):
1413 e.set('skipped', 'true')
1414 return ET.tostring(root)
1416 def update_datastructs(self):
1418 Update the internal data structures if the local _files
1419 file has changed (e.g. update_local_filesmeta() has been
1423 files_tree = read_filemeta(self.dir)
1424 files_tree_root = files_tree.getroot()
1426 self.rev = files_tree_root.get('rev')
1427 self.srcmd5 = files_tree_root.get('srcmd5')
1429 self.linkinfo = Linkinfo()
1430 self.linkinfo.read(files_tree_root.find('linkinfo'))
1432 self.filenamelist = []
1435 for node in files_tree_root.findall('entry'):
1437 f = File(node.get('name'),
1439 int(node.get('size')),
1440 int(node.get('mtime')))
1441 if node.get('skipped'):
1442 self.skipped.append(f.name)
1445 # okay, a very old version of _files, which didn't contain any metadata yet...
1446 f = File(node.get('name'), '', 0, 0)
1447 self.filelist.append(f)
1448 self.filenamelist.append(f.name)
1450 self.to_be_added = read_tobeadded(self.absdir)
1451 self.to_be_deleted = read_tobedeleted(self.absdir)
1452 self.in_conflict = read_inconflict(self.absdir)
1453 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1454 self.size_limit = read_sizelimit(self.dir)
1455 self.meta = self.ismetamode()
1457 # gather unversioned files, but ignore some stuff
1459 for i in os.listdir(self.dir):
1460 for j in conf.config['exclude_glob']:
1461 if fnmatch.fnmatch(i, j):
1462 self.excluded.append(i)
1464 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1465 if i not in self.excluded
1466 if i not in self.filenamelist ]
1469 """tells us if the package is a link (has 'linkinfo').
1470 A package with linkinfo is a package which links to another package.
1471 Returns True if the package is a link, otherwise False."""
1472 return self.linkinfo.islink()
1474 def isexpanded(self):
1475 """tells us if the package is a link which is expanded.
1476 Returns True if the package is expanded, otherwise False."""
1477 return self.linkinfo.isexpanded()
1479 def islinkrepair(self):
1480 """tells us if we are repairing a broken source link."""
1481 return self.linkrepair
1484 """tells us if we have pulled a link."""
1485 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1488 """tells us if the link is frozen."""
1489 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1491 def ismetamode(self):
1492 """tells us if the package is in meta mode"""
1493 return os.path.isfile(os.path.join(self.storedir, '_meta_mode'))
1495 def get_pulled_srcmd5(self):
1497 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1498 pulledrev = line.strip()
1501 def haslinkerror(self):
1503 Returns True if the link is broken otherwise False.
1504 If the package is not a link it returns False.
1506 return self.linkinfo.haserror()
1508 def linkerror(self):
1510 Returns an error message if the link is broken otherwise None.
1511 If the package is not a link it returns None.
1513 return self.linkinfo.error
1515 def update_local_pacmeta(self):
1517 Update the local _meta file in the store.
1518 It is replaced with the version pulled from upstream.
1520 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1521 store_write_string(self.absdir, '_meta', meta + '\n')
1523 def findfilebyname(self, n):
1524 for i in self.filelist:
1528 def get_status(self, excluded=False, *exclude_states):
1532 todo = self.filenamelist + self.to_be_added + \
1533 [i for i in self.filenamelist_unvers if not os.path.isdir(os.path.join(self.absdir, i))]
1535 todo.extend([i for i in self.excluded if i != store])
1538 for fname in sorted(todo):
1539 st = self.status(fname)
1540 if not st in exclude_states:
1541 res.append((st, fname))
1544 def status(self, n):
1548 file storefile file present STATUS
1549 exists exists in _files
1551 x - - 'A' and listed in _to_be_added
1552 x x - 'R' and listed in _to_be_added
1553 x x x ' ' if digest differs: 'M'
1554 and if in conflicts file: 'C'
1556 - x x 'D' and listed in _to_be_deleted
1557 x x x 'D' and listed in _to_be_deleted (e.g. if deleted file was modified)
1558 x x x 'C' and listed in _in_conflict
1559 x - x 'S' and listed in self.skipped
1560 - - x 'S' and listed in self.skipped
1566 known_by_meta = False
1568 exists_in_store = False
1569 if n in self.filenamelist:
1570 known_by_meta = True
1571 if os.path.exists(os.path.join(self.absdir, n)):
1573 if os.path.exists(os.path.join(self.storedir, n)):
1574 exists_in_store = True
1576 if n in self.to_be_deleted:
1578 elif n in self.in_conflict:
1580 elif n in self.skipped:
1582 elif n in self.to_be_added and exists and exists_in_store:
1584 elif n in self.to_be_added and exists:
1586 elif exists and exists_in_store and known_by_meta:
1587 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1591 elif n in self.to_be_added and not exists:
1593 elif not exists and exists_in_store and known_by_meta and not n in self.to_be_deleted:
1595 elif exists and not exists_in_store and not known_by_meta:
1597 elif not exists_in_store and known_by_meta:
1598 # XXX: this codepath shouldn't be reached (we restore the storefile
1599 # in update_datastructs)
1600 raise oscerr.PackageInternalError(self.prjname, self.name,
1601 'error: file \'%s\' is known by meta but no storefile exists.\n'
1602 'This might be caused by an old wc format. Please backup your current\n'
1603 'wc and checkout the package again. Afterwards copy all files (except the\n'
1604 '.osc/ dir) into the new package wc.' % n)
1606 # this case shouldn't happen (unless there was a typo in the filename etc.)
1607 raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % n)
1611 def get_diff(self, revision=None, ignoreUnversioned=False):
1613 diff_hdr = 'Index: %s\n'
1614 diff_hdr += '===================================================================\n'
1618 def diff_add_delete(fname, add, revision):
1620 diff.append(diff_hdr % fname)
1624 diff.append('--- %s\t(revision 0)\n' % fname)
1626 if revision and not fname in self.to_be_added:
1627 rev = 'working copy'
1628 diff.append('+++ %s\t(%s)\n' % (fname, rev))
1629 fname = os.path.join(self.absdir, fname)
1631 diff.append('--- %s\t(revision %s)\n' % (fname, revision or self.rev))
1632 diff.append('+++ %s\t(working copy)\n' % fname)
1633 fname = os.path.join(self.storedir, fname)
1636 if revision is not None and not add:
1637 (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
1638 get_source_file(self.apiurl, self.prjname, self.name, origname, tmpfile, revision)
1640 if binary_file(fname):
1645 diff.append('Binary file \'%s\' %s.\n' % (origname, what))
1648 ltmpl = '@@ -0,0 +1,%d @@\n'
1651 ltmpl = '@@ -1,%d +0,0 @@\n'
1652 lines = [tmpl % i for i in open(fname, 'r').readlines()]
1654 diff.append(ltmpl % len(lines))
1655 if not lines[-1].endswith('\n'):
1656 lines.append('\n\\ No newline at end of file\n')
1659 if tmpfile is not None:
1664 if revision is None:
1665 todo = self.todo or [i for i in self.filenamelist if not i in self.to_be_added]+self.to_be_added
1667 if fname in self.to_be_added and self.status(fname) == 'A':
1669 elif fname in self.to_be_deleted:
1670 deleted.append(fname)
1671 elif fname in self.filenamelist:
1672 kept.append(self.findfilebyname(fname))
1673 elif fname in self.to_be_added and self.status(fname) == '!':
1674 raise oscerr.OscIOError(None, 'file \'%s\' is marked as \'A\' but does not exist\n'\
1675 '(either add the missing file or revert it)' % fname)
1676 elif not ignoreUnversioned:
1677 raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % fname)
1679 fm = self.get_files_meta(revision=revision)
1680 root = ET.fromstring(fm)
1681 rfiles = self.__get_files(root)
1682 # swap added and deleted
1683 kept, deleted, added, services = self.__get_rev_changes(rfiles)
1684 added = [f.name for f in added]
1685 added.extend([f for f in self.to_be_added if not f in kept])
1686 deleted = [f.name for f in deleted]
1687 deleted.extend(self.to_be_deleted)
1692 # print kept, added, deleted
1694 state = self.status(f.name)
1695 if state in ('S', '?', '!'):
1697 elif state == ' ' and revision is None:
1699 elif revision and self.findfilebyname(f.name).md5 == f.md5 and state != 'M':
1701 yield [diff_hdr % f.name]
1702 if revision is None:
1703 yield get_source_file_diff(self.absdir, f.name, self.rev)
1708 (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
1709 get_source_file(self.apiurl, self.prjname, self.name, f.name, tmpfile, revision)
1710 diff = get_source_file_diff(self.absdir, f.name, revision,
1711 os.path.basename(tmpfile), os.path.dirname(tmpfile), f.name)
1713 if tmpfile is not None:
1719 yield diff_add_delete(f, True, revision)
1721 yield diff_add_delete(f, False, revision)
1723 def merge(self, otherpac):
1724 self.todo += otherpac.todo
1738 '\n '.join(self.filenamelist),
1746 def read_meta_from_spec(self, spec = None):
1751 # scan for spec files
1752 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1753 if len(speclist) == 1:
1754 specfile = speclist[0]
1755 elif len(speclist) > 1:
1756 print 'the following specfiles were found:'
1757 for filename in speclist:
1759 print 'please specify one with --specfile'
1762 print 'no specfile was found - please specify one ' \
1766 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1767 self.summary = data.get('Summary', '')
1768 self.url = data.get('Url', '')
1769 self.descr = data.get('%description', '')
1772 def update_package_meta(self, force=False):
1774 for the updatepacmetafromspec subcommand
1775 the force argument suppresses the confirmation prompt
1778 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1780 root = ET.fromstring(m)
1781 root.find('title').text = self.summary
1782 root.find('description').text = ''.join(self.descr)
1783 url = root.find('url')
1785 url = ET.SubElement(root, 'url')
1788 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1789 mf = metafile(u, ET.tostring(root))
1792 print '*' * 36, 'old', '*' * 36
1794 print '*' * 36, 'new', '*' * 36
1795 print ET.tostring(root)
1797 repl = raw_input('Write? (y/N/e) ')
1808 def mark_frozen(self):
1809 store_write_string(self.absdir, '_frozenlink', '')
1811 print "The link in this package is currently broken. Checking"
1812 print "out the last working version instead; please use 'osc pull'"
1813 print "to repair the link."
1816 def unmark_frozen(self):
1817 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1818 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1820 def latest_rev(self):
1821 if self.islinkrepair():
1822 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1, meta=self.meta)
1823 elif self.islink() and self.isexpanded():
1824 if self.isfrozen() or self.ispulled():
1825 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1828 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, meta=self.meta)
1831 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1833 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base", meta=self.meta)
1836 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name, meta=self.meta)
1839 def __get_files(self, fmeta_root):
1841 if fmeta_root.get('rev') is None and len(fmeta_root.findall('entry')) > 0:
1842 raise oscerr.APIError('missing rev attribute in _files:\n%s' % ''.join(ET.tostring(fmeta_root)))
1843 for i in fmeta_root.findall('entry'):
1844 skipped = i.get('skipped') is not None
1845 f.append(File(i.get('name'), i.get('md5'),
1846 int(i.get('size')), int(i.get('mtime')), skipped))
1849 def __get_rev_changes(self, revfiles):
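# roughly: compare the file list of the requested revision ('revfiles') against
# the current working copy state and sort the files into kept / added / deleted /
# services (generated '_service:*' files), treating skipped files as noted below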
1856 revfilenames.append(f.name)
1857 # treat skipped like deleted files
1859 if f.name.startswith('_service:'):
1864 # treat skipped like added files
1865 # problem: this overwrites existing files during the update
1866 # (because skipped files aren't in self.filenamelist_unvers)
1867 if f.name in self.filenamelist and not f.name in self.skipped:
1871 for f in self.filelist:
1872 if not f.name in revfilenames:
1875 return kept, added, deleted, services
1877 def update(self, rev = None, service_files = False, size_limit = None):
1880 # size_limit is only temporary for this update
1881 old_size_limit = self.size_limit
1882 if not size_limit is None:
1883 self.size_limit = int(size_limit)
1884 if os.path.isfile(os.path.join(self.storedir, '_in_update', '_files')):
1885 print 'resuming broken update...'
1886 root = ET.parse(os.path.join(self.storedir, '_in_update', '_files')).getroot()
1887 rfiles = self.__get_files(root)
1888 kept, added, deleted, services = self.__get_rev_changes(rfiles)
1889 # check if we aborted in the middle of a file update
1890 broken_file = os.listdir(os.path.join(self.storedir, '_in_update'))
1891 broken_file.remove('_files')
1892 if len(broken_file) == 1:
1893 origfile = os.path.join(self.storedir, '_in_update', broken_file[0])
1894 wcfile = os.path.join(self.absdir, broken_file[0])
1895 origfile_md5 = dgst(origfile)
1896 origfile_meta = self.findfilebyname(broken_file[0])
1897 if origfile.endswith('.copy'):
1898 # ok it seems we aborted at some point during the copy process
1899 # (copy process == copy wcfile to the _in_update dir). remove file+continue
1901 elif self.findfilebyname(broken_file[0]) is None:
1902 # should we remove this file from _in_update? if we don't
1903 # the user has no chance to continue without removing the file manually
1904 raise oscerr.PackageInternalError(self.prjname, self.name,
1905 '\'%s\' is not known by meta but exists in \'_in_update\' dir' % broken_file[0])
1906 elif os.path.isfile(wcfile) and dgst(wcfile) != origfile_md5:
1907 (fd, tmpfile) = tempfile.mkstemp(dir=self.absdir, prefix=broken_file[0]+'.')
1909 os.rename(wcfile, tmpfile)
1910 os.rename(origfile, wcfile)
1911 print 'warning: it seems you modified \'%s\' after the broken ' \
1912 'update. Restored original file and saved modified version ' \
1913 'to \'%s\'.' % (wcfile, tmpfile)
1914 elif not os.path.isfile(wcfile):
1915 # this is strange... because it existed before the update. restore it
1916 os.rename(origfile, wcfile)
1918 # everything seems to be ok
1920 elif len(broken_file) > 1:
1921 raise oscerr.PackageInternalError(self.prjname, self.name, 'too many files in \'_in_update\' dir')
1924 if os.path.exists(os.path.join(self.storedir, f.name)):
1925 if dgst(os.path.join(self.storedir, f.name)) == f.md5:
1933 if not service_files:
1935 self.__update(kept, added, deleted, services, ET.tostring(root), root.get('rev'))
1936 os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
1937 os.rmdir(os.path.join(self.storedir, '_in_update'))
1938 # ok everything is ok (hopefully)...
1939 fm = self.get_files_meta(revision=rev)
1940 root = ET.fromstring(fm)
1941 rfiles = self.__get_files(root)
1942 store_write_string(self.absdir, '_files', fm + '\n', subdir='_in_update')
1943 kept, added, deleted, services = self.__get_rev_changes(rfiles)
1944 if not service_files:
1946 self.__update(kept, added, deleted, services, fm, root.get('rev'))
1947 os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
1948 if os.path.isdir(os.path.join(self.storedir, '_in_update')):
1949 os.rmdir(os.path.join(self.storedir, '_in_update'))
1950 self.size_limit = old_size_limit
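# __update() below performs the actual working copy update: newly added
# files are checked out, files deleted on the server are removed locally
# (unless they were modified), kept files are merged or refreshed depending
# on their local state, '_service:' files are fetched into the working copy,
# and finally the '_files' metadata and the local package meta are rewritten.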
1952 def __update(self, kept, added, deleted, services, fm, rev):
1953 pathn = getTransActPath(self.dir)
1954 # check for conflicts with existing files
1956 if f.name in self.filenamelist_unvers:
1957 raise oscerr.PackageFileConflict(self.prjname, self.name, f.name,
1958 'failed to add file \'%s\': file/dir with the same name already exists' % f.name)
1959 # ok, the update can't fail due to existing files
1961 self.updatefile(f.name, rev, f.mtime)
1962 print statfrmt('A', os.path.join(pathn, f.name))
1964 # if the storefile doesn't exist we're resuming an aborted update:
1965 # the file was already deleted but we cannot know this
1966 # OR we're processing a _service: file (simply keep the file)
1967 if os.path.isfile(os.path.join(self.storedir, f.name)) and self.status(f.name) != 'M':
1968 # if self.status(f.name) != 'M':
1969 self.delete_localfile(f.name)
1970 self.delete_storefile(f.name)
1971 print statfrmt('D', os.path.join(pathn, f.name))
1972 if f.name in self.to_be_deleted:
1973 self.to_be_deleted.remove(f.name)
1974 self.write_deletelist()
1977 state = self.status(f.name)
1978 # print f.name, state
1979 if state == 'M' and self.findfilebyname(f.name).md5 == f.md5:
1980 # remote file didn't change
1983 # try to merge changes
1984 merge_status = self.mergefile(f.name, rev, f.mtime)
1985 print statfrmt(merge_status, os.path.join(pathn, f.name))
1987 self.updatefile(f.name, rev, f.mtime)
1988 print 'Restored \'%s\'' % os.path.join(pathn, f.name)
1990 get_source_file(self.apiurl, self.prjname, self.name, f.name,
1991 targetfilename=os.path.join(self.storedir, f.name), revision=rev,
1992 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
1993 print 'skipping \'%s\' (this is due to conflicts)' % f.name
1994 elif state == 'D' and self.findfilebyname(f.name).md5 != f.md5:
1995 # XXX: in the worst case we might end up with f.name being
1996 # in _to_be_deleted and in _in_conflict... this needs to be checked
1997 if os.path.exists(os.path.join(self.absdir, f.name)):
1998 merge_status = self.mergefile(f.name, rev, f.mtime)
1999 print statfrmt(merge_status, os.path.join(pathn, f.name))
2000 if merge_status == 'C':
2001 # state changes from delete to conflict
2002 self.to_be_deleted.remove(f.name)
2003 self.write_deletelist()
2005 # XXX: we cannot recover this case because we've no file
2007 self.updatefile(f.name, rev, f.mtime)
2008 print statfrmt('U', os.path.join(pathn, f.name))
2009 elif state == ' ' and self.findfilebyname(f.name).md5 != f.md5:
2010 self.updatefile(f.name, rev, f.mtime)
2011 print statfrmt('U', os.path.join(pathn, f.name))
2013 # checkout service files
2015 get_source_file(self.apiurl, self.prjname, self.name, f.name,
2016 targetfilename=os.path.join(self.absdir, f.name), revision=rev,
2017 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
2018 print statfrmt('A', os.path.join(pathn, f.name))
2019 store_write_string(self.absdir, '_files', fm + '\n')
2021 self.update_local_pacmeta()
2022 self.update_datastructs()
2024 print 'At revision %s.' % self.rev
2026 def run_source_services(self, mode=None, singleservice=None):
2027 curdir = os.getcwd()
2028 os.chdir(self.absdir) # e.g. /usr/lib/obs/service/verify_file fails if not inside the project dir.
2030 if self.filenamelist.count('_service') or self.filenamelist_unvers.count('_service'):
2031 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
2033 si.getProjectGlobalServices(self.apiurl, self.prjname, self.name)
2034 si.execute(self.absdir, mode, singleservice)
2037 def prepare_filelist(self):
2038 """Prepare a list of files, which will be processed by process_filelist
2039 method. This allows easy modifications of a file list in commit
2043 self.todo = self.filenamelist + self.filenamelist_unvers
2047 for f in [f for f in self.todo if not os.path.isdir(f)]:
2049 status = self.status(f)
2054 ret += "%s %s %s\n" % (action, status, f)
2057 # Edit a filelist for package \'%s\'
2059 # l, leave = leave a file as is
2060 # r, remove = remove a file
2061 # a, add = add a file
2063 # If you remove a file from the list, it will be left unchanged
2064 # If you remove all files, the commit will be aborted""" % self.name
2068 def edit_filelist(self):
2069 """Opens a package list in editor for editing. This allows easy
2070 modifications of it just by simple text editing
2074 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
2075 f = os.fdopen(fd, 'w')
2076 f.write(self.prepare_filelist())
2078 mtime_orig = os.stat(filename).st_mtime
2081 run_editor(filename)
2082 mtime = os.stat(filename).st_mtime
2083 if mtime_orig < mtime:
2084 filelist = open(filename).readlines()
2088 raise oscerr.UserAbort()
2090 return self.process_filelist(filelist)
2092 def process_filelist(self, filelist):
2093 """Process a filelist - it add/remove or leave files. This depends on
2094 user input. If no file is processed, it raises an ValueError
2098 for line in [l.strip() for l in filelist if l.strip() != '' and not l.startswith("#")]:
2100 foo = line.split(' ')
2102 action, state, name = (foo[0], ' ', foo[3])
2104 action, state, name = (foo[0], foo[1], foo[2])
2107 action = action.lower()
2110 if action in ('r', 'remove'):
2111 if self.status(name) == '?':
2113 if name in self.todo:
2114 self.todo.remove(name)
2116 self.delete_file(name, True)
2117 elif action in ('a', 'add'):
2118 if self.status(name) != '?':
2119 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
2122 elif action in ('l', 'leave'):
2125 raise ValueError("Unknown action `%s'" % action)
2128 raise ValueError("Empty filelist")
2130 def revert(self, filename):
2131 if not filename in self.filenamelist and not filename in self.to_be_added:
2132 raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % filename)
2133 elif filename in self.skipped:
2134 raise oscerr.OscIOError(None, 'file \'%s\' is marked as skipped and cannot be reverted' % filename)
2135 if filename in self.filenamelist and not os.path.exists(os.path.join(self.storedir, filename)):
2136 raise oscerr.PackageInternalError(self.prjname, self.name, 'file \'%s\' is listed in filenamelist but no storefile exists' % filename)
2137 state = self.status(filename)
2138 if not (state == 'A' or state == '!' and filename in self.to_be_added):
2139 shutil.copyfile(os.path.join(self.storedir, filename), os.path.join(self.absdir, filename))
2141 self.to_be_deleted.remove(filename)
2142 self.write_deletelist()
2144 self.clear_from_conflictlist(filename)
2145 elif state in ('A', 'R') or state == '!' and filename in self.to_be_added:
2146 self.to_be_added.remove(filename)
2147 self.write_addlist()
2150 def init_package(apiurl, project, package, dir, size_limit=None, meta=False, progress_obj=None):
2153 if not os.path.exists(dir):
2155 elif not os.path.isdir(dir):
2156 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
2157 if os.path.exists(os.path.join(dir, store)):
2158 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
2160 os.mkdir(os.path.join(dir, store))
2161 store_write_project(dir, project)
2162 store_write_string(dir, '_package', package + '\n')
2163 store_write_apiurl(dir, apiurl)
2165 store_write_string(dir, '_meta_mode', '')
2167 store_write_string(dir, '_size_limit', str(size_limit) + '\n')
2168 store_write_string(dir, '_files', '<directory />' + '\n')
2169 store_write_string(dir, '_osclib_version', __store_version__ + '\n')
2170 return Package(dir, progress_obj=progress_obj, size_limit=size_limit)
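# Illustrative sketch (not part of the original code): init_package() is used
# to turn a plain directory into a fresh osc working copy, roughly:
#
#   pac = init_package('https://api.opensuse.org', 'home:user', 'mypkg', 'mypkg')
#   # 'mypkg/.osc' now holds the project/package/apiurl metadata, an empty
#   # '_files' list and the store version
#
# The apiurl, project and package names above are made-up placeholders.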
2173 class AbstractState:
2175 Base class which represents state-like objects (<review />, <state />).
2177 def __init__(self, tag):
2180 def get_node_attrs(self):
2181 """return attributes for the tag/element"""
2182 raise NotImplementedError()
2184 def get_node_name(self):
2185 """return tag/element name"""
2188 def get_comment(self):
2189 """return data from <comment /> tag"""
2190 raise NotImplementedError()
2193 """serialize object to XML"""
2194 root = ET.Element(self.get_node_name())
2195 for attr in self.get_node_attrs():
2196 val = getattr(self, attr)
2199 if self.get_comment():
2200 ET.SubElement(root, 'comment').text = self.get_comment()
2204 """return "pretty" XML data"""
2205 root = self.to_xml()
2207 return ET.tostring(root)
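# Subclasses such as ReviewState and RequestState (below) only need to
# provide get_node_attrs() and get_comment(); to_xml()/to_str() then
# serialize the state generically from those attributes.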
2210 class ReviewState(AbstractState):
2211 """Represents the review state in a request"""
2212 def __init__(self, review_node):
2213 if not review_node.get('state'):
2214 raise oscerr.APIError('invalid review node (state attr expected): %s' % \
2215 ET.tostring(review_node))
2216 AbstractState.__init__(self, review_node.tag)
2217 self.state = review_node.get('state')
2218 self.by_user = review_node.get('by_user')
2219 self.by_group = review_node.get('by_group')
2220 self.by_project = review_node.get('by_project')
2221 self.by_package = review_node.get('by_package')
2222 self.who = review_node.get('who')
2223 self.when = review_node.get('when')
2225 if not review_node.find('comment') is None and \
2226 review_node.find('comment').text:
2227 self.comment = review_node.find('comment').text.strip()
2229 def get_node_attrs(self):
2230 return ('state', 'by_user', 'by_group', 'by_project', 'by_package', 'who', 'when')
2232 def get_comment(self):
2236 class RequestState(AbstractState):
2237 """Represents the state of a request"""
2238 def __init__(self, state_node):
2239 if not state_node.get('name'):
2240 raise oscerr.APIError('invalid request state node (name attr expected): %s' % \
2241 ET.tostring(state_node))
2242 AbstractState.__init__(self, state_node.tag)
2243 self.name = state_node.get('name')
2244 self.who = state_node.get('who')
2245 self.when = state_node.get('when')
2247 if not state_node.find('comment') is None and \
2248 state_node.find('comment').text:
2249 self.comment = state_node.find('comment').text.strip()
2251 def get_node_attrs(self):
2252 return ('name', 'who', 'when')
2254 def get_comment(self):
2260 Represents a <action /> element of a Request.
2261 This class is generic and can therefore be used for all the different
2262 action types. Note: instances only provide attributes for their specific
2265 r = Action('set_bugowner', tgt_project='foo', person_name='buguser')
2266 # available attributes: r.type (== 'set_bugowner'), r.tgt_project (== 'foo'), r.tgt_package (== None)
2268 <action type="set_bugowner">
2269 <target project="foo" />
2270 <person name="buguser" />
2273 r = Action('delete', tgt_project='foo', tgt_package='bar')
2274 # available attributes: r.type (== 'delete'), r.tgt_project (== 'foo'), r.tgt_package (=='bar')
2276 <action type="delete">
2277 <target package="bar" project="foo" />
2281 # allowed types + the corresponding (allowed) attributes
2282 type_args = {'submit': ('src_project', 'src_package', 'src_rev', 'tgt_project', 'tgt_package', 'opt_sourceupdate',
2283 'acceptinfo_rev', 'acceptinfo_srcmd5', 'acceptinfo_xsrcmd5', 'acceptinfo_osrcmd5',
2284 'acceptinfo_oxsrcmd5', 'opt_updatelink'),
2285 'add_role': ('tgt_project', 'tgt_package', 'person_name', 'person_role', 'group_name', 'group_role'),
2286 'set_bugowner': ('tgt_project', 'tgt_package', 'person_name'),
2287 'delete': ('tgt_project', 'tgt_package'),
2288 'change_devel': ('src_project', 'src_package', 'tgt_project', 'tgt_package')}
2289 # attribute prefix to element name map (only needed for abbreviated attributes)
2290 prefix_to_elm = {'src': 'source', 'tgt': 'target', 'opt': 'options'}
2292 def __init__(self, type, **kwargs):
2293 if not type in Action.type_args.keys():
2294 raise oscerr.WrongArgs('invalid action type: \'%s\'' % type)
2296 for i in kwargs.keys():
2297 if not i in Action.type_args[type]:
2298 raise oscerr.WrongArgs('invalid argument: \'%s\'' % i)
2299 # set all type specific attributes
2300 for i in Action.type_args[type]:
2301 if i in kwargs:
2302 setattr(self, i, kwargs[i])
2304 setattr(self, i, None)
2308 Serialize object to XML.
2309 The xml tag names and attributes are constructed from the instance's attributes.
2311 self.group_name -> tag name is "group", attribute name is "name"
2312 self.src_project -> tag name is "source" (translated via prefix_to_elm dict),
2313 attribute name is "project"
2314 Attributes prefixed with "opt_" need special handling; the resulting xml should
2315 look like this: opt_updatelink -> <options><updatelink>value</updatelink></options>.
2316 Attributes which are "None" will be skipped.
2318 root = ET.Element('action', type=self.type)
2319 for i in Action.type_args[self.type]:
2320 prefix, attr = i.split('_', 1)
2321 val = getattr(self, i)
2324 elm = root.find(Action.prefix_to_elm.get(prefix, prefix))
2326 elm = ET.Element(Action.prefix_to_elm.get(prefix, prefix))
2329 ET.SubElement(elm, attr).text = val
2335 """return "pretty" XML data"""
2336 root = self.to_xml()
2338 return ET.tostring(root)
2341 def from_xml(action_node):
2342 """create action from XML"""
2343 if action_node is None or \
2344 not action_node.get('type') in Action.type_args.keys() or \
2345 not action_node.tag in ('action', 'submit'):
2346 raise oscerr.WrongArgs('invalid argument')
2347 elm_to_prefix = dict([(i[1], i[0]) for i in Action.prefix_to_elm.items()])
2349 for node in action_node:
2350 prefix = elm_to_prefix.get(node.tag, node.tag)
2352 data = [('opt_%s' % opt.tag, opt.text.strip()) for opt in node if opt.text]
2354 data = [('%s_%s' % (prefix, k), v) for k, v in node.items()]
2355 kwargs.update(dict(data))
2356 return Action(action_node.get('type'), **kwargs)
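# Illustrative sketch (not part of the original code), mirroring the example
# from the class docstring above:
#
#   act = Action('set_bugowner', tgt_project='foo', person_name='buguser')
#   xml = ET.tostring(act.to_xml())
#   # roughly: <action type="set_bugowner"><target project="foo" /><person name="buguser" /></action>
#   act2 = Action.from_xml(ET.fromstring(xml))   # parses it back into an Action
#
# The project and user names are made-up placeholders.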
2360 """Represents a request (<request />)"""
2363 self._init_attributes()
2365 def _init_attributes(self):
2366 """initialize attributes with default values"""
2369 self.description = ''
2372 self.statehistory = []
2375 def read(self, root):
2376 """read in a request"""
2377 self._init_attributes()
2378 if not root.get('id'):
2379 raise oscerr.APIError('invalid request: %s\n' % ET.tostring(root))
2380 self.reqid = root.get('id')
2381 if root.find('state') is None:
2382 raise oscerr.APIError('invalid request (state expected): %s\n' % ET.tostring(root))
2383 self.state = RequestState(root.find('state'))
2384 action_nodes = root.findall('action')
2385 if not action_nodes:
2386 # check for old-style requests
2387 for i in root.findall('submit'):
2388 i.set('type', 'submit')
2389 action_nodes.append(i)
2390 for action in action_nodes:
2391 self.actions.append(Action.from_xml(action))
2392 for review in root.findall('review'):
2393 self.reviews.append(ReviewState(review))
2394 for hist_state in root.findall('history'):
2395 self.statehistory.append(RequestState(hist_state))
2396 if not root.find('title') is None:
2397 self.title = root.find('title').text.strip()
2398 if not root.find('description') is None and root.find('description').text:
2399 self.description = root.find('description').text.strip()
2401 def add_action(self, type, **kwargs):
2402 """add a new action to the request"""
2403 self.actions.append(Action(type, **kwargs))
2405 def get_actions(self, *types):
2407 get all actions with a specific type
2408 (if types is empty return all actions)
2412 return [i for i in self.actions if i.type in types]
2414 def get_creator(self):
2415 """return the creator of the request"""
2416 if len(self.statehistory):
2417 return self.statehistory[0].who
2418 return self.state.who
2421 """serialize object to XML"""
2422 root = ET.Element('request')
2423 if not self.reqid is None:
2424 root.set('id', self.reqid)
2425 for action in self.actions:
2426 root.append(action.to_xml())
2427 if not self.state is None:
2428 root.append(self.state.to_xml())
2429 for review in self.reviews:
2430 root.append(review.to_xml())
2431 for hist in self.statehistory:
2432 root.append(hist.to_xml())
2434 ET.SubElement(root, 'title').text = self.title
2435 if self.description:
2436 ET.SubElement(root, 'description').text = self.description
2440 """return "pretty" XML data"""
2441 root = self.to_xml()
2443 return ET.tostring(root)
2446 def format_review(review, show_srcupdate=False):
2448 format a review depending on the reviewer's type.
2449 A dict which contains the formatted strings is returned.
2452 d = {'state': '%s:' % review.state}
2453 if review.by_package:
2454 d['by'] = '%s/%s' % (review.by_project, review.by_package)
2455 d['type'] = 'Package'
2456 elif review.by_project:
2457 d['by'] = '%s' % review.by_project
2458 d['type'] = 'Project'
2459 elif review.by_group:
2460 d['by'] = '%s' % review.by_group
2463 d['by'] = '%s' % review.by_user
2466 d['by'] += '(%s)' % review.who
2470 def format_action(action, show_srcupdate=False):
2472 format an action depending on the action's type.
2473 A dict which contains the formatted strings is returned.
2475 def prj_pkg_join(prj, pkg):
2478 return '%s/%s' % (prj, pkg)
2480 d = {'type': '%s:' % action.type}
2481 if action.type == 'set_bugowner':
2482 d['source'] = action.person_name
2483 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2484 elif action.type == 'change_devel':
2485 d['source'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2486 d['target'] = 'developed in %s' % prj_pkg_join(action.src_project, action.src_package)
2487 elif action.type == 'submit':
2489 if action.opt_sourceupdate and show_srcupdate:
2490 srcupdate = '(%s)' % action.opt_sourceupdate
2491 d['source'] = '%s%s ->' % (prj_pkg_join(action.src_project, action.src_package), srcupdate)
2492 tgt_package = action.tgt_package
2493 if action.src_package == action.tgt_package:
2495 d['target'] = prj_pkg_join(action.tgt_project, tgt_package)
2496 elif action.type == 'add_role':
2498 if action.person_name and action.person_role:
2499 roles.append('person: %s as %s' % (action.person_name, action.person_role))
2500 if action.group_name and action.group_role:
2501 roles.append('group: %s as %s' % (action.group_name, action.group_role))
2502 d['source'] = ', '.join(roles)
2503 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2504 elif action.type == 'delete':
2506 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2509 def list_view(self):
2510 """return "list view" format"""
2512 lines = ['%6s State:%-10s By:%-12s When:%-19s' % (self.reqid, self.state.name, self.state.who, self.state.when)]
2513 tmpl = ' %(type)-16s %(source)-50s %(target)s'
2514 for action in self.actions:
2515 lines.append(tmpl % Request.format_action(action))
2516 tmpl = ' Review by %(type)-10s is %(state)-10s %(by)-50s'
2517 for review in self.reviews:
2518 lines.append(tmpl % Request.format_review(review))
2519 history = ['%s(%s)' % (hist.name, hist.who) for hist in self.statehistory]
2521 lines.append(' From: %s' % ' -> '.join(history))
2522 if self.description:
2523 lines.append(textwrap.fill(self.description, width=80, initial_indent=' Descr: ',
2524 subsequent_indent=' '))
2525 return '\n'.join(lines)
2528 """return "detailed" format"""
2529 lines = ['Request: #%s\n' % self.reqid]
2530 for action in self.actions:
2531 tmpl = ' %(type)-13s %(source)s %(target)s'
2532 if action.type == 'delete':
2533 # remove 1 whitespace because source is empty
2534 tmpl = ' %(type)-12s %(source)s %(target)s'
2535 lines.append(tmpl % Request.format_action(action, show_srcupdate=True))
2536 lines.append('\n\nMessage:')
2537 if self.description:
2538 lines.append(self.description)
2540 lines.append('<no message>')
2542 lines.append('\nState: %-10s %-12s %s' % (self.state.name, self.state.when, self.state.who))
2543 lines.append('Comment: %s' % (self.state.comment or '<no comment>'))
2546 tmpl = '%(state)-10s %(by)-50s %(when)-12s %(who)-20s %(comment)s'
2548 for review in reversed(self.reviews):
2549 d = {'state': review.state}
2551 d['by'] = "User: " + review.by_user
2553 d['by'] = "Group: " + review.by_group
2554 if review.by_package:
2555 d['by'] = "Package: " + review.by_project + "/" + review.by_package
2556 elif review.by_project:
2557 d['by'] = "Project: " + review.by_project
2558 d['when'] = review.when or ''
2559 d['who'] = review.who or ''
2560 d['comment'] = review.comment or ''
2561 reviews.append(tmpl % d)
2563 lines.append('\nReview: %s' % indent.join(reviews))
2565 tmpl = '%(name)-10s %(when)-12s %(who)s'
2567 for hist in reversed(self.statehistory):
2568 d = {'name': hist.name, 'when': hist.when,
2570 histories.append(tmpl % d)
2572 lines.append('\nHistory: %s' % indent.join(histories))
2574 return '\n'.join(lines)
2576 def __cmp__(self, other):
2577 return cmp(int(self.reqid), int(other.reqid))
2579 def create(self, apiurl):
2580 """create a new request"""
2581 u = makeurl(apiurl, ['request'], query='cmd=create')
2582 f = http_POST(u, data=self.to_str())
2583 root = ET.fromstring(f.read())
2587 """format time as Apr 02 18:19
2589 depending on whether it is in the current year
2593 if time.localtime()[0] == time.localtime(t)[0]:
2595 return time.strftime('%b %d %H:%M',time.localtime(t))
2597 return time.strftime('%b %d %Y',time.localtime(t))
2600 def is_project_dir(d):
2603 return os.path.exists(os.path.join(d, store, '_project')) and not \
2604 os.path.exists(os.path.join(d, store, '_package'))
2607 def is_package_dir(d):
2610 return os.path.exists(os.path.join(d, store, '_project')) and \
2611 os.path.exists(os.path.join(d, store, '_package'))
2613 def parse_disturl(disturl):
2614 """Parse a disturl, returns tuple (apiurl, project, source, repository,
2615 revision), else raises an oscerr.WrongArgs exception
2620 m = DISTURL_RE.match(disturl)
2622 raise oscerr.WrongArgs("`%s' does not look like a disturl" % disturl)
2624 apiurl = m.group('apiurl')
2625 if apiurl.split('.')[0] != 'api':
2626 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
2627 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
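# Illustrative sketch (not part of the original code): a disturl such as
#
#   obs://build.opensuse.org/openSUSE:Factory/standard/abc123-mypackage
#
# would be parsed into (apiurl, project, source, repository, revision), i.e.
# roughly ('https://api.opensuse.org', 'openSUSE:Factory', 'mypackage',
# 'standard', 'abc123'). The package name and revision here are made-up
# placeholders.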
2629 def parse_buildlogurl(buildlogurl):
2630 """Parse a build log url, returns a tuple (apiurl, project, package,
2631 repository, arch), else raises oscerr.WrongArgs exception"""
2633 global BUILDLOGURL_RE
2635 m = BUILDLOGURL_RE.match(buildlogurl)
2637 raise oscerr.WrongArgs('\'%s\' does not look like a url with a build log' % buildlogurl)
2639 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
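# Illustrative sketch (not part of the original code): a build log url such as
#
#   https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/mypackage/_log
#
# would yield roughly ('https://api.opensuse.org', 'openSUSE:Factory',
# 'mypackage', 'standard', 'x86_64'). The package name is a made-up placeholder.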
2642 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
2643 This is handy because it allows copy/pasting a project/package combination in this form.
2645 Trailing slashes are removed before the split, because the split would
2646 otherwise give an additional empty string.
2654 def expand_proj_pack(args, idx=0, howmany=0):
2655 """looks for occurance of '.' at the position idx.
2656 If howmany is 2, both proj and pack are expanded together
2657 using the current directory, or none of them, if not possible.
2658 If howmany is 0, proj is expanded if possible, then, if there
2659 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
2660 expanded, if possible.
2661 If howmany is 1, only proj is expanded if possible.
2663 If args[idx] does not exist, an implicit '.' is assumed.
2664 If there are not enough elements up to idx, an error is raised.
2666 See also parseargs(args), slash_split(args), findpacs(args)
2667 All these need unification, somehow.
2670 # print args,idx,howmany
2673 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
2675 if len(args) == idx:
2677 if args[idx+0] == '.':
2678 if howmany == 0 and len(args) > idx+1:
2679 if args[idx+1] == '.':
2681 # remove one dot and make sure to expand both proj and pack
2686 # print args,idx,howmany
2688 args[idx+0] = store_read_project('.')
2691 package = store_read_package('.')
2692 args.insert(idx+1, package)
2696 package = store_read_package('.')
2697 args.insert(idx+1, package)
2701 def findpacs(files, progress_obj=None):
2702 """collect Package objects belonging to the given files
2703 and make sure each Package is returned only once"""
2706 p = filedir_to_pac(f, progress_obj)
2709 if i.name == p.name:
2719 def filedir_to_pac(f, progress_obj=None):
2720 """Takes a working copy path, or a path to a file inside a working copy,
2721 and returns a Package object instance
2723 If the argument was a filename, add it onto the "todo" list of the Package """
2725 if os.path.isdir(f):
2727 p = Package(wd, progress_obj=progress_obj)
2729 wd = os.path.dirname(f) or os.curdir
2730 p = Package(wd, progress_obj=progress_obj)
2731 p.todo = [ os.path.basename(f) ]
2735 def read_filemeta(dir):
2738 msg = '\'%s\' is not a valid working copy.\n' % dir
2739 if not is_package_dir(dir):
2740 raise oscerr.NoWorkingCopy(msg)
2742 filesmeta = os.path.join(dir, store, '_files')
2743 if not os.path.isfile(filesmeta):
2744 print >>sys.stderr, "Warning: file _files is missing, creating a default one"
2745 store_write_string(dir, '_files', '<directory />' + '\n')
2748 r = ET.parse(filesmeta)
2749 except SyntaxError, e:
2750 raise oscerr.NoWorkingCopy(msg +
2751 'When parsing .osc/_files, the following error was encountered:\n'
2755 def store_readlist(dir, name):
2759 if os.path.exists(os.path.join(dir, store, name)):
2760 r = [line.strip() for line in open(os.path.join(dir, store, name), 'r')]
2763 def read_tobeadded(dir):
2764 return store_readlist(dir, '_to_be_added')
2766 def read_tobedeleted(dir):
2767 return store_readlist(dir, '_to_be_deleted')
2769 def read_sizelimit(dir):
2773 fname = os.path.join(dir, store, '_size_limit')
2775 if os.path.exists(fname):
2776 r = open(fname).readline().strip()
2778 if r is None or not r.isdigit():
2782 def read_inconflict(dir):
2783 return store_readlist(dir, '_in_conflict')
2785 def parseargs(list_of_args):
2786 """Convenience method osc's commandline argument parsing.
2788 If called with an empty tuple (or list), return a list containing the current directory.
2789 Otherwise, return a list of the arguments."""
2791 return list(list_of_args)
2796 def statfrmt(statusletter, filename):
2797 return '%s %s' % (statusletter, filename)
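# e.g. statfrmt('A', 'foo/bar') yields a status line of the form 'A foo/bar'
# (the exact spacing depends on the format string above).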
2800 def pathjoin(a, *p):
2801 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2802 path = os.path.join(a, *p)
2803 if path.startswith('./'):
2808 def makeurl(baseurl, l, query=[]):
2809 """Given a list of path compoments, construct a complete URL.
2811 Optional parameters for a query string can be given as a list, as a
2812 dictionary, or as an already assembled string.
2813 In case of a dictionary, the parameters will be urlencoded by this
2814 function. In case of a list they will not be -- this is for backwards compatibility.
2817 if conf.config['verbose'] > 1:
2818 print 'makeurl:', baseurl, l, query
2820 if isinstance(query, list):
2821 query = '&'.join(query)
2822 elif isinstance(query, dict):
2823 query = urlencode(query)
2825 scheme, netloc = urlsplit(baseurl)[0:2]
2826 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
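# Illustrative sketch (not part of the original code), assuming the
# hypothetical baseurl 'https://api.opensuse.org':
#
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', 'osc'],
#           query={'rev': 'latest'})
#   # -> 'https://api.opensuse.org/source/openSUSE:Factory/osc?rev=latest'
#
# A list query (e.g. ['deleted=1']) is joined with '&' as-is, while a dict is
# urlencoded first.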
2829 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2830 """wrapper around urllib2.urlopen for error handling,
2831 and to support additional (PUT, DELETE) methods"""
2835 if conf.config['http_debug']:
2836 print >>sys.stderr, '\n\n--', method, url
2838 if method == 'POST' and not file and not data:
2839 # adding data to an urllib2 request transforms it into a POST
2842 req = urllib2.Request(url)
2843 api_host_options = {}
2844 if conf.is_known_apiurl(url):
2845 # ok no external request
2846 urllib2.install_opener(conf._build_opener(url))
2847 api_host_options = conf.get_apiurl_api_host_options(url)
2848 for header, value in api_host_options['http_headers']:
2849 req.add_header(header, value)
2851 req.get_method = lambda: method
2853 # POST requests are application/x-www-form-urlencoded by default
2854 # since we change the request into PUT, we also need to adjust the content type header
2855 if method == 'PUT' or (method == 'POST' and data):
2856 req.add_header('Content-Type', 'application/octet-stream')
2858 if isinstance(headers, dict):
2859 for i in headers.keys():
2861 req.add_header(i, headers[i])
2863 if file and not data:
2864 size = os.path.getsize(file)
2866 data = open(file, 'rb').read()
2869 filefd = open(file, 'rb')
2871 if sys.platform[:3] != 'win':
2872 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2874 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2876 except EnvironmentError, e:
2878 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2879 '\non a filesystem which does not support this.' % (e, file))
2880 elif hasattr(e, 'winerror') and e.winerror == 5:
2881 # falling back to the default io
2882 data = open(file, 'rb').read()
2886 if conf.config['debug']: print >>sys.stderr, method, url
2888 old_timeout = socket.getdefaulttimeout()
2889 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2890 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2891 socket.setdefaulttimeout(timeout)
2893 fd = urllib2.urlopen(req, data=data)
2895 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2896 socket.setdefaulttimeout(old_timeout)
2897 if hasattr(conf.cookiejar, 'save'):
2898 conf.cookiejar.save(ignore_discard=True)
2900 if filefd: filefd.close()
2905 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2906 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2907 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2908 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
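# Illustrative sketch (not part of the original code): these thin wrappers are
# combined with makeurl() throughout this module, e.g.
#
#   f = http_GET(makeurl(apiurl, ['source', 'openSUSE:Factory']))
#   root = ET.parse(f).getroot()
#
# The project name is a made-up placeholder.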
2911 def check_store_version(dir):
2914 versionfile = os.path.join(dir, store, '_osclib_version')
2916 v = open(versionfile).read().strip()
2921 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2922 if os.path.exists(os.path.join(dir, '.svn')):
2923 msg = msg + '\nTry svn instead of osc.'
2924 raise oscerr.NoWorkingCopy(msg)
2926 if v != __store_version__:
2927 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2928 # version is fine, no migration needed
2929 f = open(versionfile, 'w')
2930 f.write(__store_version__ + '\n')
2933 msg = 'The osc metadata of your working copy "%s"' % dir
2934 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2935 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2936 raise oscerr.WorkingCopyWrongVersion, msg
2939 def meta_get_packagelist(apiurl, prj, deleted=None):
2943 query['deleted'] = 1
2945 u = makeurl(apiurl, ['source', prj], query)
2947 root = ET.parse(f).getroot()
2948 return [ node.get('name') for node in root.findall('entry') ]
2951 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2952 """return a list of file names,
2953 or a list File() instances if verbose=True"""
2959 query['rev'] = revision
2961 query['rev'] = 'latest'
2963 u = makeurl(apiurl, ['source', prj, package], query=query)
2965 root = ET.parse(f).getroot()
2968 return [ node.get('name') for node in root.findall('entry') ]
2972 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2973 rev = root.get('rev')
2974 for node in root.findall('entry'):
2975 f = File(node.get('name'),
2977 int(node.get('size')),
2978 int(node.get('mtime')))
2984 def meta_get_project_list(apiurl, deleted=None):
2987 query['deleted'] = 1
2989 u = makeurl(apiurl, ['source'], query)
2991 root = ET.parse(f).getroot()
2992 return sorted([ node.get('name') for node in root if node.get('name')])
2995 def show_project_meta(apiurl, prj):
2996 url = makeurl(apiurl, ['source', prj, '_meta'])
2998 return f.readlines()
3001 def show_project_conf(apiurl, prj):
3002 url = makeurl(apiurl, ['source', prj, '_config'])
3004 return f.readlines()
3007 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
3008 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
3012 except urllib2.HTTPError, e:
3013 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
3017 def show_package_meta(apiurl, prj, pac, meta=False):
3022 # packages like _pattern and _project do not have a _meta file
3023 if pac.startswith('_pattern') or pac.startswith('_project'):
3026 url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
3029 return f.readlines()
3030 except urllib2.HTTPError, e:
3031 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
3035 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
3037 path.append('source')
3043 path.append('_attribute')
3045 path.append(attribute)
3048 query.append("with_default=1")
3050 query.append("with_project=1")
3051 url = makeurl(apiurl, path, query)
3054 return f.readlines()
3055 except urllib2.HTTPError, e:
3056 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
3060 def show_develproject(apiurl, prj, pac):
3061 m = show_package_meta(apiurl, prj, pac)
3063 return ET.fromstring(''.join(m)).find('devel').get('project')
3068 def show_package_disabled_repos(apiurl, prj, pac):
3069 m = show_package_meta(apiurl, prj, pac)
3070 #FIXME: does not work if all repos of a project are disabled and only some are enabled, since <disable/> is empty
3072 root = ET.fromstring(''.join(m))
3073 elm = root.find('build')
3074 r = [ node.get('repository') for node in elm.findall('disable')]
3080 def show_pattern_metalist(apiurl, prj):
3081 url = makeurl(apiurl, ['source', prj, '_pattern'])
3085 except urllib2.HTTPError, e:
3086 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
3088 r = [ node.get('name') for node in tree.getroot() ]
3093 def show_pattern_meta(apiurl, prj, pattern):
3094 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
3097 return f.readlines()
3098 except urllib2.HTTPError, e:
3099 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
3104 """metafile that can be manipulated and is stored back after manipulation."""
3105 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
3109 self.change_is_required = change_is_required
3110 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
3111 f = os.fdopen(fd, 'w')
3112 f.write(''.join(input))
3114 self.hash_orig = dgst(self.filename)
3117 if self.change_is_required and self.hash_orig == dgst(self.filename):
3118 print 'File unchanged. Not saving.'
3119 os.unlink(self.filename)
3122 print 'Sending meta data...'
3123 # don't do any exception handling... it's up to the caller what to do in case
3125 http_PUT(self.url, file=self.filename)
3126 os.unlink(self.filename)
3132 run_editor(self.filename)
3136 except urllib2.HTTPError, e:
3137 error_help = "%d" % e.code
3138 if e.headers.get('X-Opensuse-Errorcode'):
3139 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
3141 print >>sys.stderr, 'BuildService API error:', error_help
3142 # examine the error - we can't raise an exception because we might want
3145 if '<summary>' in data:
3146 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
3147 ri = raw_input('Try again? ([y/N]): ')
3148 if ri not in ['y', 'Y']:
3154 if os.path.exists(self.filename):
3155 print 'discarding %s' % self.filename
3156 os.unlink(self.filename)
3159 # different types of metadata
3160 metatypes = { 'prj': { 'path': 'source/%s/_meta',
3161 'template': new_project_templ,
3164 'pkg': { 'path' : 'source/%s/%s/_meta',
3165 'template': new_package_templ,
3168 'attribute': { 'path' : 'source/%s/%s/_meta',
3169 'template': new_attribute_templ,
3172 'prjconf': { 'path': 'source/%s/_config',
3176 'user': { 'path': 'person/%s',
3177 'template': new_user_template,
3180 'pattern': { 'path': 'source/%s/_pattern/%s',
3181 'template': new_pattern_template,
3186 def meta_exists(metatype,
3195 apiurl = conf.config['apiurl']
3196 url = make_meta_url(metatype, path_args, apiurl)
3198 data = http_GET(url).readlines()
3199 except urllib2.HTTPError, e:
3200 if e.code == 404 and create_new:
3201 data = metatypes[metatype]['template']
3203 data = StringIO(data % template_args).readlines()
3208 def make_meta_url(metatype, path_args=None, apiurl=None):
3212 apiurl = conf.config['apiurl']
3213 if metatype not in metatypes.keys():
3214 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
3215 path = metatypes[metatype]['path']
3218 path = path % path_args
3220 return makeurl(apiurl, [path])
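# Illustrative sketch (not part of the original code): for the 'pkg' metatype
# with path_args ('openSUSE:Factory', 'osc') this builds a URL like
#
#   https://api.opensuse.org/source/openSUSE:Factory/osc/_meta
#
# assuming the apiurl 'https://api.opensuse.org'; the project and package
# names are made-up placeholders.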
3223 def edit_meta(metatype,
3228 change_is_required=False,
3234 apiurl = conf.config['apiurl']
3236 data = meta_exists(metatype,
3239 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
3243 change_is_required = True
3245 url = make_meta_url(metatype, path_args, apiurl)
3246 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
3254 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, meta=False):
3257 query['rev'] = revision
3259 query['rev'] = 'latest'
3261 query['linkrev'] = linkrev
3262 elif conf.config['linkcontrol']:
3263 query['linkrev'] = 'base'
3269 query['emptylink'] = 1
3270 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
3273 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False):
3274 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta)
3275 return ET.fromstring(''.join(m)).get('srcmd5')
3278 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False):
3279 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta)
3281 # only source link packages have a <linkinfo> element.
3282 li_node = ET.fromstring(''.join(m)).find('linkinfo')
3290 raise oscerr.LinkExpandError(prj, pac, li.error)
3294 def show_upstream_rev(apiurl, prj, pac, meta=False):
3295 m = show_files_meta(apiurl, prj, pac, meta=meta)
3296 return ET.fromstring(''.join(m)).get('rev')
3299 def read_meta_from_spec(specfile, *args):
3300 import codecs, locale, re
3302 Read tags and sections from spec file. To read out
3303 a tag the passed argument mustn't end with a colon. To
3304 read out a section the passed argument must start with
3306 This method returns a dictionary which contains the
3310 if not os.path.isfile(specfile):
3311 raise oscerr.OscIOError(None, '\'%s\' is not a regular file' % specfile)
3314 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
3315 except UnicodeDecodeError:
3316 lines = open(specfile).readlines()
3323 if itm.startswith('%'):