1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.132git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
26 try:
27 from xml.etree import cElementTree as ET
28 except ImportError:
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
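# Example sketch (made-up URLs, only the overall shape matters): the two
# regexes above split strings like the following into named groups:
#
#   m = DISTURL_RE.match('obs://build.opensuse.org/openSUSE:Factory/standard/abc123-osc')
#   m.group('project'), m.group('repository')    # ('openSUSE:Factory', 'standard')
#   m.group('revision'), m.group('source')       # ('abc123', 'osc')
#
#   m = BUILDLOGURL_RE.match('https://api.opensuse.org/build/home:user/openSUSE_Factory/x86_64/osc/_log')
#   m.group('apiurl'), m.group('package')        # ('https://api.opensuse.org', 'osc')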
38 new_project_templ = """\
39 <project name="%(name)s">
41 <title></title> <!-- Short title of NewProject -->
42 <description></description>
43 <!-- This is for a longer description of the purpose of the project -->
45 <person role="maintainer" userid="%(user)s" />
46 <person role="bugowner" userid="%(user)s" />
47 <!-- remove this block to publish your packages on the mirrors -->
58 <!-- remove this comment to enable one or more build targets
60 <repository name="openSUSE_Factory">
61 <path project="openSUSE:Factory" repository="standard" />
65 <repository name="openSUSE_11.2">
66 <path project="openSUSE:11.2" repository="standard"/>
70 <repository name="openSUSE_11.1">
71 <path project="openSUSE:11.1" repository="standard"/>
75 <repository name="Fedora_12">
76 <path project="Fedora:12" repository="standard" />
80 <repository name="SLE_11">
81 <path project="SUSE:SLE-11" repository="standard" />
90 new_package_templ = """\
91 <package name="%(name)s">
93 <title></title> <!-- Title of package -->
95 <description></description> <!-- for long description -->
97 <!-- following roles are inherited from the parent project
98 <person role="maintainer" userid="%(user)s"/>
99 <person role="bugowner" userid="%(user)s"/>
102 <url>PUT_UPSTREAM_URL_HERE</url>
106 use one of the examples below to disable building of this package
107 on a certain architecture, in a certain repository,
108 or a combination thereof:
110 <disable arch="x86_64"/>
111 <disable repository="SUSE_SLE-10"/>
112 <disable repository="SUSE_SLE-10" arch="x86_64"/>
114 Possible sections where you can use the tags above:
124 Please have a look at:
125 http://en.opensuse.org/Restricted_formats
126 Packages containing formats listed there are NOT allowed to
127 be packaged in the openSUSE Buildservice and will be deleted!
134 new_attribute_templ = """\
136 <attribute namespace="" name="">
142 new_user_template = """\
144 <login>%(user)s</login>
145 <email>PUT_EMAIL_ADDRESS_HERE</email>
146 <realname>PUT_REAL_NAME_HERE</realname>
148 <project name="home:%(user)s"/>
164 new_pattern_template = """\
165 <!-- See https://gitorious.org/opensuse/libzypp/blobs/master/zypp/parser/yum/schema/patterns.rng -->
171 buildstatus_symbols = {'succeeded': '.',
173 'expansion error': 'U', # obsolete with OBS 2.0
187 # os.path.samefile is available only under Unix
188 def os_path_samefile(path1, path2):
190 return os.path.samefile(path1, path2)
192 return os.path.realpath(path1) == os.path.realpath(path2)
195 """represent a file, including its metadata"""
196 def __init__(self, name, md5, size, mtime, skipped=False):
201 self.skipped = skipped
209 """Source service content
212 """creates an empty serviceinfo instance"""
217 def read(self, serviceinfo_node, append=False):
218 """read in the source services <services> element passed as
221 if serviceinfo_node is None:
223 if not append or self.services is None:
225 services = serviceinfo_node.findall('service')
227 for service in services:
228 name = service.get('name')
229 mode = service.get('mode', None)
230 data = { 'name' : name, 'mode' : '' }
234 for param in service.findall('param'):
235 option = param.get('name', None)
239 name += " --" + option + " '" + value + "'"
240 data['command'] = name
241 self.services.append(data)
243 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
244 raise oscerr.APIError(msg)
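# Example sketch (hypothetical service and parameters): read() turns a
# _service file such as
#
#   <services>
#     <service name="download_url">
#       <param name="host">example.org</param>
#       <param name="path">/foo.tar.bz2</param>
#     </service>
#   </services>
#
# into entries of the form
#   {'name': 'download_url', 'mode': '',
#    'command': "download_url --host 'example.org' --path '/foo.tar.bz2'"}
# which execute() later runs from /usr/lib/obs/service/.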
246 def getProjectGlobalServices(self, apiurl, project, package):
247 # get all project-wide services in one file; we don't store it yet
248 u = makeurl(apiurl, ['source', project, package], query='cmd=getprojectservices')
251 root = ET.parse(f).getroot()
252 self.read(root, True)
253 self.project = project
254 self.package = package
255 except urllib2.HTTPError, e:
256 if e.code != 403 and e.code != 400:
259 def addVerifyFile(self, serviceinfo_node, filename):
262 f = open(filename, 'r')
263 digest = hashlib.sha256(f.read()).hexdigest()
267 s = ET.Element( "service", name="verify_file" )
268 ET.SubElement(s, "param", name="file").text = filename
269 ET.SubElement(s, "param", name="verifier").text = "sha256"
270 ET.SubElement(s, "param", name="checksum").text = digest
276 def addDownloadUrl(self, serviceinfo_node, url_string):
277 from urlparse import urlparse
278 url = urlparse( url_string )
279 protocol = url.scheme
284 s = ET.Element( "service", name="download_url" )
285 ET.SubElement(s, "param", name="protocol").text = protocol
286 ET.SubElement(s, "param", name="host").text = host
287 ET.SubElement(s, "param", name="path").text = path
292 def addGitUrl(self, serviceinfo_node, url_string):
294 s = ET.Element( "service", name="tar_scm" )
295 ET.SubElement(s, "param", name="url").text = url_string
296 ET.SubElement(s, "param", name="scm").text = "git"
300 def addRecompressTar(self, serviceinfo_node):
302 s = ET.Element( "service", name="recompress" )
303 ET.SubElement(s, "param", name="file").text = "*.tar"
304 ET.SubElement(s, "param", name="compression").text = "bz2"
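# Example sketch (hypothetical node and URL): the add*() helpers above build
# <service> elements for the passed <services> node, e.g.
#
#   services = ET.fromstring('<services/>')
#   si = Serviceinfo()
#   si.addGitUrl(services, 'git://example.org/foo.git')
#   si.addRecompressTar(services)
#   # 'services' now holds a tar_scm entry (scm=git) and a recompress entry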
308 def execute(self, dir, callmode = None, singleservice = None, verbose = None):
311 # cleanup existing generated files
312 for filename in os.listdir(dir):
313 if filename.startswith('_service:') or filename.startswith('_service_'):
314 os.unlink(os.path.join(dir, filename))
316 allservices = self.services or []
317 if singleservice and singleservice not in [s['name'] for s in allservices]:
318 # fall back to the manually specified singleservice if it is not part of the _service file
319 data = { 'name' : singleservice, 'command' : singleservice, 'mode' : '' }
322 # set environment when using OBS 2.3 or later
323 if self.project is not None:
324 os.putenv("OBS_SERVICE_PROJECT", self.project)
325 os.putenv("OBS_SERVICE_PACKAGE", self.package)
329 for service in allservices:
330 if singleservice and service['name'] != singleservice:
332 if service['mode'] == "disabled" and callmode != "disabled":
334 if service['mode'] != "disabled" and callmode == "disabled":
336 if service['mode'] != "trylocal" and service['mode'] != "localonly" and callmode == "trylocal":
338 call = service['command']
339 temp_dir = tempfile.mkdtemp()
340 name = call.split(None, 1)[0]
341 if not os.path.exists("/usr/lib/obs/service/"+name):
342 raise oscerr.PackageNotInstalled("obs-service-"+name)
343 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
344 if conf.config['verbose'] > 1 or verbose:
345 print "Run source service:", c
346 r = subprocess.call(c, shell=True)
348 print "Aborting: service call failed: " + c
349 # FIXME: addDownloadUrlService calls si.execute after
350 # updating _services.
353 if service['mode'] == "disabled" or service['mode'] == "trylocal" or service['mode'] == "localonly" or callmode == "local" or callmode == "trylocal":
354 for filename in os.listdir(temp_dir):
355 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, filename) )
357 for filename in os.listdir(temp_dir):
358 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, "_service:"+name+":"+filename) )
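# Example sketch (hypothetical package directory, 'set_version' used only as an
# example service name): a typical local service run looks like
#
#   si = Serviceinfo()
#   si.read(ET.parse('_service').getroot())
#   si.execute('.')                                          # run all services
#   si.execute('.', callmode='local', singleservice='set_version')
#
# Generated files are moved back as '_service:<name>:<file>' unless the service
# runs in 'trylocal'/'localonly'/'disabled' mode (or callmode forces a local
# run), in which case the plain file names are kept.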
364 """linkinfo metadata (which is part of the xml representing a directory
367 """creates an empty linkinfo instance"""
377 def read(self, linkinfo_node):
378 """read in the linkinfo metadata from the <linkinfo> element passed as
380 If the passed element is None, the method does nothing.
382 if linkinfo_node is None:
384 self.project = linkinfo_node.get('project')
385 self.package = linkinfo_node.get('package')
386 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
387 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
388 self.srcmd5 = linkinfo_node.get('srcmd5')
389 self.error = linkinfo_node.get('error')
390 self.rev = linkinfo_node.get('rev')
391 self.baserev = linkinfo_node.get('baserev')
394 """returns True if the linkinfo is not empty, otherwise False"""
395 if self.xsrcmd5 or self.lsrcmd5:
399 def isexpanded(self):
400 """returns True if the package is an expanded link"""
401 if self.lsrcmd5 and not self.xsrcmd5:
406 """returns True if the link is in error state (could not be applied)"""
412 """return an informative string representation"""
413 if self.islink() and not self.isexpanded():
414 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
415 % (self.project, self.package, self.xsrcmd5, self.rev)
416 elif self.islink() and self.isexpanded():
418 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
419 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
421 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
422 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
427 # http://effbot.org/zone/element-lib.htm#prettyprint
428 def xmlindent(elem, level=0):
431 if not elem.text or not elem.text.strip():
434 xmlindent(e, level+1)
435 if not e.tail or not e.tail.strip():
437 if not e.tail or not e.tail.strip():
440 if level and (not elem.tail or not elem.tail.strip()):
444 """represent a project directory, holding packages"""
445 REQ_STOREFILES = ('_project', '_apiurl')
446 if conf.config['do_package_tracking']:
447 REQ_STOREFILES += ('_packages',)
448 def __init__(self, dir, getPackageList=True, progress_obj=None, wc_check=True):
451 self.absdir = os.path.abspath(dir)
452 self.progress_obj = progress_obj
454 self.name = store_read_project(self.dir)
455 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
459 dirty_files = self.wc_check()
461 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
462 'Please run \'osc repairwc %s\' and check the state\n' \
463 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
464 raise oscerr.WorkingCopyInconsistent(self.name, None, dirty_files, msg)
467 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
469 self.pacs_available = []
471 if conf.config['do_package_tracking']:
472 self.pac_root = self.read_packages().getroot()
473 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
474 self.pacs_excluded = [ i for i in os.listdir(self.dir)
475 for j in conf.config['exclude_glob']
476 if fnmatch.fnmatch(i, j) ]
477 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
478 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
479 # in the self.pacs_broken list
480 self.pacs_broken = []
481 for p in self.pacs_have:
482 if not os.path.isdir(os.path.join(self.absdir, p)):
483 # all states will be replaced with the '!'-state
484 # (except it is already marked as deleted ('D'-state))
485 self.pacs_broken.append(p)
487 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
489 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
494 for fname in Project.REQ_STOREFILES:
495 if not os.path.exists(os.path.join(self.absdir, store, fname)):
496 dirty_files.append(fname)
499 def wc_repair(self, apiurl=None):
501 if not os.path.exists(os.path.join(self.dir, store, '_apiurl')) or apiurl:
503 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
504 'no \'apiurl\' was passed to wc_repair'
505 # hmm should we raise oscerr.WrongArgs?
506 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
508 conf.parse_apisrv_url(None, apiurl)
509 store_write_apiurl(self.dir, apiurl)
510 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
512 def checkout_missing_pacs(self, expand_link=False):
513 for pac in self.pacs_missing:
515 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
516 # pac is not under version control but a local file/dir exists
517 msg = 'can\'t add package \'%s\': Object already exists' % pac
518 raise oscerr.PackageExists(self.name, pac, msg)
520 print 'checking out new package %s' % pac
521 checkout_package(self.apiurl, self.name, pac, \
522 pathname=getTransActPath(os.path.join(self.dir, pac)), \
523 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
525 def status(self, pac):
526 exists = os.path.exists(os.path.join(self.absdir, pac))
527 st = self.get_state(pac)
528 if st is None and exists:
531 raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % pac)
532 elif st in ('A', ' ') and not exists:
534 elif st == 'D' and not exists:
539 def get_status(self, *exclude_states):
541 for pac in self.pacs_have:
542 st = self.status(pac)
543 if not st in exclude_states:
544 res.append((st, pac))
545 if not '?' in exclude_states:
546 res.extend([('?', pac) for pac in self.pacs_unvers])
549 def get_pacobj(self, pac, *pac_args, **pac_kwargs):
551 st = self.status(pac)
552 if st in ('?', '!') or (st == 'D' and not os.path.exists(os.path.join(self.dir, pac))):
554 return Package(os.path.join(self.dir, pac), *pac_args, **pac_kwargs)
555 except oscerr.OscIOError:
558 def set_state(self, pac, state):
559 node = self.get_package_node(pac)
561 self.new_package_entry(pac, state)
563 node.set('state', state)
565 def get_package_node(self, pac):
566 for node in self.pac_root.findall('package'):
567 if pac == node.get('name'):
571 def del_package_node(self, pac):
572 for node in self.pac_root.findall('package'):
573 if pac == node.get('name'):
574 self.pac_root.remove(node)
576 def get_state(self, pac):
577 node = self.get_package_node(pac)
579 return node.get('state')
583 def new_package_entry(self, name, state):
584 ET.SubElement(self.pac_root, 'package', name=name, state=state)
586 def read_packages(self):
589 packages_file = os.path.join(self.absdir, store, '_packages')
590 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
591 return ET.parse(packages_file)
593 # scan project for existing packages and migrate them
595 for data in os.listdir(self.dir):
596 pac_dir = os.path.join(self.absdir, data)
597 # we cannot use self.pacs_available because we cannot guarantee that the package list
598 # was fetched from the server
599 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
600 and Package(pac_dir).name == data:
601 cur_pacs.append(ET.Element('package', name=data, state=' '))
602 store_write_initial_packages(self.absdir, self.name, cur_pacs)
603 return ET.parse(os.path.join(self.absdir, store, '_packages'))
605 def write_packages(self):
606 xmlindent(self.pac_root)
607 store_write_string(self.absdir, '_packages', ET.tostring(self.pac_root))
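# Example sketch (hypothetical project and package names): the '_packages'
# file written above is a small XML document tracking per-package states,
# roughly
#
#   <project name="home:user">
#     <package name="foo" state=" "/>
#     <package name="bar" state="A"/>
#   </project>
#
# using the same one-character states handled by set_state()/get_state().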
609 def addPackage(self, pac):
611 for i in conf.config['exclude_glob']:
612 if fnmatch.fnmatch(pac, i):
613 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
614 raise oscerr.OscIOError(None, msg)
615 state = self.get_state(pac)
616 if state is None or state == 'D':
617 self.new_package_entry(pac, 'A')
618 self.write_packages()
619 # sometimes the new pac doesn't exist in the list because
620 # it would take too much time to update all data structs regularly
621 if pac in self.pacs_unvers:
622 self.pacs_unvers.remove(pac)
624 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
626 def delPackage(self, pac, force = False):
627 state = self.get_state(pac.name)
629 if state == ' ' or state == 'D':
631 for filename in pac.filenamelist + pac.filenamelist_unvers:
632 filestate = pac.status(filename)
633 if filestate == 'M' or filestate == 'C' or \
634 filestate == 'A' or filestate == '?':
637 del_files.append(filename)
638 if can_delete or force:
639 for filename in del_files:
640 pac.delete_localfile(filename)
641 if pac.status(filename) != '?':
642 # this is not really necessary
643 pac.put_on_deletelist(filename)
644 print statfrmt('D', getTransActPath(os.path.join(pac.dir, filename)))
645 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
646 pac.write_deletelist()
647 self.set_state(pac.name, 'D')
648 self.write_packages()
650 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
653 delete_dir(pac.absdir)
654 self.del_package_node(pac.name)
655 self.write_packages()
656 print statfrmt('D', pac.name)
658 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
660 print 'package is not under version control'
662 print 'unsupported state'
664 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
667 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
669 # we need to make sure that the _packages file will be written (even if an exception
672 # update complete project
673 # packages which no longer exist upstream
674 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
676 for pac in upstream_del:
677 if self.status(pac) != '!' or pac in self.pacs_broken:
678 p = Package(os.path.join(self.dir, pac))
679 self.delPackage(p, force = True)
680 delete_storedir(p.storedir)
685 self.pac_root.remove(self.get_package_node(p.name))
686 self.pacs_have.remove(pac)
688 for pac in self.pacs_have:
689 state = self.get_state(pac)
690 if pac in self.pacs_broken:
691 if self.get_state(pac) != 'A':
692 checkout_package(self.apiurl, self.name, pac,
693 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
694 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
697 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
699 if expand_link and p.islink() and not p.isexpanded():
702 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
704 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
707 rev = p.linkinfo.xsrcmd5
708 print 'Expanding to rev', rev
709 elif unexpand_link and p.islink() and p.isexpanded():
710 rev = p.linkinfo.lsrcmd5
711 print 'Unexpanding to rev', rev
712 elif p.islink() and p.isexpanded():
714 print 'Updating %s' % p.name
715 p.update(rev, service_files)
719 # TODO: Package::update has to be fixed to behave like svn does
720 if pac in self.pacs_broken:
721 checkout_package(self.apiurl, self.name, pac,
722 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
723 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
725 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
726 elif state == 'A' and pac in self.pacs_available:
727 # file/dir called pac already exists and is under version control
728 msg = 'can\'t add package \'%s\': Object already exists' % pac
729 raise oscerr.PackageExists(self.name, pac, msg)
734 print 'unexpected state.. package \'%s\'' % pac
736 self.checkout_missing_pacs(expand_link=not unexpand_link)
738 self.write_packages()
740 def commit(self, pacs = (), msg = '', files = {}, verbose = False, skip_local_service_run = False):
745 if pac in files:
747 state = self.get_state(pac)
749 self.commitNewPackage(pac, msg, todo, verbose=verbose, skip_local_service_run=skip_local_service_run)
751 self.commitDelPackage(pac)
753 # display the correct dir when sending the changes
754 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
757 p = Package(os.path.join(self.dir, pac))
759 p.commit(msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
760 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
761 print 'osc: \'%s\' is not under version control' % pac
762 elif pac in self.pacs_broken:
763 print 'osc: \'%s\' package not found' % pac
765 self.commitExtPackage(pac, msg, todo, verbose=verbose)
767 self.write_packages()
769 # if we have packages marked as '!' we cannot commit
770 for pac in self.pacs_broken:
771 if self.get_state(pac) != 'D':
772 msg = 'commit failed: package \'%s\' is missing' % pac
773 raise oscerr.PackageMissing(self.name, pac, msg)
775 for pac in self.pacs_have:
776 state = self.get_state(pac)
779 Package(os.path.join(self.dir, pac)).commit(msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
781 self.commitDelPackage(pac)
783 self.commitNewPackage(pac, msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
785 self.write_packages()
787 def commitNewPackage(self, pac, msg = '', files = [], verbose = False, skip_local_service_run = False):
788 """creates and commits a new package if it does not exist on the server"""
789 if pac in self.pacs_available:
790 print 'package \'%s\' already exists' % pac
792 user = conf.get_apiurl_usr(self.apiurl)
793 edit_meta(metatype='pkg',
794 path_args=(quote_plus(self.name), quote_plus(pac)),
799 # display the correct dir when sending the changes
801 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
805 p = Package(os.path.join(self.dir, pac))
807 print statfrmt('Sending', os.path.normpath(p.dir))
808 p.commit(msg=msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
809 self.set_state(pac, ' ')
812 def commitDelPackage(self, pac):
813 """deletes a package on the server and in the working copy"""
815 # display the correct dir when sending the changes
816 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
819 pac_dir = os.path.join(self.dir, pac)
820 p = Package(os.path.join(self.dir, pac))
821 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
822 delete_storedir(p.storedir)
828 pac_dir = os.path.join(self.dir, pac)
829 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
830 print statfrmt('Deleting', getTransActPath(pac_dir))
831 delete_package(self.apiurl, self.name, pac)
832 self.del_package_node(pac)
834 def commitExtPackage(self, pac, msg, files = [], verbose=False):
835 """commits a package from an external project"""
836 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
839 pac_path = os.path.join(self.dir, pac)
841 project = store_read_project(pac_path)
842 package = store_read_package(pac_path)
843 apiurl = store_read_apiurl(pac_path, defaulturl=False)
844 if not meta_exists(metatype='pkg',
845 path_args=(quote_plus(project), quote_plus(package)),
846 template_args=None, create_new=False, apiurl=apiurl):
847 user = conf.get_apiurl_usr(self.apiurl)
848 edit_meta(metatype='pkg',
849 path_args=(quote_plus(project), quote_plus(package)),
850 template_args=({'name': pac, 'user': user}), apiurl=apiurl)
851 p = Package(pac_path)
853 p.commit(msg=msg, verbose=verbose)
857 r.append('*****************************************************')
858 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
859 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
860 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
861 r.append('*****************************************************')
865 def init_project(apiurl, dir, project, package_tracking=True, getPackageList=True, progress_obj=None, wc_check=True):
868 if not os.path.exists(dir):
869 # use makedirs (checkout_no_colon config option might be enabled)
871 elif not os.path.isdir(dir):
872 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
873 if os.path.exists(os.path.join(dir, store)):
874 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
876 os.mkdir(os.path.join(dir, store))
878 store_write_project(dir, project)
879 store_write_apiurl(dir, apiurl)
881 store_write_initial_packages(dir, project, [])
882 return Project(dir, getPackageList, progress_obj, wc_check)
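# Example sketch (hypothetical values; note the argument order is apiurl, dir,
# project): checkout code can bootstrap a project working copy roughly like
#
#   prj = init_project('https://api.opensuse.org', 'home:user', 'home:user')
#   prj.checkout_missing_pacs()
#
# which creates 'dir' if needed, writes the .osc/ store files and returns a
# Project instance.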
886 """represent a package (its directory) and read/keep/write its metadata"""
888 # should _meta be a required file?
889 REQ_STOREFILES = ('_project', '_package', '_apiurl', '_files', '_osclib_version')
890 OPT_STOREFILES = ('_to_be_added', '_to_be_deleted', '_in_conflict', '_in_update',
891 '_in_commit', '_meta', '_meta_mode', '_frozenlink', '_pulled', '_linkrepair',
892 '_size_limit', '_commit_msg')
894 def __init__(self, workingdir, progress_obj=None, size_limit=None, wc_check=True):
897 self.dir = workingdir
898 self.absdir = os.path.abspath(self.dir)
899 self.storedir = os.path.join(self.absdir, store)
900 self.progress_obj = progress_obj
901 self.size_limit = size_limit
902 if size_limit == 0:  # a size limit of 0 means "no limit"
903 self.size_limit = None
905 check_store_version(self.dir)
907 self.prjname = store_read_project(self.dir)
908 self.name = store_read_package(self.dir)
909 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
911 self.update_datastructs()
914 dirty_files = self.wc_check()
916 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
917 'Please run \'osc repairwc %s\' (Note this might _remove_\n' \
918 'files from the .osc/ dir). Please check the state\n' \
919 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
920 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, dirty_files, msg)
926 for fname in self.filenamelist:
927 if not os.path.exists(os.path.join(self.storedir, fname)) and not fname in self.skipped:
928 dirty_files.append(fname)
929 for fname in Package.REQ_STOREFILES:
930 if not os.path.isfile(os.path.join(self.storedir, fname)):
931 dirty_files.append(fname)
932 for fname in os.listdir(self.storedir):
933 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
934 fname.startswith('_build'):
936 elif fname in self.filenamelist and fname in self.skipped:
937 dirty_files.append(fname)
938 elif not fname in self.filenamelist:
939 dirty_files.append(fname)
940 for fname in self.to_be_deleted[:]:
941 if not fname in self.filenamelist:
942 dirty_files.append(fname)
943 for fname in self.in_conflict[:]:
944 if not fname in self.filenamelist:
945 dirty_files.append(fname)
948 def wc_repair(self, apiurl=None):
949 if not os.path.exists(os.path.join(self.storedir, '_apiurl')) or apiurl:
951 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
952 'no \'apiurl\' was passed to wc_repair'
953 # hmm should we raise oscerr.WrongArgs?
954 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
956 conf.parse_apisrv_url(None, apiurl)
957 store_write_apiurl(self.dir, apiurl)
958 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
959 # all files which are present in the filelist have to exist in the storedir
960 for f in self.filelist:
961 # XXX: should we also check the md5?
962 if not os.path.exists(os.path.join(self.storedir, f.name)) and not f.name in self.skipped:
963 # if get_source_file fails we're screwed up...
964 get_source_file(self.apiurl, self.prjname, self.name, f.name,
965 targetfilename=os.path.join(self.storedir, f.name), revision=self.rev,
967 for fname in os.listdir(self.storedir):
968 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
969 fname.startswith('_build'):
971 elif not fname in self.filenamelist or fname in self.skipped:
972 # this file does not belong to the storedir so remove it
973 os.unlink(os.path.join(self.storedir, fname))
974 for fname in self.to_be_deleted[:]:
975 if not fname in self.filenamelist:
976 self.to_be_deleted.remove(fname)
977 self.write_deletelist()
978 for fname in self.in_conflict[:]:
979 if not fname in self.filenamelist:
980 self.in_conflict.remove(fname)
981 self.write_conflictlist()
984 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
985 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
988 def addfile(self, n):
989 if not os.path.exists(os.path.join(self.absdir, n)):
990 raise oscerr.OscIOError(None, 'error: file \'%s\' does not exist' % n)
991 if n in self.to_be_deleted:
992 self.to_be_deleted.remove(n)
993 # self.delete_storefile(n)
994 self.write_deletelist()
995 elif n in self.filenamelist or n in self.to_be_added:
996 raise oscerr.PackageFileConflict(self.prjname, self.name, n, 'osc: warning: \'%s\' is already under version control' % n)
997 # shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
999 pathname = os.path.join(self.dir, n)
1002 self.to_be_added.append(n)
1003 self.write_addlist()
1004 print statfrmt('A', pathname)
1006 def delete_file(self, n, force=False):
1007 """deletes a file if possible and marks the file as deleted"""
1010 state = self.status(n)
1011 except IOError, ioe:
1014 if state in ['?', 'A', 'M', 'R', 'C'] and not force:
1015 return (False, state)
1016 # special handling for skipped files: if file exists, simply delete it
1018 exists = os.path.exists(os.path.join(self.dir, n))
1019 self.delete_localfile(n)
1020 return (exists, 'S')
1022 self.delete_localfile(n)
1023 was_added = n in self.to_be_added
1024 if state in ('A', 'R') or (state == '!' and was_added):
1025 self.to_be_added.remove(n)
1026 self.write_addlist()
1028 # don't remove "merge files" (*.r, *.mine...)
1029 # that's why we don't use clear_from_conflictlist
1030 self.in_conflict.remove(n)
1031 self.write_conflictlist()
1032 if not state in ('A', '?') and not (state == '!' and was_added):
1033 self.put_on_deletelist(n)
1034 self.write_deletelist()
1035 return (True, state)
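# Example sketch (hypothetical caller): delete_file() returns a (deleted,
# state) tuple, so command-line code can report why nothing happened, e.g.
#
#   ok, state = pac.delete_file('foo.patch')
#   if not ok:
#       print 'file has state %s, use --force to delete it anyway' % state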
1037 def delete_storefile(self, n):
1038 try: os.unlink(os.path.join(self.storedir, n))
1041 def delete_localfile(self, n):
1042 try: os.unlink(os.path.join(self.dir, n))
1045 def put_on_deletelist(self, n):
1046 if n not in self.to_be_deleted:
1047 self.to_be_deleted.append(n)
1049 def put_on_conflictlist(self, n):
1050 if n not in self.in_conflict:
1051 self.in_conflict.append(n)
1053 def put_on_addlist(self, n):
1054 if n not in self.to_be_added:
1055 self.to_be_added.append(n)
1057 def clear_from_conflictlist(self, n):
1058 """delete an entry from the file, and remove the file if it would be empty"""
1059 if n in self.in_conflict:
1061 filename = os.path.join(self.dir, n)
1062 storefilename = os.path.join(self.storedir, n)
1063 myfilename = os.path.join(self.dir, n + '.mine')
1064 if self.islinkrepair() or self.ispulled():
1065 upfilename = os.path.join(self.dir, n + '.new')
1067 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1070 os.unlink(myfilename)
1071 # the working copy may have been updated in the meantime, so the .r* file
1072 # may no longer exist; in that case we don't care
1073 os.unlink(upfilename)
1074 if self.islinkrepair() or self.ispulled():
1075 os.unlink(os.path.join(self.dir, n + '.old'))
1079 self.in_conflict.remove(n)
1081 self.write_conflictlist()
1083 # XXX: this isn't used at all
1084 def write_meta_mode(self):
1085 # XXX: the "elif" is somewhat contradictory: with both the current and the old
1086 # implementation it is not possible to leave meta mode again (unless pac.meta
1087 # is modified directly, which is really ugly)
1089 store_write_string(self.absdir, '_meta_mode', '')
1090 elif self.ismetamode():
1091 os.unlink(os.path.join(self.storedir, '_meta_mode'))
1093 def write_sizelimit(self):
1094 if self.size_limit and self.size_limit <= 0:
1096 os.unlink(os.path.join(self.storedir, '_size_limit'))
1100 store_write_string(self.absdir, '_size_limit', str(self.size_limit) + '\n')
1102 def write_addlist(self):
1103 self.__write_storelist('_to_be_added', self.to_be_added)
1105 def write_deletelist(self):
1106 self.__write_storelist('_to_be_deleted', self.to_be_deleted)
1108 def delete_source_file(self, n):
1109 """delete a local source file"""
1110 self.delete_localfile(n)
1111 self.delete_storefile(n)
1113 def delete_remote_source_file(self, n):
1114 """delete a remote source file (i.e. on the server)"""
1115 query = 'rev=upload'
1116 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1119 def put_source_file(self, n, copy_only=False):
1120 cdir = os.path.join(self.storedir, '_in_commit')
1122 if not os.path.isdir(cdir):
1124 query = 'rev=repository'
1125 tmpfile = os.path.join(cdir, n)
1126 shutil.copyfile(os.path.join(self.dir, n), tmpfile)
1127 # escaping '+' in the URL path (note: not in the URL query string) is
1128 # only a workaround for ruby on rails, which swallows it otherwise
1130 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1131 http_PUT(u, file = os.path.join(self.dir, n))
1132 os.rename(tmpfile, os.path.join(self.storedir, n))
1134 if os.path.isdir(cdir):
1136 if n in self.to_be_added:
1137 self.to_be_added.remove(n)
1139 def __generate_commitlist(self, todo_send):
1140 root = ET.Element('directory')
1141 keys = todo_send.keys()
1144 ET.SubElement(root, 'entry', name=i, md5=todo_send[i])
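# Example sketch (placeholder md5 values): for todo_send like
# {'foo.spec': '<md5>', 'foo.tar.bz2': '<md5>'} the generated commit filelist
# looks like
#
#   <directory>
#     <entry name="foo.spec" md5="<md5>"/>
#     <entry name="foo.tar.bz2" md5="<md5>"/>
#   </directory>
#
# __send_commitlog() POSTs it with cmd=commitfilelist and the server replies
# with the entries it is still missing.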
1147 def __send_commitlog(self, msg, local_filelist):
1148 """send the commitlog and the local filelist to the server"""
1149 query = {'cmd' : 'commitfilelist',
1150 'user' : conf.get_apiurl_usr(self.apiurl),
1152 if self.islink() and self.isexpanded():
1153 query['keeplink'] = '1'
1154 if conf.config['linkcontrol'] or self.isfrozen():
1155 query['linkrev'] = self.linkinfo.srcmd5
1157 query['repairlink'] = '1'
1158 query['linkrev'] = self.get_pulled_srcmd5()
1159 if self.islinkrepair():
1160 query['repairlink'] = '1'
1161 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
1162 f = http_POST(u, data=ET.tostring(local_filelist))
1163 root = ET.parse(f).getroot()
1166 def __get_todo_send(self, server_filelist):
1167 """parse todo from a previous __send_commitlog call"""
1168 error = server_filelist.get('error')
1171 elif error != 'missing':
1172 raise oscerr.PackageInternalError(self.prjname, self.name,
1173 '__get_todo_send: unexpected \'error\' attr: \'%s\'' % error)
1175 for n in server_filelist.findall('entry'):
1176 name = n.get('name')
1178 raise oscerr.APIError('missing \'name\' attribute:\n%s\n' % ET.tostring(server_filelist))
1179 todo.append(n.get('name'))
1182 def commit(self, msg='', verbose=False, skip_local_service_run=False):
1183 # commit only if the upstream revision is the same as the working copy's
1184 upstream_rev = self.latest_rev()
1185 if self.rev != upstream_rev:
1186 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
1188 if not skip_local_service_run:
1189 r = self.run_source_services(mode="trylocal", verbose=verbose)
1191 raise oscerr.ServiceRuntimeError(r)
1194 self.todo = [i for i in self.to_be_added if not i in self.filenamelist] + self.filenamelist
1196 pathn = getTransActPath(self.dir)
1201 for filename in self.filenamelist + [i for i in self.to_be_added if not i in self.filenamelist]:
1202 if filename.startswith('_service:') or filename.startswith('_service_'):
1204 st = self.status(filename)
1206 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
1208 elif filename in self.todo:
1209 if st in ('A', 'R', 'M'):
1210 todo_send[filename] = dgst(os.path.join(self.absdir, filename))
1211 real_send.append(filename)
1212 print statfrmt('Sending', os.path.join(pathn, filename))
1213 elif st in (' ', '!', 'S'):
1214 if st == '!' and filename in self.to_be_added:
1215 print 'file \'%s\' is marked as \'A\' but does not exist' % filename
1217 f = self.findfilebyname(filename)
1219 raise oscerr.PackageInternalError(self.prjname, self.name,
1220 'error: file \'%s\' with state \'%s\' is not known by meta' \
1222 todo_send[filename] = f.md5
1224 todo_delete.append(filename)
1225 print statfrmt('Deleting', os.path.join(pathn, filename))
1226 elif st in ('R', 'M', 'D', ' ', '!', 'S'):
1227 # ignore missing new file (it's not part of the current commit)
1228 if st == '!' and filename in self.to_be_added:
1230 f = self.findfilebyname(filename)
1232 raise oscerr.PackageInternalError(self.prjname, self.name,
1233 'error: file \'%s\' with state \'%s\' is not known by meta' \
1235 todo_send[filename] = f.md5
1237 if not real_send and not todo_delete and not self.islinkrepair() and not self.ispulled():
1238 print 'nothing to do for package %s' % self.name
1241 print 'Transmitting file data ',
1242 filelist = self.__generate_commitlist(todo_send)
1243 sfilelist = self.__send_commitlog(msg, filelist)
1244 send = self.__get_todo_send(sfilelist)
1245 real_send = [i for i in real_send if not i in send]
1246 # abort after 3 tries
1248 while len(send) and tries:
1249 for filename in send[:]:
1250 sys.stdout.write('.')
1252 self.put_source_file(filename)
1253 send.remove(filename)
1255 sfilelist = self.__send_commitlog(msg, filelist)
1256 send = self.__get_todo_send(sfilelist)
1258 raise oscerr.PackageInternalError(self.prjname, self.name,
1259 'server does not accept filelist:\n%s\nmissing:\n%s\n' \
1260 % (ET.tostring(filelist), ET.tostring(sfilelist)))
1261 # these files already exist on the server
1262 # just copy them into the storedir
1263 for filename in real_send:
1264 self.put_source_file(filename, copy_only=True)
1266 self.rev = sfilelist.get('rev')
1268 print 'Committed revision %s.' % self.rev
1271 os.unlink(os.path.join(self.storedir, '_pulled'))
1272 if self.islinkrepair():
1273 os.unlink(os.path.join(self.storedir, '_linkrepair'))
1274 self.linkrepair = False
1275 # XXX: mark package as invalid?
1276 print 'The source link has been repaired. This directory can now be removed.'
1278 if self.islink() and self.isexpanded():
1280 li.read(sfilelist.find('linkinfo'))
1281 if li.xsrcmd5 is None:
1282 raise oscerr.APIError('linkinfo has no xsrcmd5 attr:\n%s\n' % ET.tostring(sfilelist))
1283 sfilelist = ET.fromstring(self.get_files_meta(revision=li.xsrcmd5))
1284 for i in sfilelist.findall('entry'):
1285 if i.get('name') in self.skipped:
1286 i.set('skipped', 'true')
1287 store_write_string(self.absdir, '_files', ET.tostring(sfilelist) + '\n')
1288 for filename in todo_delete:
1289 self.to_be_deleted.remove(filename)
1290 self.delete_storefile(filename)
1291 self.write_deletelist()
1292 self.write_addlist()
1293 self.update_datastructs()
1295 print_request_list(self.apiurl, self.prjname, self.name)
1297 # FIXME: add testcases for this codepath
1298 sinfo = sfilelist.find('serviceinfo')
1299 if sinfo is not None:
1300 print 'Waiting for server side source service run'
1301 u = makeurl(self.apiurl, ['source', self.prjname, self.name])
1302 while sinfo is not None and sinfo.get('code') == 'running':
1303 sys.stdout.write('.')
1305 # does it make sense to add some delay?
1306 sfilelist = ET.fromstring(http_GET(u).read())
1307 # if sinfo is None, another commit might have occurred in the meantime
1308 sinfo = sfilelist.find('serviceinfo')
1310 rev=self.latest_rev()
1311 self.update(rev=rev)
1313 def __write_storelist(self, name, data):
1316 os.unlink(os.path.join(self.storedir, name))
1320 store_write_string(self.absdir, name, '%s\n' % '\n'.join(data))
1322 def write_conflictlist(self):
1323 self.__write_storelist('_in_conflict', self.in_conflict)
1325 def updatefile(self, n, revision, mtime=None):
1326 filename = os.path.join(self.dir, n)
1327 storefilename = os.path.join(self.storedir, n)
1328 origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
1329 origfile = os.path.join(self.storedir, '_in_update', n)
1330 if os.path.isfile(filename):
1331 shutil.copyfile(filename, origfile_tmp)
1332 os.rename(origfile_tmp, origfile)
1336 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=storefilename,
1337 revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1339 shutil.copyfile(storefilename, filename)
1340 if origfile is not None:
1343 def mergefile(self, n, revision, mtime=None):
1344 filename = os.path.join(self.dir, n)
1345 storefilename = os.path.join(self.storedir, n)
1346 myfilename = os.path.join(self.dir, n + '.mine')
1347 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1348 origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
1349 origfile = os.path.join(self.storedir, '_in_update', n)
1350 shutil.copyfile(filename, origfile_tmp)
1351 os.rename(origfile_tmp, origfile)
1352 os.rename(filename, myfilename)
1354 get_source_file(self.apiurl, self.prjname, self.name, n,
1355 revision=revision, targetfilename=upfilename,
1356 progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1358 if binary_file(myfilename) or binary_file(upfilename):
1360 shutil.copyfile(upfilename, filename)
1361 shutil.copyfile(upfilename, storefilename)
1363 self.in_conflict.append(n)
1364 self.write_conflictlist()
1368 # diff3 OPTIONS... MINE OLDER YOURS
1369 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1370 # run the command through the shell so that the output redirection in merge_cmd works
1371 ret = subprocess.call(merge_cmd, shell=True)
1373 # "An exit status of 0 means `diff3' was successful, 1 means some
1374 # conflicts were found, and 2 means trouble."
1376 # merge was successful... clean up
1377 shutil.copyfile(upfilename, storefilename)
1378 os.unlink(upfilename)
1379 os.unlink(myfilename)
1383 # unsuccessful merge
1384 shutil.copyfile(upfilename, storefilename)
1386 self.in_conflict.append(n)
1387 self.write_conflictlist()
1390 raise oscerr.ExtRuntimeError('diff3 failed with exit code: %s' % ret, merge_cmd)
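# Example sketch (made-up file name and revision): for a conflicting update of
# foo.spec at working copy revision 42 the merge command built above is
#
#   diff3 -m -E foo.spec.mine .osc/foo.spec foo.spec.r42 > foo.spec
#
# Exit status 0 keeps the merged result and removes the helper files; 1 keeps
# the .mine/.r42 copies and marks the file as conflicted ('C').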
1392 def update_local_filesmeta(self, revision=None):
1394 Update the local _files file in the store.
1395 It is replaced with the version pulled from upstream.
1397 meta = self.get_files_meta(revision=revision)
1398 store_write_string(self.absdir, '_files', meta + '\n')
1400 def get_files_meta(self, revision='latest', skip_service=True):
1401 fm = show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, meta=self.meta)
1402 # look for "too large" files according to size limit and mark them
1403 root = ET.fromstring(fm)
1404 for e in root.findall('entry'):
1405 size = e.get('size')
1406 if (size and self.size_limit and int(size) > self.size_limit) \
1407 or (skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_'))):
1408 e.set('skipped', 'true')
1409 return ET.tostring(root)
1411 def update_datastructs(self):
1413 Update the internal data structures if the local _files
1414 file has changed (e.g. update_local_filesmeta() has been
1418 files_tree = read_filemeta(self.dir)
1419 files_tree_root = files_tree.getroot()
1421 self.rev = files_tree_root.get('rev')
1422 self.srcmd5 = files_tree_root.get('srcmd5')
1424 self.linkinfo = Linkinfo()
1425 self.linkinfo.read(files_tree_root.find('linkinfo'))
1427 self.filenamelist = []
1430 for node in files_tree_root.findall('entry'):
1432 f = File(node.get('name'),
1434 int(node.get('size')),
1435 int(node.get('mtime')))
1436 if node.get('skipped'):
1437 self.skipped.append(f.name)
1440 # okay, a very old version of _files, which didn't contain any metadata yet...
1441 f = File(node.get('name'), '', 0, 0)
1442 self.filelist.append(f)
1443 self.filenamelist.append(f.name)
1445 self.to_be_added = read_tobeadded(self.absdir)
1446 self.to_be_deleted = read_tobedeleted(self.absdir)
1447 self.in_conflict = read_inconflict(self.absdir)
1448 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1449 self.size_limit = read_sizelimit(self.dir)
1450 self.meta = self.ismetamode()
1452 # gather unversioned files, ignoring anything that matches the exclude_glob patterns
1454 for i in os.listdir(self.dir):
1455 for j in conf.config['exclude_glob']:
1456 if fnmatch.fnmatch(i, j):
1457 self.excluded.append(i)
1459 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1460 if i not in self.excluded
1461 if i not in self.filenamelist ]
1464 """tells us if the package is a link (has 'linkinfo').
1465 A package with linkinfo is a package which links to another package.
1466 Returns True if the package is a link, otherwise False."""
1467 return self.linkinfo.islink()
1469 def isexpanded(self):
1470 """tells us if the package is a link which is expanded.
1471 Returns True if the package is expanded, otherwise False."""
1472 return self.linkinfo.isexpanded()
1474 def islinkrepair(self):
1475 """tells us if we are repairing a broken source link."""
1476 return self.linkrepair
1479 """tells us if we have pulled a link."""
1480 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1483 """tells us if the link is frozen."""
1484 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1486 def ismetamode(self):
1487 """tells us if the package is in meta mode"""
1488 return os.path.isfile(os.path.join(self.storedir, '_meta_mode'))
1490 def get_pulled_srcmd5(self):
1492 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1493 pulledrev = line.strip()
1496 def haslinkerror(self):
1498 Returns True if the link is broken otherwise False.
1499 If the package is not a link it returns False.
1501 return self.linkinfo.haserror()
1503 def linkerror(self):
1505 Returns an error message if the link is broken otherwise None.
1506 If the package is not a link it returns None.
1508 return self.linkinfo.error
1510 def update_local_pacmeta(self):
1512 Update the local _meta file in the store.
1513 It is replaced with the version pulled from upstream.
1515 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1516 store_write_string(self.absdir, '_meta', meta + '\n')
1518 def findfilebyname(self, n):
1519 for i in self.filelist:
1523 def get_status(self, excluded=False, *exclude_states):
1527 todo = self.filenamelist + self.to_be_added + \
1528 [i for i in self.filenamelist_unvers if not os.path.isdir(os.path.join(self.absdir, i))]
1530 todo.extend([i for i in self.excluded if i != store])
1533 for fname in sorted(todo):
1534 st = self.status(fname)
1535 if not st in exclude_states:
1536 res.append((st, fname))
1539 def status(self, n):
1543  file   storefile  file present   STATUS
1544 exists   exists     in _files
1546   x        -            -         'A' and listed in _to_be_added
1547   x        x            -         'R' and listed in _to_be_added
1548   x        x            x         ' ' if digest differs: 'M'
1549                                       and if in conflicts file: 'C'
1551   -        x            x         'D' and listed in _to_be_deleted
1552   x        x            x         'D' and listed in _to_be_deleted (e.g. if deleted file was modified)
1553   x        x            x         'C' and listed in _in_conflict
1554   x        -            x         'S' and listed in self.skipped
1555   -        -            x         'S' and listed in self.skipped
1561 known_by_meta = False
1563 exists_in_store = False
1564 if n in self.filenamelist:
1565 known_by_meta = True
1566 if os.path.exists(os.path.join(self.absdir, n)):
1568 if os.path.exists(os.path.join(self.storedir, n)):
1569 exists_in_store = True
1571 if n in self.to_be_deleted:
1573 elif n in self.in_conflict:
1575 elif n in self.skipped:
1577 elif n in self.to_be_added and exists and exists_in_store:
1579 elif n in self.to_be_added and exists:
1581 elif exists and exists_in_store and known_by_meta:
1582 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1586 elif n in self.to_be_added and not exists:
1588 elif not exists and exists_in_store and known_by_meta and not n in self.to_be_deleted:
1590 elif exists and not exists_in_store and not known_by_meta:
1592 elif not exists_in_store and known_by_meta:
1593 # XXX: this codepath shouldn't be reached (we restore the storefile
1594 # in update_datastructs)
1595 raise oscerr.PackageInternalError(self.prjname, self.name,
1596 'error: file \'%s\' is known by meta but no storefile exists.\n'
1597 'This might be caused by an old wc format. Please backup your current\n'
1598 'wc and checkout the package again. Afterwards copy all files (except the\n'
1599 '.osc/ dir) into the new package wc.' % n)
1601 # this case shouldn't happen (unless there was a typo in the filename, etc.)
1602 raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % n)
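# Example sketch (hypothetical Package instance 'p'): the single-character
# states returned by status()/get_status() are the ones 'osc status' prints,
# e.g.
#
#   for st, fname in p.get_status():
#       print statfrmt(st, fname)      # e.g. "M    foo.spec"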
1606 def get_diff(self, revision=None, ignoreUnversioned=False):
1608 diff_hdr = 'Index: %s\n'
1609 diff_hdr += '===================================================================\n'
1613 def diff_add_delete(fname, add, revision):
1615 diff.append(diff_hdr % fname)
1619 diff.append('--- %s\t(revision 0)\n' % fname)
1621 if revision and not fname in self.to_be_added:
1622 rev = 'working copy'
1623 diff.append('+++ %s\t(%s)\n' % (fname, rev))
1624 fname = os.path.join(self.absdir, fname)
1626 diff.append('--- %s\t(revision %s)\n' % (fname, revision or self.rev))
1627 diff.append('+++ %s\t(working copy)\n' % fname)
1628 fname = os.path.join(self.storedir, fname)
1631 if revision is not None and not add:
1632 (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
1633 get_source_file(self.apiurl, self.prjname, self.name, origname, tmpfile, revision)
1635 if binary_file(fname):
1640 diff.append('Binary file \'%s\' %s.\n' % (origname, what))
1643 ltmpl = '@@ -0,0 +1,%d @@\n'
1646 ltmpl = '@@ -1,%d +0,0 @@\n'
1647 lines = [tmpl % i for i in open(fname, 'r').readlines()]
1649 diff.append(ltmpl % len(lines))
1650 if not lines[-1].endswith('\n'):
1651 lines.append('\n\\ No newline at end of file\n')
1654 if tmpfile is not None:
1659 if revision is None:
1660 todo = self.todo or [i for i in self.filenamelist if not i in self.to_be_added]+self.to_be_added
1662 if fname in self.to_be_added and self.status(fname) == 'A':
1664 elif fname in self.to_be_deleted:
1665 deleted.append(fname)
1666 elif fname in self.filenamelist:
1667 kept.append(self.findfilebyname(fname))
1668 elif fname in self.to_be_added and self.status(fname) == '!':
1669 raise oscerr.OscIOError(None, 'file \'%s\' is marked as \'A\' but does not exist\n'\
1670 '(either add the missing file or revert it)' % fname)
1671 elif not ignoreUnversioned:
1672 raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % fname)
1674 fm = self.get_files_meta(revision=revision)
1675 root = ET.fromstring(fm)
1676 rfiles = self.__get_files(root)
1677 # swap added and deleted
1678 kept, deleted, added, services = self.__get_rev_changes(rfiles)
1679 added = [f.name for f in added]
1680 added.extend([f for f in self.to_be_added if not f in kept])
1681 deleted = [f.name for f in deleted]
1682 deleted.extend(self.to_be_deleted)
1687 # print kept, added, deleted
1689 state = self.status(f.name)
1690 if state in ('S', '?', '!'):
1692 elif state == ' ' and revision is None:
1694 elif revision and self.findfilebyname(f.name).md5 == f.md5 and state != 'M':
1696 yield [diff_hdr % f.name]
1697 if revision is None:
1698 yield get_source_file_diff(self.absdir, f.name, self.rev)
1703 (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
1704 get_source_file(self.apiurl, self.prjname, self.name, f.name, tmpfile, revision)
1705 diff = get_source_file_diff(self.absdir, f.name, revision,
1706 os.path.basename(tmpfile), os.path.dirname(tmpfile), f.name)
1708 if tmpfile is not None:
1714 yield diff_add_delete(f, True, revision)
1716 yield diff_add_delete(f, False, revision)
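# Example sketch (hypothetical Package instance 'p'): get_diff() is a generator
# yielding one list of diff lines per file, so callers can stream it, e.g.
#
#   for chunk in p.get_diff(ignoreUnversioned=True):
#       sys.stdout.writelines(chunk)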
1718 def merge(self, otherpac):
1719 self.todo += otherpac.todo
1733 '\n '.join(self.filenamelist),
1741 def read_meta_from_spec(self, spec = None):
1746 # scan for spec files
1747 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1748 if len(speclist) == 1:
1749 specfile = speclist[0]
1750 elif len(speclist) > 1:
1751 print 'the following specfiles were found:'
1752 for filename in speclist:
1754 print 'please specify one with --specfile'
1757 print 'no specfile was found - please specify one ' \
1761 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1762 self.summary = data.get('Summary', '')
1763 self.url = data.get('Url', '')
1764 self.descr = data.get('%description', '')
1767 def update_package_meta(self, force=False):
1769 for the updatepacmetafromspec subcommand;
1770 the force argument suppresses the confirmation question
1773 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1775 root = ET.fromstring(m)
1776 root.find('title').text = self.summary
1777 root.find('description').text = ''.join(self.descr)
1778 url = root.find('url')
1780 url = ET.SubElement(root, 'url')
1783 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1784 mf = metafile(u, ET.tostring(root))
1787 print '*' * 36, 'old', '*' * 36
1789 print '*' * 36, 'new', '*' * 36
1790 print ET.tostring(root)
1792 repl = raw_input('Write? (y/N/e) ')
1803 def mark_frozen(self):
1804 store_write_string(self.absdir, '_frozenlink', '')
1806 print "The link in this package is currently broken. Checking"
1807 print "out the last working version instead; please use 'osc pull'"
1808 print "to repair the link."
1811 def unmark_frozen(self):
1812 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1813 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1815 def latest_rev(self, include_service_files=False):
1816 if self.islinkrepair():
1817 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1, meta=self.meta, include_service_files=include_service_files)
1818 elif self.islink() and self.isexpanded():
1819 if self.isfrozen() or self.ispulled():
1820 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta, include_service_files=include_service_files)
1823 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, meta=self.meta, include_service_files=include_service_files)
1826 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta, include_service_files=include_service_files)
1828 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base", meta=self.meta, include_service_files=include_service_files)
1831 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name, meta=self.meta, include_service_files=include_service_files)
1834 def __get_files(self, fmeta_root):
1836 if fmeta_root.get('rev') is None and len(fmeta_root.findall('entry')) > 0:
1837 raise oscerr.APIError('missing rev attribute in _files:\n%s' % ET.tostring(fmeta_root))
1838 for i in fmeta_root.findall('entry'):
1839 skipped = i.get('skipped') is not None
1840 f.append(File(i.get('name'), i.get('md5'),
1841 int(i.get('size')), int(i.get('mtime')), skipped))
1844 def __get_rev_changes(self, revfiles):
1851 revfilenames.append(f.name)
1852 # treat skipped like deleted files
1854 if f.name.startswith('_service:'):
1859 # treat skipped like added files
1860 # problem: this overwrites existing files during the update
1861 # (because skipped files aren't in self.filenamelist_unvers)
1862 if f.name in self.filenamelist and not f.name in self.skipped:
1866 for f in self.filelist:
1867 if not f.name in revfilenames:
1870 return kept, added, deleted, services
1872 def update(self, rev = None, service_files = False, size_limit = None):
1875 # a size_limit passed in here only applies to this single update
1876 old_size_limit = self.size_limit
1877 if size_limit is not None:
1878 self.size_limit = int(size_limit)
1879 if os.path.isfile(os.path.join(self.storedir, '_in_update', '_files')):
1880 print 'resuming broken update...'
1881 root = ET.parse(os.path.join(self.storedir, '_in_update', '_files')).getroot()
1882 rfiles = self.__get_files(root)
1883 kept, added, deleted, services = self.__get_rev_changes(rfiles)
1884 # check if we aborted in the middle of a file update
1885 broken_file = os.listdir(os.path.join(self.storedir, '_in_update'))
1886 broken_file.remove('_files')
1887 if len(broken_file) == 1:
1888 origfile = os.path.join(self.storedir, '_in_update', broken_file[0])
1889 wcfile = os.path.join(self.absdir, broken_file[0])
1890 origfile_md5 = dgst(origfile)
1891 origfile_meta = self.findfilebyname(broken_file[0])
1892 if origfile.endswith('.copy'):
1893 # ok, it seems we aborted at some point during the copy process
1894 # (that is, while copying wcfile to the _in_update dir): remove the file and continue
1896 elif self.findfilebyname(broken_file[0]) is None:
1897 # should we remove this file from _in_update? if we don't
1898 # the user has no chance to continue without removing the file manually
1899 raise oscerr.PackageInternalError(self.prjname, self.name,
1900 '\'%s\' is not known by meta but exists in \'_in_update\' dir' % broken_file[0])
1901 elif os.path.isfile(wcfile) and dgst(wcfile) != origfile_md5:
1902 (fd, tmpfile) = tempfile.mkstemp(dir=self.absdir, prefix=broken_file[0]+'.')
1904 os.rename(wcfile, tmpfile)
1905 os.rename(origfile, wcfile)
1906 print 'warning: it seems you modified \'%s\' after the broken ' \
1907 'update. Restored original file and saved modified version ' \
1908 'to \'%s\'.' % (wcfile, tmpfile)
1909 elif not os.path.isfile(wcfile):
1910 # this is strange... because it existed before the update. restore it
1911 os.rename(origfile, wcfile)
1913 # everything seems to be ok
1915 elif len(broken_file) > 1:
1916 raise oscerr.PackageInternalError(self.prjname, self.name, 'too many files in \'_in_update\' dir')
1919 if os.path.exists(os.path.join(self.storedir, f.name)):
1920 if dgst(os.path.join(self.storedir, f.name)) == f.md5:
1928 if not service_files:
1930 self.__update(kept, added, deleted, services, ET.tostring(root), root.get('rev'))
1931 os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
1932 os.rmdir(os.path.join(self.storedir, '_in_update'))
1933 # ok everything is ok (hopefully)...
1934 fm = self.get_files_meta(revision=rev)
1935 root = ET.fromstring(fm)
1936 rfiles = self.__get_files(root)
1937 store_write_string(self.absdir, '_files', fm + '\n', subdir='_in_update')
1938 kept, added, deleted, services = self.__get_rev_changes(rfiles)
1939 if not service_files:
1941 self.__update(kept, added, deleted, services, fm, root.get('rev'))
1942 os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
1943 if os.path.isdir(os.path.join(self.storedir, '_in_update')):
1944 os.rmdir(os.path.join(self.storedir, '_in_update'))
1945 self.size_limit = old_size_limit
1947 def __update(self, kept, added, deleted, services, fm, rev):
1948 pathn = getTransActPath(self.dir)
1949 # check for conflicts with existing files
1951 if f.name in self.filenamelist_unvers:
1952 raise oscerr.PackageFileConflict(self.prjname, self.name, f.name,
1953 'failed to add file \'%s\': a file/dir with the same name already exists' % f.name)
1954 # ok, the update can't fail due to existing files
1956 self.updatefile(f.name, rev, f.mtime)
1957 print statfrmt('A', os.path.join(pathn, f.name))
1959 # if the storefile doesn't exist we're resuming an aborted update:
1960 # the file was already deleted but we cannot know this
1961 # OR we're processing a _service: file (simply keep the file)
1962 if os.path.isfile(os.path.join(self.storedir, f.name)) and self.status(f.name) != 'M':
1963 # if self.status(f.name) != 'M':
1964 self.delete_localfile(f.name)
1965 self.delete_storefile(f.name)
1966 print statfrmt('D', os.path.join(pathn, f.name))
1967 if f.name in self.to_be_deleted:
1968 self.to_be_deleted.remove(f.name)
1969 self.write_deletelist()
1972 state = self.status(f.name)
1973 # print f.name, state
1974 if state == 'M' and self.findfilebyname(f.name).md5 == f.md5:
1975 # remote file didn't change
1978 # try to merge changes
1979 merge_status = self.mergefile(f.name, rev, f.mtime)
1980 print statfrmt(merge_status, os.path.join(pathn, f.name))
1982 self.updatefile(f.name, rev, f.mtime)
1983 print 'Restored \'%s\'' % os.path.join(pathn, f.name)
1985 get_source_file(self.apiurl, self.prjname, self.name, f.name,
1986 targetfilename=os.path.join(self.storedir, f.name), revision=rev,
1987 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
1988 print 'skipping \'%s\' (this is due to conflicts)' % f.name
1989 elif state == 'D' and self.findfilebyname(f.name).md5 != f.md5:
1990 # XXX: in the worst case we might end up with f.name being
1991 # in _to_be_deleted and in _in_conflict... this needs to be checked
1992 if os.path.exists(os.path.join(self.absdir, f.name)):
1993 merge_status = self.mergefile(f.name, rev, f.mtime)
1994 print statfrmt(merge_status, os.path.join(pathn, f.name))
1995 if merge_status == 'C':
1996 # state changes from delete to conflict
1997 self.to_be_deleted.remove(f.name)
1998 self.write_deletelist()
2000 # XXX: we cannot recover this case because we've no file
2002 self.updatefile(f.name, rev, f.mtime)
2003 print statfrmt('U', os.path.join(pathn, f.name))
2004 elif state == ' ' and self.findfilebyname(f.name).md5 != f.md5:
2005 self.updatefile(f.name, rev, f.mtime)
2006 print statfrmt('U', os.path.join(pathn, f.name))
2008 # checkout service files
2010 get_source_file(self.apiurl, self.prjname, self.name, f.name,
2011 targetfilename=os.path.join(self.absdir, f.name), revision=rev,
2012 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
2013 print statfrmt('A', os.path.join(pathn, f.name))
2014 store_write_string(self.absdir, '_files', fm + '\n')
2016 self.update_local_pacmeta()
2017 self.update_datastructs()
2019 print 'At revision %s.' % self.rev
2021 def run_source_services(self, mode=None, singleservice=None, verbose=None):
2022 if self.name.startswith("_"):
2024 curdir = os.getcwd()
2025 os.chdir(self.absdir) # e.g. /usr/lib/obs/service/verify_file fails if not inside the project dir.
2027 if os.path.exists('_service'):
2028 if self.filenamelist.count('_service') or self.filenamelist_unvers.count('_service'):
2029 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
2031 si.getProjectGlobalServices(self.apiurl, self.prjname, self.name)
2032 r = si.execute(self.absdir, mode, singleservice, verbose)
2036 def revert(self, filename):
2037 if not filename in self.filenamelist and not filename in self.to_be_added:
2038 raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % filename)
2039 elif filename in self.skipped:
2040 raise oscerr.OscIOError(None, 'file \'%s\' is marked as skipped and cannot be reverted' % filename)
2041 if filename in self.filenamelist and not os.path.exists(os.path.join(self.storedir, filename)):
2042 raise oscerr.PackageInternalError(self.prjname, self.name, 'file \'%s\' is listed in filenamelist but no storefile exists' % filename)
2043 state = self.status(filename)
2044 if not (state == 'A' or state == '!' and filename in self.to_be_added):
2045 shutil.copyfile(os.path.join(self.storedir, filename), os.path.join(self.absdir, filename))
2047 self.to_be_deleted.remove(filename)
2048 self.write_deletelist()
2050 self.clear_from_conflictlist(filename)
2051 elif state in ('A', 'R') or state == '!' and filename in self.to_be_added:
2052 self.to_be_added.remove(filename)
2053 self.write_addlist()
2056 def init_package(apiurl, project, package, dir, size_limit=None, meta=False, progress_obj=None):
2059 if not os.path.exists(dir):
2061 elif not os.path.isdir(dir):
2062 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
2063 if os.path.exists(os.path.join(dir, store)):
2064 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
2066 os.mkdir(os.path.join(dir, store))
2067 store_write_project(dir, project)
2068 store_write_string(dir, '_package', package + '\n')
2069 store_write_apiurl(dir, apiurl)
2071 store_write_string(dir, '_meta_mode', '')
2073 store_write_string(dir, '_size_limit', str(size_limit) + '\n')
2074 store_write_string(dir, '_files', '<directory />' + '\n')
2075 store_write_string(dir, '_osclib_version', __store_version__ + '\n')
2076 return Package(dir, progress_obj=progress_obj, size_limit=size_limit)
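# Illustrative sketch (not part of osc): typical use of init_package() to set up a
# fresh working copy directory before fetching the sources. The apiurl, project and
# package names are made-up placeholders; 'foo' must not already be an osc wc.
def _example_init_package():
    p = init_package('https://api.opensuse.org', 'home:user', 'foo', 'foo')
    # the returned Package starts with an empty file list ('_files' is '<directory />');
    # a subsequent p.update() would fetch the actual sources
    return p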
2079 class AbstractState:
2081 Base class which represents state-like objects (<review />, <state />).
2083 def __init__(self, tag):
2086 def get_node_attrs(self):
2087 """return attributes for the tag/element"""
2088 raise NotImplementedError()
2090 def get_node_name(self):
2091 """return tag/element name"""
2094 def get_comment(self):
2095 """return data from <comment /> tag"""
2096 raise NotImplementedError()
2099 """serialize object to XML"""
2100 root = ET.Element(self.get_node_name())
2101 for attr in self.get_node_attrs():
2102 val = getattr(self, attr)
2105 if self.get_comment():
2106 ET.SubElement(root, 'comment').text = self.get_comment()
2110 """return "pretty" XML data"""
2111 root = self.to_xml()
2113 return ET.tostring(root)
2116 class ReviewState(AbstractState):
2117 """Represents the review state in a request"""
2118 def __init__(self, review_node):
2119 if not review_node.get('state'):
2120 raise oscerr.APIError('invalid review node (state attr expected): %s' % \
2121 ET.tostring(review_node))
2122 AbstractState.__init__(self, review_node.tag)
2123 self.state = review_node.get('state')
2124 self.by_user = review_node.get('by_user')
2125 self.by_group = review_node.get('by_group')
2126 self.by_project = review_node.get('by_project')
2127 self.by_package = review_node.get('by_package')
2128 self.who = review_node.get('who')
2129 self.when = review_node.get('when')
2131 if not review_node.find('comment') is None and \
2132 review_node.find('comment').text:
2133 self.comment = review_node.find('comment').text.strip()
2135 def get_node_attrs(self):
2136 return ('state', 'by_user', 'by_group', 'by_project', 'by_package', 'who', 'when')
2138 def get_comment(self):
2142 class RequestState(AbstractState):
2143 """Represents the state of a request"""
2144 def __init__(self, state_node):
2145 if not state_node.get('name'):
2146 raise oscerr.APIError('invalid request state node (name attr expected): %s' % \
2147 ET.tostring(state_node))
2148 AbstractState.__init__(self, state_node.tag)
2149 self.name = state_node.get('name')
2150 self.who = state_node.get('who')
2151 self.when = state_node.get('when')
2153 if not state_node.find('comment') is None and \
2154 state_node.find('comment').text:
2155 self.comment = state_node.find('comment').text.strip()
2157 def get_node_attrs(self):
2158 return ('name', 'who', 'when')
2160 def get_comment(self):
2166 Represents an <action /> element of a Request.
2167 This class is generic so that it can be used for all the different
2168 action types. Note: instances only provide attributes for their specific
2171 r = Action('set_bugowner', tgt_project='foo', person_name='buguser')
2172 # available attributes: r.type (== 'set_bugowner'), r.tgt_project (== 'foo'), r.tgt_package (== None)
2174 <action type="set_bugowner">
2175 <target project="foo" />
2176 <person name="buguser" />
2179 r = Action('delete', tgt_project='foo', tgt_package='bar')
2180 # available attributes: r.type (== 'delete'), r.tgt_project (== 'foo'), r.tgt_package (=='bar')
2182 <action type="delete">
2183 <target package="bar" project="foo" />
2187 # allowed types + the corresponding (allowed) attributes
2188 type_args = {'submit': ('src_project', 'src_package', 'src_rev', 'tgt_project', 'tgt_package', 'opt_sourceupdate',
2189 'acceptinfo_rev', 'acceptinfo_srcmd5', 'acceptinfo_xsrcmd5', 'acceptinfo_osrcmd5',
2190 'acceptinfo_oxsrcmd5', 'opt_updatelink'),
2191 'add_role': ('tgt_project', 'tgt_package', 'person_name', 'person_role', 'group_name', 'group_role'),
2192 'set_bugowner': ('tgt_project', 'tgt_package', 'person_name'), # obsoleted by add_role
2193 'maintenance_release': ('src_project', 'src_package', 'src_rev', 'tgt_project', 'tgt_package', 'person_name'),
2194 'maintenance_incident': ('src_project', 'tgt_project', 'person_name'),
2195 'delete': ('tgt_project', 'tgt_package'),
2196 'change_devel': ('src_project', 'src_package', 'tgt_project', 'tgt_package')}
2197 # attribute prefix to element name map (only needed for abbreviated attributes)
2198 prefix_to_elm = {'src': 'source', 'tgt': 'target', 'opt': 'options'}
2200 def __init__(self, type, **kwargs):
2201 if not type in Action.type_args.keys():
2202 raise oscerr.WrongArgs('invalid action type: \'%s\'' % type)
2204 for i in kwargs.keys():
2205 if not i in Action.type_args[type]:
2206 raise oscerr.WrongArgs('invalid argument: \'%s\'' % i)
2207 # set all type specific attributes
2208 for i in Action.type_args[type]:
2209 if kwargs.has_key(i):
2210 setattr(self, i, kwargs[i])
2212 setattr(self, i, None)
2216 Serialize object to XML.
2217 The xml tag names and attributes are constructed from the instance's attributes.
2219 self.group_name -> tag name is "group", attribute name is "name"
2220 self.src_project -> tag name is "source" (translated via prefix_to_elm dict),
2221 attribute name is "project"
2222 Attributes prefixed with "opt_" need special handling; the resulting xml should
2223 look like this: opt_updatelink -> <options><updatelink>value</updatelink></options>.
2224 Attributes which are "None" will be skipped.
2226 root = ET.Element('action', type=self.type)
2227 for i in Action.type_args[self.type]:
2228 prefix, attr = i.split('_', 1)
2229 val = getattr(self, i)
2232 elm = root.find(Action.prefix_to_elm.get(prefix, prefix))
2234 elm = ET.Element(Action.prefix_to_elm.get(prefix, prefix))
2237 ET.SubElement(elm, attr).text = val
2243 """return "pretty" XML data"""
2244 root = self.to_xml()
2246 return ET.tostring(root)
2249 def from_xml(action_node):
2250 """create action from XML"""
2251 if action_node is None or \
2252 not action_node.get('type') in Action.type_args.keys() or \
2253 not action_node.tag in ('action', 'submit'):
2254 raise oscerr.WrongArgs('invalid argument')
2255 elm_to_prefix = dict([(i[1], i[0]) for i in Action.prefix_to_elm.items()])
2257 for node in action_node:
2258 prefix = elm_to_prefix.get(node.tag, node.tag)
2260 data = [('opt_%s' % opt.tag, opt.text.strip()) for opt in node if opt.text]
2262 data = [('%s_%s' % (prefix, k), v) for k, v in node.items()]
2263 kwargs.update(dict(data))
2264 return Action(action_node.get('type'), **kwargs)
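# Illustrative sketch (not part of osc): round-tripping an Action through
# to_xml()/from_xml(). The project and package names are placeholders.
def _example_action_roundtrip():
    a = Action('submit', src_project='home:user', src_package='foo',
               tgt_project='openSUSE:Factory', tgt_package='foo')
    # to_xml() maps the src_* attributes to a <source /> element and the
    # tgt_* attributes to a <target /> element
    node = a.to_xml()
    b = Action.from_xml(node)
    return b.type, b.src_project, b.tgt_package   # ('submit', 'home:user', 'foo')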
2268 """Represents a request (<request />)"""
2271 self._init_attributes()
2273 def _init_attributes(self):
2274 """initialize attributes with default values"""
2277 self.description = ''
2280 self.statehistory = []
2283 def read(self, root):
2284 """read in a request"""
2285 self._init_attributes()
2286 if not root.get('id'):
2287 raise oscerr.APIError('invalid request: %s\n' % ET.tostring(root))
2288 self.reqid = root.get('id')
2289 if root.find('state') is None:
2290 raise oscerr.APIError('invalid request (state expected): %s\n' % ET.tostring(root))
2291 self.state = RequestState(root.find('state'))
2292 action_nodes = root.findall('action')
2293 if not action_nodes:
2294 # check for old-style requests
2295 for i in root.findall('submit'):
2296 i.set('type', 'submit')
2297 action_nodes.append(i)
2298 for action in action_nodes:
2299 self.actions.append(Action.from_xml(action))
2300 for review in root.findall('review'):
2301 self.reviews.append(ReviewState(review))
2302 for hist_state in root.findall('history'):
2303 self.statehistory.append(RequestState(hist_state))
2304 if not root.find('title') is None:
2305 self.title = root.find('title').text.strip()
2306 if not root.find('description') is None and root.find('description').text:
2307 self.description = root.find('description').text.strip()
2309 def add_action(self, type, **kwargs):
2310 """add a new action to the request"""
2311 self.actions.append(Action(type, **kwargs))
2313 def get_actions(self, *types):
2315 get all actions with a specific type
2316 (if types is empty return all actions)
2320 return [i for i in self.actions if i.type in types]
2322 def get_creator(self):
2323 """return the creator of the request"""
2324 if len(self.statehistory):
2325 return self.statehistory[0].who
2326 return self.state.who
2329 """serialize object to XML"""
2330 root = ET.Element('request')
2331 if not self.reqid is None:
2332 root.set('id', self.reqid)
2333 for action in self.actions:
2334 root.append(action.to_xml())
2335 if not self.state is None:
2336 root.append(self.state.to_xml())
2337 for review in self.reviews:
2338 root.append(review.to_xml())
2339 for hist in self.statehistory:
2340 root.append(hist.to_xml())
2342 ET.SubElement(root, 'title').text = self.title
2343 if self.description:
2344 ET.SubElement(root, 'description').text = self.description
2348 """return "pretty" XML data"""
2349 root = self.to_xml()
2351 return ET.tostring(root)
2354 def format_review(review, show_srcupdate=False):
2356 format a review depending on the reviewer's type.
2357 Returns a dict which contains the formatted strings.
2360 d = {'state': '%s:' % review.state}
2361 if review.by_package:
2362 d['by'] = '%s/%s' % (review.by_project, review.by_package)
2363 d['type'] = 'Package'
2364 elif review.by_project:
2365 d['by'] = '%s' % review.by_project
2366 d['type'] = 'Project'
2367 elif review.by_group:
2368 d['by'] = '%s' % review.by_group
2371 d['by'] = '%s' % review.by_user
2374 d['by'] += '(%s)' % review.who
2378 def format_action(action, show_srcupdate=False):
2380 format an action depending on the action's type.
2381 Returns a dict which contains the formatted strings.
2383 def prj_pkg_join(prj, pkg):
2386 return '%s/%s' % (prj, pkg)
2388 d = {'type': '%s:' % action.type}
2389 if action.type == 'set_bugowner':
2390 d['source'] = action.person_name
2391 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2392 elif action.type == 'change_devel':
2393 d['source'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2394 d['target'] = 'developed in %s' % prj_pkg_join(action.src_project, action.src_package)
2395 elif action.type == 'maintenance_incident':
2396 d['source'] = '%s ->' % action.src_project
2397 d['target'] = action.tgt_project
2398 elif action.type == 'maintenance_release':
2399 d['source'] = '%s ->' % prj_pkg_join(action.src_project, action.src_package)
2400 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2401 elif action.type == 'submit':
2403 if action.opt_sourceupdate and show_srcupdate:
2404 srcupdate = '(%s)' % action.opt_sourceupdate
2405 d['source'] = '%s%s ->' % (prj_pkg_join(action.src_project, action.src_package), srcupdate)
2406 tgt_package = action.tgt_package
2407 if action.src_package == action.tgt_package:
2409 d['target'] = prj_pkg_join(action.tgt_project, tgt_package)
2410 elif action.type == 'add_role':
2412 if action.person_name and action.person_role:
2413 roles.append('person: %s as %s' % (action.person_name, action.person_role))
2414 if action.group_name and action.group_role:
2415 roles.append('group: %s as %s' % (action.group_name, action.group_role))
2416 d['source'] = ', '.join(roles)
2417 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2418 elif action.type == 'delete':
2420 d['target'] = prj_pkg_join(action.tgt_project, action.tgt_package)
2423 def list_view(self):
2424 """return "list view" format"""
2426 lines = ['%6s State:%-10s By:%-12s When:%-19s' % (self.reqid, self.state.name, self.state.who, self.state.when)]
2427 tmpl = ' %(type)-16s %(source)-50s %(target)s'
2428 for action in self.actions:
2429 lines.append(tmpl % Request.format_action(action))
2430 tmpl = ' Review by %(type)-10s is %(state)-10s %(by)-50s'
2431 for review in self.reviews:
2432 lines.append(tmpl % Request.format_review(review))
2433 history = ['%s(%s)' % (hist.name, hist.who) for hist in self.statehistory]
2435 lines.append(' From: %s' % ' -> '.join(history))
2436 if self.description:
2437 lines.append(textwrap.fill(self.description, width=80, initial_indent=' Descr: ',
2438 subsequent_indent=' '))
2439 lines.append(textwrap.fill(self.state.comment, width=80, initial_indent=' Comment: ',
2440 subsequent_indent=' '))
2441 return '\n'.join(lines)
2444 """return "detailed" format"""
2445 lines = ['Request: #%s\n' % self.reqid]
2446 for action in self.actions:
2447 tmpl = ' %(type)-13s %(source)s %(target)s'
2448 if action.type == 'delete':
2449 # remove 1 whitespace because source is empty
2450 tmpl = ' %(type)-12s %(source)s %(target)s'
2451 lines.append(tmpl % Request.format_action(action, show_srcupdate=True))
2452 lines.append('\n\nMessage:')
2453 if self.description:
2454 lines.append(self.description)
2456 lines.append('<no message>')
2458 lines.append('\nState: %-10s %-12s %s' % (self.state.name, self.state.when, self.state.who))
2459 lines.append('Comment: %s' % (self.state.comment or '<no comment>'))
2462 tmpl = '%(state)-10s %(by)-50s %(when)-12s %(who)-20s %(comment)s'
2464 for review in reversed(self.reviews):
2465 d = {'state': review.state}
2467 d['by'] = "User: " + review.by_user
2469 d['by'] = "Group: " + review.by_group
2470 if review.by_package:
2471 d['by'] = "Package: " + review.by_project + "/" + review.by_package
2472 elif review.by_project:
2473 d['by'] = "Project: " + review.by_project
2474 d['when'] = review.when or ''
2475 d['who'] = review.who or ''
2476 d['comment'] = review.comment or ''
2477 reviews.append(tmpl % d)
2479 lines.append('\nReview: %s' % indent.join(reviews))
2481 tmpl = '%(name)-10s %(when)-12s %(who)s'
2483 for hist in reversed(self.statehistory):
2484 d = {'name': hist.name, 'when': hist.when,
2486 histories.append(tmpl % d)
2488 lines.append('\nHistory: %s' % indent.join(histories))
2490 return '\n'.join(lines)
2492 def __cmp__(self, other):
2493 return cmp(int(self.reqid), int(other.reqid))
2495 def create(self, apiurl):
2496 """create a new request"""
2497 u = makeurl(apiurl, ['request'], query='cmd=create')
2498 f = http_POST(u, data=self.to_str())
2499 root = ET.fromstring(f.read())
2503 """format time as Apr 02 18:19
2505 depending on whether it is in the current year
2509 if time.localtime()[0] == time.localtime(t)[0]:
2511 return time.strftime('%b %d %H:%M',time.localtime(t))
2513 return time.strftime('%b %d %Y',time.localtime(t))
2516 def is_project_dir(d):
2519 return os.path.exists(os.path.join(d, store, '_project')) and not \
2520 os.path.exists(os.path.join(d, store, '_package'))
2523 def is_package_dir(d):
2526 return os.path.exists(os.path.join(d, store, '_project')) and \
2527 os.path.exists(os.path.join(d, store, '_package'))
2529 def parse_disturl(disturl):
2530 """Parse a disturl, returns tuple (apiurl, project, source, repository,
2531 revision), else raises an oscerr.WrongArgs exception
2536 m = DISTURL_RE.match(disturl)
2538 raise oscerr.WrongArgs("`%s' does not look like a disturl" % disturl)
2540 apiurl = m.group('apiurl')
2541 if apiurl.split('.')[0] != 'api':
2542 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
2543 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
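# Illustrative sketch (not part of osc): parsing a DISTURL as found in built rpms.
# The disturl below is a made-up example following the usual
# obs://<host>/<project>/<repository>/<srcmd5>-<package> layout.
def _example_parse_disturl():
    disturl = 'obs://build.opensuse.org/openSUSE:Factory/standard/0123456789abcdef0123456789abcdef-foo'
    apiurl, project, source, repository, revision = parse_disturl(disturl)
    # the api host is derived from the disturl host, e.g. 'https://api.opensuse.org'
    return apiurl, project, source, repository, revision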
2545 def parse_buildlogurl(buildlogurl):
2546 """Parse a build log url, returns a tuple (apiurl, project, package,
2547 repository, arch), or raises an oscerr.WrongArgs exception"""
2549 global BUILDLOGURL_RE
2551 m = BUILDLOGURL_RE.match(buildlogurl)
2553 raise oscerr.WrongArgs('\'%s\' does not look like a url with a build log' % buildlogurl)
2555 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
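# Illustrative sketch (not part of osc): splitting a build log url into its
# components. Host, project and package below are placeholders.
def _example_parse_buildlogurl():
    url = 'https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/foo/_log'
    # returns (apiurl, project, package, repository, arch)
    return parse_buildlogurl(url)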
2558 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
2559 This is handy to allow copy/pasting a project/package combination in this form.
2561 Trailing slashes are removed before the split, because the split would
2562 otherwise give an additional empty string.
2570 def expand_proj_pack(args, idx=0, howmany=0):
2571 """looks for occurance of '.' at the position idx.
2572 If howmany is 2, both proj and pack are expanded together
2573 using the current directory, or none of them, if not possible.
2574 If howmany is 0, proj is expanded if possible, then, if there
2575 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
2576 expanded, if possible.
2577 If howmany is 1, only proj is expanded if possible.
2579 If args[idx] does not exist, an implicit '.' is assumed.
2580 If not enough elements up to idx exist, an error is raised.
2582 See also parseargs(args), slash_split(args), findpacs(args)
2583 All these need unification, somehow.
2586 # print args,idx,howmany
2589 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
2591 if len(args) == idx:
2593 if args[idx+0] == '.':
2594 if howmany == 0 and len(args) > idx+1:
2595 if args[idx+1] == '.':
2597 # remove one dot and make sure to expand both proj and pack
2602 # print args,idx,howmany
2604 args[idx+0] = store_read_project('.')
2607 package = store_read_package('.')
2608 args.insert(idx+1, package)
2612 package = store_read_package('.')
2613 args.insert(idx+1, package)
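# Illustrative sketch (not part of osc): what expand_proj_pack() does when called
# from inside a package working copy. The resulting names depend on the checkout;
# 'home:user'/'foo' are placeholders for what is read from the .osc store.
def _example_expand_proj_pack():
    args = ['.', '.']
    expand_proj_pack(args)    # expands both dots in place (howmany=0)
    return args               # e.g. ['home:user', 'foo']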
2617 def findpacs(files, progress_obj=None):
2618 """collect Package objects belonging to the given files
2619 and make sure each Package is returned only once"""
2622 p = filedir_to_pac(f, progress_obj)
2625 if i.name == p.name:
2635 def filedir_to_pac(f, progress_obj=None):
2636 """Takes a working copy path, or a path to a file inside a working copy,
2637 and returns a Package object instance
2639 If the argument was a filename, add it onto the "todo" list of the Package """
2641 if os.path.isdir(f):
2643 p = Package(wd, progress_obj=progress_obj)
2645 wd = os.path.dirname(f) or os.curdir
2646 p = Package(wd, progress_obj=progress_obj)
2647 p.todo = [ os.path.basename(f) ]
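# Illustrative sketch (not part of osc): findpacs() groups a list of file paths by
# the working copies that contain them. Both paths are placeholders and must live
# inside osc package checkouts (and osc's configuration must already be read).
def _example_findpacs():
    pacs = findpacs(['foo/foo.spec', 'foo/foo.changes'])
    # both paths belong to the same working copy, so a single Package is returned
    # with the requested file names on its todo list
    return pacs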
2651 def read_filemeta(dir):
2654 msg = '\'%s\' is not a valid working copy.' % dir
2655 filesmeta = os.path.join(dir, store, '_files')
2656 if not is_package_dir(dir):
2657 raise oscerr.NoWorkingCopy(msg)
2658 if not os.path.isfile(filesmeta):
2659 raise oscerr.NoWorkingCopy('%s (%s does not exist)' % (msg, filesmeta))
2662 r = ET.parse(filesmeta)
2663 except SyntaxError, e:
2664 raise oscerr.NoWorkingCopy('%s\nWhen parsing .osc/_files, the following error was encountered:\n%s' % (msg, e))
2667 def store_readlist(dir, name):
2671 if os.path.exists(os.path.join(dir, store, name)):
2672 r = [line.strip() for line in open(os.path.join(dir, store, name), 'r')]
2675 def read_tobeadded(dir):
2676 return store_readlist(dir, '_to_be_added')
2678 def read_tobedeleted(dir):
2679 return store_readlist(dir, '_to_be_deleted')
2681 def read_sizelimit(dir):
2685 fname = os.path.join(dir, store, '_size_limit')
2687 if os.path.exists(fname):
2688 r = open(fname).readline().strip()
2690 if r is None or not r.isdigit():
2694 def read_inconflict(dir):
2695 return store_readlist(dir, '_in_conflict')
2697 def parseargs(list_of_args):
2698 """Convenience method osc's commandline argument parsing.
2700 If called with an empty tuple (or list), return a list containing the current directory.
2701 Otherwise, return a list of the arguments."""
2703 return list(list_of_args)
2708 def statfrmt(statusletter, filename):
2709 return '%s %s' % (statusletter, filename)
2712 def pathjoin(a, *p):
2713 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2714 path = os.path.join(a, *p)
2715 if path.startswith('./'):
2720 def makeurl(baseurl, l, query=[]):
2721 """Given a list of path compoments, construct a complete URL.
2723 Optional parameters for a query string can be given as a list, as a
2724 dictionary, or as an already assembled string.
2725 In case of a dictionary, the parameters will be urlencoded by this
2726 function. In case of a list it is not -- this is for backwards compatibility.
2729 if conf.config['verbose'] > 1:
2730 print 'makeurl:', baseurl, l, query
2732 if type(query) == type(list()):
2733 query = '&'.join(query)
2734 elif type(query) == type(dict()):
2735 query = urlencode(query)
2737 scheme, netloc = urlsplit(baseurl)[0:2]
2738 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
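# Illustrative sketch (not part of osc): building an api url with a dict query,
# which makeurl() urlencodes. The apiurl and project are placeholders.
# (assumes osc's configuration has already been read via conf.get_config())
def _example_makeurl():
    u = makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory'],
                query={'deleted': 1})
    return u   # 'https://api.opensuse.org/source/openSUSE:Factory?deleted=1'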
2741 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2742 """wrapper around urllib2.urlopen for error handling,
2743 and to support additional (PUT, DELETE) methods"""
2747 if conf.config['http_debug']:
2748 print >>sys.stderr, '\n\n--', method, url
2750 if method == 'POST' and not file and not data:
2751 # adding data to an urllib2 request transforms it into a POST
2754 req = urllib2.Request(url)
2755 api_host_options = {}
2756 if conf.is_known_apiurl(url):
2757 # ok no external request
2758 urllib2.install_opener(conf._build_opener(url))
2759 api_host_options = conf.get_apiurl_api_host_options(url)
2760 for header, value in api_host_options['http_headers']:
2761 req.add_header(header, value)
2763 req.get_method = lambda: method
2765 # POST requests are application/x-www-form-urlencoded per default
2766 # since we change the request into PUT, we also need to adjust the content type header
2767 if method == 'PUT' or (method == 'POST' and data):
2768 req.add_header('Content-Type', 'application/octet-stream')
2770 if type(headers) == type({}):
2771 for i in headers.keys():
2773 req.add_header(i, headers[i])
2775 if file and not data:
2776 size = os.path.getsize(file)
2778 data = open(file, 'rb').read()
2781 filefd = open(file, 'rb')
2783 if sys.platform[:3] != 'win':
2784 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2786 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2788 except EnvironmentError, e:
2790 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2791 '\non a filesystem which does not support this.' % (e, file))
2792 elif hasattr(e, 'winerror') and e.winerror == 5:
2793 # falling back to the default io
2794 data = open(file, 'rb').read()
2798 if conf.config['debug']: print >>sys.stderr, method, url
2800 old_timeout = socket.getdefaulttimeout()
2801 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2802 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2803 socket.setdefaulttimeout(timeout)
2805 fd = urllib2.urlopen(req, data=data)
2807 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2808 socket.setdefaulttimeout(old_timeout)
2809 if hasattr(conf.cookiejar, 'save'):
2810 conf.cookiejar.save(ignore_discard=True)
2812 if filefd: filefd.close()
2817 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2818 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2819 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2820 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
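# Illustrative sketch (not part of osc): the convenience wrappers above are used
# together with makeurl(), e.g. to fetch a package file list. Requires a configured
# osc (conf.get_config()) and valid credentials; the names are placeholders.
def _example_http_get():
    u = makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', 'osc'])
    f = http_GET(u)
    return ET.parse(f).getroot()   # <directory> element listing the package files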
2823 def check_store_version(dir):
2826 versionfile = os.path.join(dir, store, '_osclib_version')
2828 v = open(versionfile).read().strip()
2833 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2834 if os.path.exists(os.path.join(dir, '.svn')):
2835 msg = msg + '\nTry svn instead of osc.'
2836 raise oscerr.NoWorkingCopy(msg)
2838 if v != __store_version__:
2839 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2840 # version is fine, no migration needed
2841 f = open(versionfile, 'w')
2842 f.write(__store_version__ + '\n')
2845 msg = 'The osc metadata of your working copy "%s"' % dir
2846 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2847 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2848 raise oscerr.WorkingCopyWrongVersion, msg
2851 def meta_get_packagelist(apiurl, prj, deleted=None):
2855 query['deleted'] = 1
2857 u = makeurl(apiurl, ['source', prj], query)
2859 root = ET.parse(f).getroot()
2860 return [ node.get('name') for node in root.findall('entry') ]
2863 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None, meta=False):
2864 """return a list of file names,
2865 or a list of File() instances if verbose=True"""
2873 query['rev'] = revision
2875 query['rev'] = 'latest'
2877 u = makeurl(apiurl, ['source', prj, package], query=query)
2879 root = ET.parse(f).getroot()
2882 return [ node.get('name') for node in root.findall('entry') ]
2886 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2887 rev = root.get('rev')
2888 for node in root.findall('entry'):
2889 f = File(node.get('name'),
2891 int(node.get('size')),
2892 int(node.get('mtime')))
2898 def meta_get_project_list(apiurl, deleted=None):
2901 query['deleted'] = 1
2903 u = makeurl(apiurl, ['source'], query)
2905 root = ET.parse(f).getroot()
2906 return sorted([ node.get('name') for node in root if node.get('name')])
2909 def show_project_meta(apiurl, prj):
2910 url = makeurl(apiurl, ['source', prj, '_meta'])
2912 return f.readlines()
2915 def show_project_conf(apiurl, prj):
2916 url = makeurl(apiurl, ['source', prj, '_config'])
2918 return f.readlines()
2921 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2922 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2926 except urllib2.HTTPError, e:
2927 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2931 def show_package_meta(apiurl, prj, pac, meta=False):
2936 # packages like _pattern and _project do not have a _meta file
2937 if pac.startswith('_pattern') or pac.startswith('_project'):
2940 url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
2943 return f.readlines()
2944 except urllib2.HTTPError, e:
2945 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2949 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2951 path.append('source')
2957 path.append('_attribute')
2959 path.append(attribute)
2962 query.append("with_default=1")
2964 query.append("with_project=1")
2965 url = makeurl(apiurl, path, query)
2968 return f.readlines()
2969 except urllib2.HTTPError, e:
2970 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2974 def show_develproject(apiurl, prj, pac, xml_node=False):
2975 m = show_package_meta(apiurl, prj, pac)
2976 node = ET.fromstring(''.join(m)).find('devel')
2977 if not node is None:
2980 return node.get('project')
2984 def show_package_disabled_repos(apiurl, prj, pac):
2985 m = show_package_meta(apiurl, prj, pac)
2986 #FIXME: doesn't work if all repos of a project are disabled and only some are enabled, since <disable/> is empty
2988 root = ET.fromstring(''.join(m))
2989 elm = root.find('build')
2990 r = [ node.get('repository') for node in elm.findall('disable')]
2996 def show_pattern_metalist(apiurl, prj):
2997 url = makeurl(apiurl, ['source', prj, '_pattern'])
3001 except urllib2.HTTPError, e:
3002 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
3004 r = [ node.get('name') for node in tree.getroot() ]
3009 def show_pattern_meta(apiurl, prj, pattern):
3010 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
3013 return f.readlines()
3014 except urllib2.HTTPError, e:
3015 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
3020 """metafile that can be manipulated and is stored back after manipulation."""
3021 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
3025 self.change_is_required = change_is_required
3026 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
3027 f = os.fdopen(fd, 'w')
3028 f.write(''.join(input))
3030 self.hash_orig = dgst(self.filename)
3033 if self.change_is_required and self.hash_orig == dgst(self.filename):
3034 print 'File unchanged. Not saving.'
3035 os.unlink(self.filename)
3038 print 'Sending meta data...'
3039 # don't do any exception handling... it's up to the caller what to do in case
3041 http_PUT(self.url, file=self.filename)
3042 os.unlink(self.filename)
3048 run_editor(self.filename)
3052 except urllib2.HTTPError, e:
3053 error_help = "%d" % e.code
3054 if e.headers.get('X-Opensuse-Errorcode'):
3055 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
3057 print >>sys.stderr, 'BuildService API error:', error_help
3058 # examine the error - we can't raise an exception because we might want
3061 if '<summary>' in data:
3062 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
3063 ri = raw_input('Try again? ([y/N]): ')
3064 if ri not in ['y', 'Y']:
3070 if os.path.exists(self.filename):
3071 print 'discarding %s' % self.filename
3072 os.unlink(self.filename)
3075 # different types of metadata
3076 metatypes = { 'prj': { 'path': 'source/%s/_meta',
3077 'template': new_project_templ,
3080 'pkg': { 'path' : 'source/%s/%s/_meta',
3081 'template': new_package_templ,
3084 'attribute': { 'path' : 'source/%s/%s/_meta',
3085 'template': new_attribute_templ,
3088 'prjconf': { 'path': 'source/%s/_config',
3092 'user': { 'path': 'person/%s',
3093 'template': new_user_template,
3096 'pattern': { 'path': 'source/%s/_pattern/%s',
3097 'template': new_pattern_template,
3102 def meta_exists(metatype,
3111 apiurl = conf.config['apiurl']
3112 url = make_meta_url(metatype, path_args, apiurl)
3114 data = http_GET(url).readlines()
3115 except urllib2.HTTPError, e:
3116 if e.code == 404 and create_new:
3117 data = metatypes[metatype]['template']
3119 data = StringIO(data % template_args).readlines()
3125 def make_meta_url(metatype, path_args=None, apiurl=None, force=False):
3129 apiurl = conf.config['apiurl']
3130 if metatype not in metatypes.keys():
3131 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
3132 path = metatypes[metatype]['path']
3135 path = path % path_args
3139 query = { 'force': '1' }
3141 return makeurl(apiurl, [path], query)
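# Illustrative sketch (not part of osc): make_meta_url() combines the metatypes
# table above with path_args to build the url that meta_exists()/edit_meta()
# operate on. The apiurl and names are placeholders; a configured osc is assumed.
def _example_make_meta_url():
    u = make_meta_url('pkg', path_args=('openSUSE:Factory', 'osc'),
                      apiurl='https://api.opensuse.org')
    return u   # 'https://api.opensuse.org/source/openSUSE:Factory/osc/_meta'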
3144 def edit_meta(metatype,
3150 change_is_required=False,
3156 apiurl = conf.config['apiurl']
3158 data = meta_exists(metatype,
3161 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
3165 change_is_required = True
3167 url = make_meta_url(metatype, path_args, apiurl, force)
3168 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
3176 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, meta=False):
3179 query['rev'] = revision
3181 query['rev'] = 'latest'
3183 query['linkrev'] = linkrev
3184 elif conf.config['linkcontrol']:
3185 query['linkrev'] = 'base'
3191 query['emptylink'] = 1
3192 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
3195 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False, include_service_files=False):
3196 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta)
3197 et = ET.fromstring(''.join(m))
3198 if include_service_files:
3200 if et.find('serviceinfo') is not None and et.find('serviceinfo').get('xsrcmd5'):
3201 return et.find('serviceinfo').get('xsrcmd5')
3204 return et.get('srcmd5')
3207 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False, include_service_files=False):
3208 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta, expand=include_service_files)
3209 et = ET.fromstring(''.join(m))
3210 if include_service_files:
3211 return et.get('srcmd5')
3213 # only source link packages have a <linkinfo> element.
3214 li_node = et.find('linkinfo')
3222 raise oscerr.LinkExpandError(prj, pac, li.error)
3226 def show_upstream_rev(apiurl, prj, pac, revision=None, expand=False, linkrev=None, meta=False, include_service_files=False):
3227 m = show_files_meta(apiurl, prj, pac, revision=revision, expand=expand, linkrev=linkrev, meta=meta)
3228 et = ET.fromstring(''.join(m))
3229 if include_service_files:
3231 return et.find('serviceinfo').get('xsrcmd5')
3234 return et.get('rev')
3237 def read_meta_from_spec(specfile, *args):
3238 import codecs, locale, re
3240 Read tags and sections from spec file. To read out
3241 a tag the passed argument mustn't end with a colon. To
3242 read out a section the passed argument must start with
3244 This method returns a dictionary which contains the
3248 if not os.path.isfile(specfile):
3249 raise oscerr.OscIOError(None, '\'%s\' is not a regular file' % specfile)
3252 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
3253 except UnicodeDecodeError:
3254 lines = open(specfile).readlines()
3261 if itm.startswith('%'):
3262 sections.append(itm)
3266 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
3268 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
3269 if m and m.group('val'):
3270 spec_data[tag] = m.group('val').strip()
3272 section_pat = '^%s\s*?$'
3273 for section in sections:
3274 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
3276 start = lines.index(m.group()+'\n') + 1
3278 for line in lines[start:]:
3279 if line.startswith('%'):
3282 spec_data[section] = data
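# Illustrative sketch (not part of osc): reading a tag and a section from a spec
# file. 'foo.spec' is a placeholder path to an existing spec file which defines
# the requested tag and section (otherwise the keys are simply missing).
def _example_read_meta_from_spec():
    data = read_meta_from_spec('foo.spec', 'Summary', '%description')
    # tags are looked up without the trailing colon, sections with their leading '%'
    return data['Summary'], data['%description']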
3286 def get_default_editor():
3288 system = platform.system()
3289 if system == 'Windows':
3291 if system == 'Linux':
3294 dist = platform.linux_distribution()[0]
3295 except AttributeError:
3296 dist = platform.dist()[0]
3297 if dist == 'debian':
3299 elif dist == 'fedora':
3304 def get_default_pager():
3306 system = platform.system()
3307 if system == 'Windows':
3309 if system == 'Linux':
3312 dist = platform.linux_distribution()[0]
3313 except AttributeError:
3314 dist = platform.dist()[0]
3315 if dist == 'debian':
3320 def run_pager(message, tmp_suffix=''):
3321 import tempfile, sys
3326 if not sys.stdout.isatty():