author     Michael Stahl <mstahl@redhat.com>    2013-11-09 22:05:23 +0100
committer  Michael Stahl <mstahl@redhat.com>    2013-11-10 00:03:23 +0100
commit     1b82dba6f7c11eeb48dae690052ec56ef37f41e4 (patch)
tree       ea7996025a5863d9e8eaa2855e6e81d71d1794ac /bin
parent     5c6e934b839a8b1462ecca4c687e263c85c4224b (diff)
get-bugzilla-attachments-by-mimetype: port to Python 3 syntax
Change-Id: I928eb1baa7390301036585d84895f44eb4c38d20
Diffstat (limited to 'bin')
-rwxr-xr-x  bin/get-bugzilla-attachments-by-mimetype | 60
1 file changed, 30 insertions(+), 30 deletions(-)
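The hunks below apply one mechanical pattern: Python 2 print statements become print() calls, and the old "except E, err" spelling becomes "except E as err". A minimal, self-contained sketch of that pattern (the helper name and URL here are illustrative, not taken from the script):

# Illustrative sketch of the syntax changes made in this commit; the function
# and URL are made up for the example.
def report_failure(url):
    try:
        raise IOError("connection refused")   # stand-in for a failed urlopen
    except IOError as e:                      # Python 2 wrote: except IOError, e:
        # Pass the exception as its own argument; concatenating it onto a
        # str would raise a TypeError under Python 3.
        print("caught IOError:", e)           # Python 2: print "caught IOError: ", e
        print("retrying", url)

report_failure("https://bugs.example.org/show_bug.cgi?id=1")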
diff --git a/bin/get-bugzilla-attachments-by-mimetype b/bin/get-bugzilla-attachments-by-mimetype
index 5c3de39d28fe..100e78eb1994 100755
--- a/bin/get-bugzilla-attachments-by-mimetype
+++ b/bin/get-bugzilla-attachments-by-mimetype
@@ -34,39 +34,39 @@ def urlopen_retry(url):
try:
return urllib.urlopen(url)
except IOError as e:
- print "caught IOError: ", e
+ print("caught IOError: " + e)
if maxretries == i:
raise
- print "retrying..."
+ print("retrying...")
def get_from_bug_url_via_xml(url, mimetype, prefix, suffix):
id = url.rsplit('=', 2)[1]
- print "id is", prefix, id, suffix
+ print("id is " + prefix + id + " " + suffix)
if os.path.isfile(suffix + '/' + prefix + id + '-1.' + suffix):
- print "assuming", id, "is up to date"
+ print("assuming " + id + " is up to date")
else:
- print "parsing", id
+ print("parsing", id)
sock = urlopen_retry(url+"&ctype=xml")
dom = minidom.parse(sock)
sock.close()
attachmentid=0
for attachment in dom.getElementsByTagName('attachment'):
attachmentid += 1
- print " mimetype is",
+ print(" mimetype is")
for node in attachment.childNodes:
if node.nodeName == 'type':
- print node.firstChild.nodeValue,
+ print(node.firstChild.nodeValue)
if node.firstChild.nodeValue.lower() != mimetype.lower():
- print 'skipping'
+ print('skipping')
break
elif node.nodeName == 'data':
# check if attachment is deleted (i.e. https://bugs.kde.org/show_bug.cgi?id=53343&ctype=xml)
if not node.firstChild:
- print 'deleted attachment, skipping'
+ print('deleted attachment, skipping')
continue
download = suffix + '/' +prefix + id + '-' + str(attachmentid) + '.' + suffix
- print 'downloading as', download
+ print('downloading as ' + download)
f = open(download, 'w')
f.write(base64.b64decode(node.firstChild.nodeValue))
f.close()
@@ -74,11 +74,11 @@ def get_from_bug_url_via_xml(url, mimetype, prefix, suffix):
def get_novell_bug_via_xml(url, mimetype, prefix, suffix):
id = url.rsplit('=', 2)[1]
- print "id is", prefix, id, suffix
+ print("id is " + prefix + id + " " + suffix)
if os.path.isfile(suffix + '/' + prefix + id + '-1.' + suffix):
- print "assuming", id, "is up to date"
+ print("assuming " + id + " is up to date")
else:
- print "parsing", id
+ print("parsing " + id)
sock = urlopen_retry(url+"&ctype=xml")
dom = minidom.parse(sock)
sock.close()
@@ -94,18 +94,18 @@ def get_novell_bug_via_xml(url, mimetype, prefix, suffix):
realAttachmentId = match.group(1)
handle = urlopen_retry(novellattach + realAttachmentId)
if not handle:
- print "attachment %s is not accessible", realAttachmentId
- continue
- print " mimetype is",
+ print("attachment %s is not accessible" % realAttachmentId)
+ continue
+ print(" mimetype is")
remoteMime = handle.info().gettype()
- print remoteMime,
+ print(remoteMime)
if remoteMime != mimetype:
- print "skipping"
+ print("skipping")
continue
download = suffix + '/' + prefix + id + '-' + str(attachmentid) + '.' + suffix
- print 'downloading as', download
+ print('downloading as ' + download)
f = open(download, 'w')
f.write(handle.read())
f.close()
@@ -121,14 +121,14 @@ def get_through_rpc_query(rpcurl, showurl, mimetype, prefix, suffix):
query['value0-0-0']=mimetype
result = proxy.Bug.search(query)
bugs = result['bugs']
- print len(bugs), 'bugs to process'
+ print(str(len(bugs)) + ' bugs to process')
for bug in bugs:
url = showurl + str(bug['id'])
get_from_bug_url_via_xml(url, mimetype, prefix, suffix)
- except xmlrpclib.Fault, err:
- print "A fault occurred"
- print "Fault code: %s" % err.faultCode
- print err.faultString
+ except xmlrpclib.Fault as err:
+ print("A fault occurred")
+ print("Fault code: %s" % err.faultCode)
+ print(err.faultString)
def get_through_rss_query_url(url, mimetype, prefix, suffix):
try:
@@ -145,12 +145,12 @@ def get_through_rss_query_url(url, mimetype, prefix, suffix):
try:
get_bug_function(entry['id'], mimetype, prefix, suffix)
except:
- print entry['id'], "failed:", sys.exc_info()[0]
+ print(entry['id'] + " failed: " + str(sys.exc_info()[0]))
pass
def get_through_rss_query(queryurl, mimetype, prefix, suffix):
url = queryurl + '?query_format=advanced&field0-0-0=attachments.mimetype&type0-0-0=equals&value0-0-0=' + escape(mimetype) + '&ctype=rss'
- print 'url is', url
+ print('url is ' + url)
get_through_rss_query_url(url, mimetype, prefix, suffix)
def get_launchpad_bugs(prefix):
@@ -168,7 +168,7 @@ def get_launchpad_bugs(prefix):
for bugtask in libobugs:
bug = bugtask.bug
id = str(bug.id)
- print "parsing ", id, "status:", bugtask.status, "title:", bug.title[:50]
+ print("parsing " + id + " status: " + bugtask.status + " title: " + bug.title[:50])
attachmentid = 0
for attachment in bug.attachments:
attachmentid += 1
@@ -187,10 +187,10 @@ def get_launchpad_bugs(prefix):
download = suffix + '/' + prefix + id + '-' + str(attachmentid) + '.' + suffix
if os.path.isfile(download):
- print "assuming", id, "is up to date"
+ print("assuming " + id + " is up to date")
break
- print 'mimetype is', handle.content_type, 'downloading as', download
+ print('mimetype is ' + handle.content_type + ' downloading as ' + download)
f = open(download, "w")
f.write(handle.read())
@@ -356,6 +356,6 @@ for (mimetype,extension) in mimetypes.items():
try:
get_launchpad_bugs("lp")
except ImportError:
- print "launchpadlib unavailable, skipping Ubuntu tracker"
+ print("launchpadlib unavailable, skipping Ubuntu tracker")
# vim:set shiftwidth=4 softtabstop=4 expandtab:
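Note that the commit ports only the syntax: the script still calls urllib.urlopen, which no longer exists in Python 3 (it moved to urllib.request.urlopen). A minimal sketch of what a fully ported retry helper could look like, mirroring the maxretries check visible in the first hunk; treat it as an assumption, not part of this change:

import urllib.request

maxretries = 3  # assumed module-level retry limit, as referenced by the script

def urlopen_retry(url):
    # Retry transient failures a few times before giving up, using the
    # Python 3 urllib.request API instead of the removed urllib.urlopen.
    for i in range(maxretries + 1):
        try:
            return urllib.request.urlopen(url)
        except IOError as e:   # URLError is a subclass of OSError/IOError in Python 3
            print("caught IOError:", e)
            if i == maxretries:
                raise
            print("retrying...")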