
D55199.diff

Index: docs/tools/dump_ast_matchers.py
===================================================================
--- docs/tools/dump_ast_matchers.py
+++ docs/tools/dump_ast_matchers.py
@@ -5,7 +5,10 @@
import collections
import re
-import urllib2
+try:
+    from urllib.request import urlopen
+except ImportError:
+    from urllib2 import urlopen
MATCHERS_FILE = '../../include/clang/ASTMatchers/ASTMatchers.h'
@@ -42,7 +45,7 @@
  if url not in doxygen_probes:
    try:
      print('Probing %s...' % url)
-      urllib2.urlopen(url)
+      urlopen(url)
      doxygen_probes[url] = True
    except:
      doxygen_probes[url] = False
Index: docs/tools/dump_format_style.py
===================================================================
--- docs/tools/dump_format_style.py
+++ docs/tools/dump_format_style.py
@@ -6,7 +6,6 @@
import collections
import os
import re
-import urllib2
CLANG_DIR = os.path.join(os.path.dirname(__file__), '../..')
FORMAT_STYLE_FILE = os.path.join(CLANG_DIR, 'include/clang/Format/Format.h')
Index: tools/scan-view/bin/scan-view
===================================================================
--- tools/scan-view/bin/scan-view
+++ tools/scan-view/bin/scan-view
@@ -11,7 +11,10 @@
import posixpath
import threading
import time
-import urllib
+try:
+    from urllib.request import urlopen
+except ImportError:
+    from urllib2 import urlopen
import webbrowser
# How long to wait for server to start.
@@ -29,7 +32,7 @@
def url_is_up(url):
    try:
-        o = urllib.urlopen(url)
+        o = urlopen(url)
    except IOError:
        return False
    o.close()
@@ -37,7 +40,6 @@
def start_browser(port, options):
-    import urllib
    import webbrowser

    url = 'http://%s:%d' % (options.host, port)
Index: tools/scan-view/share/ScanView.py
===================================================================
--- tools/scan-view/share/ScanView.py
+++ tools/scan-view/share/ScanView.py
@@ -6,7 +6,12 @@
from SimpleHTTPServer import SimpleHTTPRequestHandler
import os
import sys
-import urllib, urlparse
+try:
+    from urlparse import urlparse
+    from urllib import unquote
+except ImportError:
+    from urllib.parse import urlparse, unquote
+
import posixpath
import StringIO
import re
@@ -198,8 +203,8 @@
            value = ''
        else:
            name, value = chunk.split('=', 1)
-        name = urllib.unquote(name.replace('+', ' '))
-        value = urllib.unquote(value.replace('+', ' '))
+        name = unquote(name.replace('+', ' '))
+        value = unquote(value.replace('+', ' '))
        item = fields.get(name)
        if item is None:
            fields[name] = [value]
@@ -654,9 +659,9 @@
            fields = {}
        self.fields = fields

-        o = urlparse.urlparse(self.path)
+        o = urlparse(self.path)
        self.fields = parse_query(o.query, fields)
-        path = posixpath.normpath(urllib.unquote(o.path))
+        path = posixpath.normpath(unquote(o.path))

        # Split the components and strip the root prefix.
        components = path.split('/')[1:]

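For reference, every hunk above applies the same compatibility shim: try one version's import and fall back to the other on ImportError, so the rest of the script can call urlopen, unquote, and urlparse unqualified under either Python 2 or Python 3. A minimal, self-contained sketch of the pattern follows; the probe() helper and the example URL are illustrative only and are not part of this diff.

# Version-agnostic imports: whichever branch succeeds binds the same names.
try:
    from urllib.request import urlopen           # Python 3
    from urllib.parse import urlparse, unquote
except ImportError:
    from urllib2 import urlopen                  # Python 2
    from urllib import unquote
    from urlparse import urlparse

def probe(url):
    # Return True if the URL is reachable; IOError covers URLError on both versions.
    try:
        urlopen(url).close()
        return True
    except IOError:
        return False

print(probe('https://clang.llvm.org/doxygen/'))  # e.g. True when the docs site is reachable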