PageRenderTime 56ms CodeModel.GetById 13ms RepoModel.GetById 0ms app.codeStats 0ms

/thirdparty/breakpad/third_party/protobuf/protobuf/gtest/scripts/upload.py

http://github.com/tomahawk-player/tomahawk
Python | 1387 lines | 1265 code | 23 blank | 99 comment | 49 complexity | f92904342be5677b10a8010e33fdebb7 MD5 | raw file
Possible License(s): LGPL-2.1, BSD-3-Clause, GPL-3.0, GPL-2.0
  1. #!/usr/bin/env python
  2. #
  3. # Copyright 2007 Google Inc.
  4. #
  5. # Licensed under the Apache License, Version 2.0 (the "License");
  6. # you may not use this file except in compliance with the License.
  7. # You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. """Tool for uploading diffs from a version control system to the codereview app.
  17. Usage summary: upload.py [options] [-- diff_options]
  18. Diff options are passed to the diff command of the underlying system.
  19. Supported version control systems:
  20. Git
  21. Mercurial
  22. Subversion
  23. It is important for Git/Mercurial users to specify a tree/node/branch to diff
  24. against by using the '--rev' option.
  25. """
  26. # This code is derived from appcfg.py in the App Engine SDK (open source),
  27. # and from ASPN recipe #146306.
import cookielib
import getpass
import hashlib
import logging
import md5
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
  42. try:
  43. import readline
  44. except ImportError:
  45. pass
  46. # The logging verbosity:
  47. # 0: Errors only.
  48. # 1: Status messages.
  49. # 2: Info logs.
  50. # 3: Debug logs.
  51. verbosity = 1
  52. # Max size of patch or base file.
  53. MAX_UPLOAD_SIZE = 900 * 1024
  54. def GetEmail(prompt):
  55. """Prompts the user for their email address and returns it.
  56. The last used email address is saved to a file and offered up as a suggestion
  57. to the user. If the user presses enter without typing in anything the last
  58. used email address is used. If the user enters a new address, it is saved
  59. for next time we prompt.
  60. """
  61. last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  62. last_email = ""
  63. if os.path.exists(last_email_file_name):
  64. try:
  65. last_email_file = open(last_email_file_name, "r")
  66. last_email = last_email_file.readline().strip("\n")
  67. last_email_file.close()
  68. prompt += " [%s]" % last_email
  69. except IOError, e:
  70. pass
  71. email = raw_input(prompt + ": ").strip()
  72. if email:
  73. try:
  74. last_email_file = open(last_email_file_name, "w")
  75. last_email_file.write(email)
  76. last_email_file.close()
  77. except IOError, e:
  78. pass
  79. else:
  80. email = last_email
  81. return email
  82. def StatusUpdate(msg):
  83. """Print a status message to stdout.
  84. If 'verbosity' is greater than 0, print the message.
  85. Args:
  86. msg: The string to print.
  87. """
  88. if verbosity > 0:
  89. print msg
  90. def ErrorExit(msg):
  91. """Print an error message to stderr and exit."""
  92. print >>sys.stderr, msg
  93. sys.exit(1)
  94. class ClientLoginError(urllib2.HTTPError):
  95. """Raised to indicate there was an error authenticating with ClientLogin."""
  96. def __init__(self, url, code, msg, headers, args):
  97. urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
  98. self.args = args
  99. self.reason = args["Error"]
  100. class AbstractRpcServer(object):
  101. """Provides a common interface for a simple RPC server."""
  102. def __init__(self, host, auth_function, host_override=None, extra_headers={},
  103. save_cookies=False):
  104. """Creates a new HttpRpcServer.
  105. Args:
  106. host: The host to send requests to.
  107. auth_function: A function that takes no arguments and returns an
  108. (email, password) tuple when called. Will be called if authentication
  109. is required.
  110. host_override: The host header to send to the server (defaults to host).
  111. extra_headers: A dict of extra headers to append to every request.
  112. save_cookies: If True, save the authentication cookies to local disk.
  113. If False, use an in-memory cookiejar instead. Subclasses must
  114. implement this functionality. Defaults to False.
  115. """
  116. self.host = host
  117. self.host_override = host_override
  118. self.auth_function = auth_function
  119. self.authenticated = False
  120. self.extra_headers = extra_headers
  121. self.save_cookies = save_cookies
  122. self.opener = self._GetOpener()
  123. if self.host_override:
  124. logging.info("Server: %s; Host: %s", self.host, self.host_override)
  125. else:
  126. logging.info("Server: %s", self.host)
  127. def _GetOpener(self):
  128. """Returns an OpenerDirector for making HTTP requests.
  129. Returns:
  130. A urllib2.OpenerDirector object.
  131. """
  132. raise NotImplementedError()
  133. def _CreateRequest(self, url, data=None):
  134. """Creates a new urllib request."""
  135. logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
  136. req = urllib2.Request(url, data=data)
  137. if self.host_override:
  138. req.add_header("Host", self.host_override)
  139. for key, value in self.extra_headers.iteritems():
  140. req.add_header(key, value)
  141. return req
  142. def _GetAuthToken(self, email, password):
  143. """Uses ClientLogin to authenticate the user, returning an auth token.
  144. Args:
  145. email: The user's email address
  146. password: The user's password
  147. Raises:
  148. ClientLoginError: If there was an error authenticating with ClientLogin.
  149. HTTPError: If there was some other form of HTTP error.
  150. Returns:
  151. The authentication token returned by ClientLogin.
  152. """
  153. account_type = "GOOGLE"
  154. if self.host.endswith(".google.com"):
  155. # Needed for use inside Google.
  156. account_type = "HOSTED"
  157. req = self._CreateRequest(
  158. url="https://www.google.com/accounts/ClientLogin",
  159. data=urllib.urlencode({
  160. "Email": email,
  161. "Passwd": password,
  162. "service": "ah",
  163. "source": "rietveld-codereview-upload",
  164. "accountType": account_type,
  165. }),
  166. )
  167. try:
  168. response = self.opener.open(req)
  169. response_body = response.read()
  170. response_dict = dict(x.split("=")
  171. for x in response_body.split("\n") if x)
  172. return response_dict["Auth"]
  173. except urllib2.HTTPError, e:
  174. if e.code == 403:
  175. body = e.read()
  176. response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
  177. raise ClientLoginError(req.get_full_url(), e.code, e.msg,
  178. e.headers, response_dict)
  179. else:
  180. raise
  181. def _GetAuthCookie(self, auth_token):
  182. """Fetches authentication cookies for an authentication token.
  183. Args:
  184. auth_token: The authentication token returned by ClientLogin.
  185. Raises:
  186. HTTPError: If there was an error fetching the authentication cookies.
  187. """
  188. # This is a dummy value to allow us to identify when we're successful.
  189. continue_location = "http://localhost/"
  190. args = {"continue": continue_location, "auth": auth_token}
  191. req = self._CreateRequest("http://%s/_ah/login?%s" %
  192. (self.host, urllib.urlencode(args)))
  193. try:
  194. response = self.opener.open(req)
  195. except urllib2.HTTPError, e:
  196. response = e
  197. if (response.code != 302 or
  198. response.info()["location"] != continue_location):
  199. raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
  200. response.headers, response.fp)
  201. self.authenticated = True
  202. def _Authenticate(self):
  203. """Authenticates the user.
  204. The authentication process works as follows:
  205. 1) We get a username and password from the user
  206. 2) We use ClientLogin to obtain an AUTH token for the user
  207. (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
  208. 3) We pass the auth token to /_ah/login on the server to obtain an
  209. authentication cookie. If login was successful, it tries to redirect
  210. us to the URL we provided.
  211. If we attempt to access the upload API without first obtaining an
  212. authentication cookie, it returns a 401 response and directs us to
  213. authenticate ourselves with ClientLogin.
  214. """
  215. for i in range(3):
  216. credentials = self.auth_function()
  217. try:
  218. auth_token = self._GetAuthToken(credentials[0], credentials[1])
  219. except ClientLoginError, e:
  220. if e.reason == "BadAuthentication":
  221. print >>sys.stderr, "Invalid username or password."
  222. continue
  223. if e.reason == "CaptchaRequired":
  224. print >>sys.stderr, (
  225. "Please go to\n"
  226. "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
  227. "and verify you are a human. Then try again.")
  228. break
  229. if e.reason == "NotVerified":
  230. print >>sys.stderr, "Account not verified."
  231. break
  232. if e.reason == "TermsNotAgreed":
  233. print >>sys.stderr, "User has not agreed to TOS."
  234. break
  235. if e.reason == "AccountDeleted":
  236. print >>sys.stderr, "The user account has been deleted."
  237. break
  238. if e.reason == "AccountDisabled":
  239. print >>sys.stderr, "The user account has been disabled."
  240. break
  241. if e.reason == "ServiceDisabled":
  242. print >>sys.stderr, ("The user's access to the service has been "
  243. "disabled.")
  244. break
  245. if e.reason == "ServiceUnavailable":
  246. print >>sys.stderr, "The service is not available; try again later."
  247. break
  248. raise
  249. self._GetAuthCookie(auth_token)
  250. return
  251. def Send(self, request_path, payload=None,
  252. content_type="application/octet-stream",
  253. timeout=None,
  254. **kwargs):
  255. """Sends an RPC and returns the response.
  256. Args:
  257. request_path: The path to send the request to, eg /api/appversion/create.
  258. payload: The body of the request, or None to send an empty request.
  259. content_type: The Content-Type header to use.
  260. timeout: timeout in seconds; default None i.e. no timeout.
  261. (Note: for large requests on OS X, the timeout doesn't work right.)
  262. kwargs: Any keyword arguments are converted into query string parameters.
  263. Returns:
  264. The response body, as a string.
  265. """
  266. # TODO: Don't require authentication. Let the server say
  267. # whether it is necessary.
  268. if not self.authenticated:
  269. self._Authenticate()
  270. old_timeout = socket.getdefaulttimeout()
  271. socket.setdefaulttimeout(timeout)
  272. try:
  273. tries = 0
  274. while True:
  275. tries += 1
  276. args = dict(kwargs)
  277. url = "http://%s%s" % (self.host, request_path)
  278. if args:
  279. url += "?" + urllib.urlencode(args)
  280. req = self._CreateRequest(url=url, data=payload)
  281. req.add_header("Content-Type", content_type)
  282. try:
  283. f = self.opener.open(req)
  284. response = f.read()
  285. f.close()
  286. return response
  287. except urllib2.HTTPError, e:
  288. if tries > 3:
  289. raise
  290. elif e.code == 401:
  291. self._Authenticate()
  292. ## elif e.code >= 500 and e.code < 600:
  293. ## # Server Error - try again.
  294. ## continue
  295. else:
  296. raise
  297. finally:
  298. socket.setdefaulttimeout(old_timeout)
  299. class HttpRpcServer(AbstractRpcServer):
  300. """Provides a simplified RPC-style interface for HTTP requests."""
  301. def _Authenticate(self):
  302. """Save the cookie jar after authentication."""
  303. super(HttpRpcServer, self)._Authenticate()
  304. if self.save_cookies:
  305. StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
  306. self.cookie_jar.save()
  307. def _GetOpener(self):
  308. """Returns an OpenerDirector that supports cookies and ignores redirects.
  309. Returns:
  310. A urllib2.OpenerDirector object.
  311. """
  312. opener = urllib2.OpenerDirector()
  313. opener.add_handler(urllib2.ProxyHandler())
  314. opener.add_handler(urllib2.UnknownHandler())
  315. opener.add_handler(urllib2.HTTPHandler())
  316. opener.add_handler(urllib2.HTTPDefaultErrorHandler())
  317. opener.add_handler(urllib2.HTTPSHandler())
  318. opener.add_handler(urllib2.HTTPErrorProcessor())
  319. if self.save_cookies:
  320. self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
  321. self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
  322. if os.path.exists(self.cookie_file):
  323. try:
  324. self.cookie_jar.load()
  325. self.authenticated = True
  326. StatusUpdate("Loaded authentication cookies from %s" %
  327. self.cookie_file)
  328. except (cookielib.LoadError, IOError):
  329. # Failed to load cookies - just ignore them.
  330. pass
  331. else:
  332. # Create an empty cookie file with mode 600
  333. fd = os.open(self.cookie_file, os.O_CREAT, 0600)
  334. os.close(fd)
  335. # Always chmod the cookie file
  336. os.chmod(self.cookie_file, 0600)
  337. else:
  338. # Don't save cookies across runs of update.py.
  339. self.cookie_jar = cookielib.CookieJar()
  340. opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
  341. return opener
  342. parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
  343. parser.add_option("-y", "--assume_yes", action="store_true",
  344. dest="assume_yes", default=False,
  345. help="Assume that the answer to yes/no questions is 'yes'.")
  346. # Logging
  347. group = parser.add_option_group("Logging options")
  348. group.add_option("-q", "--quiet", action="store_const", const=0,
  349. dest="verbose", help="Print errors only.")
  350. group.add_option("-v", "--verbose", action="store_const", const=2,
  351. dest="verbose", default=1,
  352. help="Print info level logs (default).")
  353. group.add_option("--noisy", action="store_const", const=3,
  354. dest="verbose", help="Print all logs.")
  355. # Review server
  356. group = parser.add_option_group("Review server options")
  357. group.add_option("-s", "--server", action="store", dest="server",
  358. default="codereview.appspot.com",
  359. metavar="SERVER",
  360. help=("The server to upload to. The format is host[:port]. "
  361. "Defaults to 'codereview.appspot.com'."))
  362. group.add_option("-e", "--email", action="store", dest="email",
  363. metavar="EMAIL", default=None,
  364. help="The username to use. Will prompt if omitted.")
  365. group.add_option("-H", "--host", action="store", dest="host",
  366. metavar="HOST", default=None,
  367. help="Overrides the Host header sent with all RPCs.")
  368. group.add_option("--no_cookies", action="store_false",
  369. dest="save_cookies", default=True,
  370. help="Do not save authentication cookies to local disk.")
  371. # Issue
  372. group = parser.add_option_group("Issue options")
  373. group.add_option("-d", "--description", action="store", dest="description",
  374. metavar="DESCRIPTION", default=None,
  375. help="Optional description when creating an issue.")
  376. group.add_option("-f", "--description_file", action="store",
  377. dest="description_file", metavar="DESCRIPTION_FILE",
  378. default=None,
  379. help="Optional path of a file that contains "
  380. "the description when creating an issue.")
  381. group.add_option("-r", "--reviewers", action="store", dest="reviewers",
  382. metavar="REVIEWERS", default=None,
  383. help="Add reviewers (comma separated email addresses).")
  384. group.add_option("--cc", action="store", dest="cc",
  385. metavar="CC", default=None,
  386. help="Add CC (comma separated email addresses).")
  387. # Upload options
  388. group = parser.add_option_group("Patch options")
  389. group.add_option("-m", "--message", action="store", dest="message",
  390. metavar="MESSAGE", default=None,
  391. help="A message to identify the patch. "
  392. "Will prompt if omitted.")
  393. group.add_option("-i", "--issue", type="int", action="store",
  394. metavar="ISSUE", default=None,
  395. help="Issue number to which to add. Defaults to new issue.")
  396. group.add_option("--download_base", action="store_true",
  397. dest="download_base", default=False,
  398. help="Base files will be downloaded by the server "
  399. "(side-by-side diffs may not work on files with CRs).")
  400. group.add_option("--rev", action="store", dest="revision",
  401. metavar="REV", default=None,
  402. help="Branch/tree/revision to diff against (used by DVCS).")
  403. group.add_option("--send_mail", action="store_true",
  404. dest="send_mail", default=False,
  405. help="Send notification email to reviewers.")
  406. def GetRpcServer(options):
  407. """Returns an instance of an AbstractRpcServer.
  408. Returns:
  409. A new AbstractRpcServer, on which RPC calls can be made.
  410. """
  411. rpc_server_class = HttpRpcServer
  412. def GetUserCredentials():
  413. """Prompts the user for a username and password."""
  414. email = options.email
  415. if email is None:
  416. email = GetEmail("Email (login for uploading to %s)" % options.server)
  417. password = getpass.getpass("Password for %s: " % email)
  418. return (email, password)
  419. # If this is the dev_appserver, use fake authentication.
  420. host = (options.host or options.server).lower()
  421. if host == "localhost" or host.startswith("localhost:"):
  422. email = options.email
  423. if email is None:
  424. email = "test@example.com"
  425. logging.info("Using debug user %s. Override with --email" % email)
  426. server = rpc_server_class(
  427. options.server,
  428. lambda: (email, "password"),
  429. host_override=options.host,
  430. extra_headers={"Cookie":
  431. 'dev_appserver_login="%s:False"' % email},
  432. save_cookies=options.save_cookies)
  433. # Don't try to talk to ClientLogin.
  434. server.authenticated = True
  435. return server
  436. return rpc_server_class(options.server, GetUserCredentials,
  437. host_override=options.host,
  438. save_cookies=options.save_cookies)
  439. def EncodeMultipartFormData(fields, files):
  440. """Encode form fields for multipart/form-data.
  441. Args:
  442. fields: A sequence of (name, value) elements for regular form fields.
  443. files: A sequence of (name, filename, value) elements for data to be
  444. uploaded as files.
  445. Returns:
  446. (content_type, body) ready for httplib.HTTP instance.
  447. Source:
  448. http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  449. """
  450. BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  451. CRLF = '\r\n'
  452. lines = []
  453. for (key, value) in fields:
  454. lines.append('--' + BOUNDARY)
  455. lines.append('Content-Disposition: form-data; name="%s"' % key)
  456. lines.append('')
  457. lines.append(value)
  458. for (key, filename, value) in files:
  459. lines.append('--' + BOUNDARY)
  460. lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
  461. (key, filename))
  462. lines.append('Content-Type: %s' % GetContentType(filename))
  463. lines.append('')
  464. lines.append(value)
  465. lines.append('--' + BOUNDARY + '--')
  466. lines.append('')
  467. body = CRLF.join(lines)
  468. content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  469. return content_type, body
  470. def GetContentType(filename):
  471. """Helper to guess the content-type from the filename."""
  472. return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
  473. # Use a shell for subcommands on Windows to get a PATH search.
  474. use_shell = sys.platform.startswith("win")
  475. def RunShellWithReturnCode(command, print_output=False,
  476. universal_newlines=True):
  477. """Executes a command and returns the output from stdout and the return code.
  478. Args:
  479. command: Command to execute.
  480. print_output: If True, the output is printed to stdout.
  481. If False, both stdout and stderr are ignored.
  482. universal_newlines: Use universal_newlines flag (default: True).
  483. Returns:
  484. Tuple (output, return code)
  485. """
  486. logging.info("Running %s", command)
  487. p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
  488. shell=use_shell, universal_newlines=universal_newlines)
  489. if print_output:
  490. output_array = []
  491. while True:
  492. line = p.stdout.readline()
  493. if not line:
  494. break
  495. print line.strip("\n")
  496. output_array.append(line)
  497. output = "".join(output_array)
  498. else:
  499. output = p.stdout.read()
  500. p.wait()
  501. errout = p.stderr.read()
  502. if print_output and errout:
  503. print >>sys.stderr, errout
  504. p.stdout.close()
  505. p.stderr.close()
  506. return output, p.returncode
  507. def RunShell(command, silent_ok=False, universal_newlines=True,
  508. print_output=False):
  509. data, retcode = RunShellWithReturnCode(command, print_output,
  510. universal_newlines)
  511. if retcode:
  512. ErrorExit("Got error status from %s:\n%s" % (command, data))
  513. if not silent_ok and not data:
  514. ErrorExit("No output from %s" % command)
  515. return data
  516. class VersionControlSystem(object):
  517. """Abstract base class providing an interface to the VCS."""
  518. def __init__(self, options):
  519. """Constructor.
  520. Args:
  521. options: Command line options.
  522. """
  523. self.options = options
  524. def GenerateDiff(self, args):
  525. """Return the current diff as a string.
  526. Args:
  527. args: Extra arguments to pass to the diff command.
  528. """
  529. raise NotImplementedError(
  530. "abstract method -- subclass %s must override" % self.__class__)
  531. def GetUnknownFiles(self):
  532. """Return a list of files unknown to the VCS."""
  533. raise NotImplementedError(
  534. "abstract method -- subclass %s must override" % self.__class__)
  535. def CheckForUnknownFiles(self):
  536. """Show an "are you sure?" prompt if there are unknown files."""
  537. unknown_files = self.GetUnknownFiles()
  538. if unknown_files:
  539. print "The following files are not added to version control:"
  540. for line in unknown_files:
  541. print line
  542. prompt = "Are you sure to continue?(y/N) "
  543. answer = raw_input(prompt).strip()
  544. if answer != "y":
  545. ErrorExit("User aborted")
  546. def GetBaseFile(self, filename):
  547. """Get the content of the upstream version of a file.
  548. Returns:
  549. A tuple (base_content, new_content, is_binary, status)
  550. base_content: The contents of the base file.
  551. new_content: For text files, this is empty. For binary files, this is
  552. the contents of the new file, since the diff output won't contain
  553. information to reconstruct the current file.
  554. is_binary: True iff the file is binary.
  555. status: The status of the file.
  556. """
  557. raise NotImplementedError(
  558. "abstract method -- subclass %s must override" % self.__class__)
  559. def GetBaseFiles(self, diff):
  560. """Helper that calls GetBase file for each file in the patch.
  561. Returns:
  562. A dictionary that maps from filename to GetBaseFile's tuple. Filenames
  563. are retrieved based on lines that start with "Index:" or
  564. "Property changes on:".
  565. """
  566. files = {}
  567. for line in diff.splitlines(True):
  568. if line.startswith('Index:') or line.startswith('Property changes on:'):
  569. unused, filename = line.split(':', 1)
  570. # On Windows if a file has property changes its filename uses '\'
  571. # instead of '/'.
  572. filename = filename.strip().replace('\\', '/')
  573. files[filename] = self.GetBaseFile(filename)
  574. return files
  575. def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
  576. files):
  577. """Uploads the base files (and if necessary, the current ones as well)."""
  578. def UploadFile(filename, file_id, content, is_binary, status, is_base):
  579. """Uploads a file to the server."""
  580. file_too_large = False
  581. if is_base:
  582. type = "base"
  583. else:
  584. type = "current"
  585. if len(content) > MAX_UPLOAD_SIZE:
  586. print ("Not uploading the %s file for %s because it's too large." %
  587. (type, filename))
  588. file_too_large = True
  589. content = ""
  590. checksum = md5.new(content).hexdigest()
  591. if options.verbose > 0 and not file_too_large:
  592. print "Uploading %s file for %s" % (type, filename)
  593. url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
  594. form_fields = [("filename", filename),
  595. ("status", status),
  596. ("checksum", checksum),
  597. ("is_binary", str(is_binary)),
  598. ("is_current", str(not is_base)),
  599. ]
  600. if file_too_large:
  601. form_fields.append(("file_too_large", "1"))
  602. if options.email:
  603. form_fields.append(("user", options.email))
  604. ctype, body = EncodeMultipartFormData(form_fields,
  605. [("data", filename, content)])
  606. response_body = rpc_server.Send(url, body,
  607. content_type=ctype)
  608. if not response_body.startswith("OK"):
  609. StatusUpdate(" --> %s" % response_body)
  610. sys.exit(1)
  611. patches = dict()
  612. [patches.setdefault(v, k) for k, v in patch_list]
  613. for filename in patches.keys():
  614. base_content, new_content, is_binary, status = files[filename]
  615. file_id_str = patches.get(filename)
  616. if file_id_str.find("nobase") != -1:
  617. base_content = None
  618. file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
  619. file_id = int(file_id_str)
  620. if base_content != None:
  621. UploadFile(filename, file_id, base_content, is_binary, status, True)
  622. if new_content != None:
  623. UploadFile(filename, file_id, new_content, is_binary, status, False)
  624. def IsImage(self, filename):
  625. """Returns true if the filename has an image extension."""
  626. mimetype = mimetypes.guess_type(filename)[0]
  627. if not mimetype:
  628. return False
  629. return mimetype.startswith("image/")
  630. class SubversionVCS(VersionControlSystem):
  631. """Implementation of the VersionControlSystem interface for Subversion."""
  def __init__(self, options):
    super(SubversionVCS, self).__init__(options)
    # --rev accepts "N" or "N:M"; split it into start/end revision strings.
    if self.options.revision:
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
    self.svnls_cache = {}
    # SVN base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)
  def GuessBase(self, required):
    """Wrapper for _GuessBase; returns the base URL cached by __init__."""
    return self.svn_base
  def _GuessBase(self, required):
    """Returns the SVN base URL.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        # Known hosts get hand-built *checkout* URLs; everything else is
        # reassembled from the parsed pieces.
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              # [9:] keeps the final '/' of "/projects/" as the new
              # leading slash.
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            # [6:] keeps the final '/' of "/repos/" as the new leading slash.
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None
  def GenerateDiff(self, args):
    """Run "svn diff" (honoring --rev) and return its output.

    Exits via ErrorExit if the output contains no recognizable patch
    sections ("Index:" / "Property changes on:" lines).
    """
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data
  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
        # Standard keywords
        'Date': ['Date', 'LastChangedDate'],
        'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
        'Author': ['Author', 'LastChangedBy'],
        'HeadURL': ['HeadURL', 'URL'],
        'Id': ['Id'],

        # Aliases
        'LastChangedDate': ['LastChangedDate', 'Date'],
        'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
        'LastChangedBy': ['LastChangedBy', 'Author'],
        'URL': ['URL', 'HeadURL'],
    }

    def repl(m):
      # Expanded "$Kw:: value $" keeps its width (value blanked to spaces);
      # plain "$Kw: value $" collapses to "$Kw$".
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)

    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
  def GetUnknownFiles(self):
    """Return the "svn status" lines for files svn does not track ('?')."""
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    return [line for line in status.split("\n")
            if line and line[0] == "?"]
  def ReadFile(self, filename):
    """Returns the contents of a file, read in binary mode."""
    # Renamed the handle from 'file' to 'f' -- the original shadowed the
    # builtin 'file'.  The dead 'result = ""' initializer is also gone;
    # read errors propagate exactly as before.
    f = open(filename, 'rb')
    try:
      result = f.read()
    finally:
      f.close()
    return result
  def GetStatus(self, filename):
    """Returns the status of a file.

    With no --rev, the status is the first column block of "svn status"
    output.  With --rev, "svn list" output for the old and new revisions
    is compared to synthesize a D/M/A status.  The returned string is
    padded to svn's column width (>= 4 chars) because GetBaseFile indexes
    status[3]; the scraped original had collapsed this padding to a single
    trailing space, which would raise IndexError.
    """
    if not self.options.revision:
      status = RunShell(["svn", "status", "--ignore-externals", filename])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n". See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to get status for %s." % filename)
        old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      old_files, new_files = self.svnls_cache[dirname]
      if relfilename in old_files and relfilename not in new_files:
        status = "D   "
      elif relfilename in old_files and relfilename in new_files:
        status = "M   "
      else:
        status = "A   "
    return status
  def GetBaseFile(self, filename):
    """Fetches the base (pre-change) content for a single file.

    Returns:
      A tuple (base_content, new_content, is_binary, status) where status
      is the first five characters of the svn status line.  new_content is
      only populated for binary image files whose new bytes must be uploaded
      because diff output cannot carry them.
    """
    status = self.GetStatus(filename)
    base_content = None
    new_content = None

    # If a file is copied its status will be "A  +", which signifies
    # "addition-with-history". See "svn st" for more information.  We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
      # We'll need to upload the new content if we're adding a binary file
      # since diff's output won't contain it.
      mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                          silent_ok=True)
      base_content = ""
      is_binary = mimetype and not mimetype.startswith("text/")
      if is_binary and self.IsImage(filename):
        new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
      args = []
      if self.options.revision:
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      get_base = False
      is_binary = mimetype and not mimetype.startswith("text/")
      if status[0] == " ":
        # Property-only change: empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        if self.IsImage(filename):
          get_base = True
          if status[0] == "M":
            if not self.rev_end:
              new_content = self.ReadFile(filename)
            else:
              url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
              new_content = RunShell(["svn", "cat", url],
                                     universal_newlines=True, silent_ok=True)
        else:
          # Binary but not an image: server can't display it, skip content.
          base_content = ""
      else:
        get_base = True

      if get_base:
        # Text files go through newline translation; binaries must not.
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content = RunShell(["svn", "cat", filename],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        if not is_binary:
          args = []
          if self.rev_start:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          else:
            url = filename
            args += ["-r", "BASE"]
          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
          keywords, returncode = RunShellWithReturnCode(cmd)
          if keywords and not returncode:
            # Neutralize expanded svn keywords ($Id$ etc.) so the diff
            # applies cleanly against the fetched base.
            base_content = self._CollapseKeywords(base_content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
  878. class GitVCS(VersionControlSystem):
  879. """Implementation of the VersionControlSystem interface for Git."""
  880. def __init__(self, options):
  881. super(GitVCS, self).__init__(options)
  882. # Map of filename -> hash of base file.
  883. self.base_hashes = {}
  884. def GenerateDiff(self, extra_args):
  885. # This is more complicated than svn's GenerateDiff because we must convert
  886. # the diff output to include an svn-style "Index:" line as well as record
  887. # the hashes of the base files, so we can upload them along with our diff.
  888. if self.options.revision:
  889. extra_args = [self.options.revision] + extra_args
  890. gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
  891. svndiff = []
  892. filecount = 0
  893. filename = None
  894. for line in gitdiff.splitlines():
  895. match = re.match(r"diff --git a/(.*) b/.*$", line)
  896. if match:
  897. filecount += 1
  898. filename = match.group(1)
  899. svndiff.append("Index: %s\n" % filename)
  900. else:
  901. # The "index" line in a git diff looks like this (long hashes elided):
  902. # index 82c0d44..b2cee3f 100755
  903. # We want to save the left hash, as that identifies the base file.
  904. match = re.match(r"index (\w+)\.\.", line)
  905. if match:
  906. self.base_hashes[filename] = match.group(1)
  907. svndiff.append(line + "\n")
  908. if not filecount:
  909. ErrorExit("No valid patches found in output from git diff")
  910. return "".join(svndiff)
  911. def GetUnknownFiles(self):
  912. status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
  913. silent_ok=True)
  914. return status.splitlines()
  915. def GetBaseFile(self, filename):
  916. hash = self.base_hashes[filename]
  917. base_content = None
  918. new_content = None
  919. is_binary = False
  920. if hash == "0" * 40: # All-zero hash indicates no base file.
  921. status = "A"
  922. base_content = ""
  923. else:
  924. status = "M"
  925. base_content, returncode = RunShellWithReturnCode(["git", "show", hash])
  926. if returncode:
  927. ErrorExit("Got error status from 'git show %s'" % hash)
  928. return (base_content, new_content, is_binary, status)
  929. class MercurialVCS(VersionControlSystem):
  930. """Implementation of the VersionControlSystem interface for Mercurial."""
  931. def __init__(self, options, repo_dir):
  932. super(MercurialVCS, self).__init__(options)
  933. # Absolute path to repository (we can be in a subdir)
  934. self.repo_dir = os.path.normpath(repo_dir)
  935. # Compute the subdir
  936. cwd = os.path.normpath(os.getcwd())
  937. assert cwd.startswith(self.repo_dir)
  938. self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
  939. if self.options.revision:
  940. self.base_rev = self.options.revision
  941. else:
  942. self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
  943. def _GetRelPath(self, filename):
  944. """Get relative path of a file according to the current directory,
  945. given its logical path in the repo."""
  946. assert filename.startswith(self.subdir), filename
  947. return filename[len(self.subdir):].lstrip(r"\/")
  948. def GenerateDiff(self, extra_args):
  949. # If no file specified, restrict to the current subdir
  950. extra_args = extra_args or ["."]
  951. cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
  952. data = RunShell(cmd, silent_ok=True)
  953. svndiff = []
  954. filecount = 0
  955. for line in data.splitlines():
  956. m = re.match("diff --git a/(\S+) b/(\S+)", line)
  957. if m:
  958. # Modify line to make it look like as it comes from svn diff.
  959. # With this modification no changes on the server side are required
  960. # to make upload.py work with Mercurial repos.
  961. # NOTE: for proper handling of moved/copied files, we have to use
  962. # the second filename.
  963. filename = m.group(2)
  964. svndiff.append("Index: %s" % filename)
  965. svndiff.append("=" * 67)
  966. filecount += 1
  967. logging.info(line)
  968. else:
  969. svndiff.append(line)
  970. if not filecount:
  971. ErrorExit("No valid patches found in output from hg diff")
  972. return "\n".join(svndiff) + "\n"
  973. def GetUnknownFiles(self):
  974. """Return a list of files unknown to the VCS."""
  975. args = []
  976. status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
  977. silent_ok=True)
  978. unknown_files = []
  979. for line in status.splitlines():
  980. st, fn = line.split(" ", 1)
  981. if st == "?":
  982. unknown_files.append(fn)
  983. return unknown_files
  984. def GetBaseFile(self, filename):
  985. # "hg status" and "hg cat" both take a path relative to the current subdir
  986. # rather than to the repo root, but "hg diff" has given us the full path
  987. # to the repo root.
  988. base_content = ""
  989. new_content = None
  990. is_binary = False
  991. oldrelpath = relpath = self._GetRelPath(filename)
  992. # "hg status -C" returns two lines for moved/copied files, one otherwise
  993. out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
  994. out = out.splitlines()
  995. # HACK: strip error message about missing file/directory if it isn't in
  996. # the working copy
  997. if out[0].startswith('%s: ' % relpath):
  998. out = out[1:]
  999. if len(out) > 1:
  1000. # Moved/copied => considered as modified, use old filename to
  1001. # retrieve base contents
  1002. oldrelpath = out[1].strip()
  1003. status = "M"
  1004. else:
  1005. status, _ = out[0].split(' ', 1)
  1006. if status != "A":
  1007. base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
  1008. silent_ok=True)
  1009. is_binary = "\0" in base_content # Mercurial's heuristic
  1010. if status != "R":
  1011. new_content = open(relpath, "rb").read()
  1012. is_binary = is_binary or "\0" in new_content
  1013. if is_binary and base_content:
  1014. # Fetch again without converting newlines
  1015. base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
  1016. silent_ok=True, universal_newlines=False)
  1017. if not is_binary or not self.IsImage(relpath):
  1018. new_content = None
  1019. return base_content, new_content, is_binary, status
  1020. # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
  1021. def SplitPatch(data):
  1022. """Splits a patch into separate pieces for each file.
  1023. Args:
  1024. data: A string containing the output of svn diff.
  1025. Returns:
  1026. A list of 2-tuple (filename, text) where text is the svn diff output
  1027. pertaining to filename.
  1028. """
  1029. patches = []
  1030. filename = None
  1031. diff = []
  1032. for line in data.splitlines(True):
  1033. new_filename = None
  1034. if line.startswith('Index:'):
  1035. unused, new_filename = line.split(':', 1)
  1036. new_filename = new_filename.strip()
  1037. elif line.startswith('Property changes on:'):
  1038. unused, temp_filename = line.split(':', 1)
  1039. # When a file is modified, paths use '/' between directories, however
  1040. # when a property is modified '\' is used on Windows. Make them the same
  1041. # otherwise the file shows up twice.
  1042. temp_filename = temp_filename.strip().replace('\\', '/')
  1043. if temp_filename != filename:
  1044. # File has property changes but no modifications, create a new diff.
  1045. new_filename = temp_filename
  1046. if new_filename:
  1047. if filename and diff:
  1048. patches.append((filename, ''.join(diff)))
  1049. filename = new_filename
  1050. diff = [line]
  1051. continue
  1052. if diff is not None:
  1053. diff.append(line)
  1054. if filename and diff:
  1055. patches.append((filename, ''.join(diff)))
  1056. return patches
  1057. def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
  1058. """Uploads a separate patch for each file in the diff output.
  1059. Returns a list of [patch_key, filename] for each file.
  1060. """
  1061. patches = SplitPatch(data)
  1062. rv = []
  1063. for patch in patches:
  1064. if len(patch[1]) > MAX_UPLOAD_SIZE:
  1065. print ("Not uploading the patch for " + patch[0] +
  1066. " because the file is too large.")
  1067. continue
  1068. form_fields = [("filename", patch[0])]
  1069. if not options.download_base:
  1070. form_fields.append(("content_upload", "1"))
  1071. files = [("data", "data.diff", patch[1])]
  1072. ctype, body = EncodeMultipartFormData(form_fields, files)
  1073. url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
  1074. print "Uploading patch for " + patch[0]
  1075. response_body = rpc_server.Send(url, body, content_type=ctype)
  1076. lines = response_body.splitlines()
  1077. if not lines or lines[0] != "OK":
  1078. StatusUpdate(" --> %s" % response_body)
  1079. sys.exit(1)
  1080. rv.append([lines[1], patch[0]])
  1081. return rv
  1082. def GuessVCS(options):
  1083. """Helper to guess the version control system.
  1084. This examines the current directory, guesses which VersionControlSystem
  1085. we're using, and returns an instance of the appropriate class. Exit with an
  1086. error if we can't figure it out.
  1087. Returns:
  1088. A VersionControlSystem instance. Exits if the VCS can't be guessed.
  1089. """
  1090. # Mercurial has a command to get the base directory of a repository
  1091. # Try running it, but don't die if we don't have hg installed.
  1092. # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  1093. try:
  1094. out, returncode = RunShellWithReturnCode(["hg", "root"])
  1095. if returncode == 0:
  1096. return MercurialVCS(options, out.strip())
  1097. except OSError, (errno, message):
  1098. if errno != 2: # ENOENT -- they don't have hg installed.
  1099. raise
  1100. # Subversion has a .svn in all working directories.
  1101. if os.path.isdir('.svn'):
  1102. logging.info("Guessed VCS = Subversion")
  1103. return SubversionVCS(options)
  1104. # Git has a command to test if you're in a git tree.
  1105. # Try running it, but don't die if we don't have git installed.
  1106. try:
  1107. out, returncode = RunShellWithReturnCode(["git", "rev-parse",
  1108. "--is-inside-work-tree"])
  1109. if returncode == 0:
  1110. return GitVCS(options)
  1111. except OSError, (errno, message):
  1112. if errno != 2: # ENOENT -- they don't have git installed.
  1113. raise
  1114. ErrorExit(("Could not guess version control system. "
  1115. "Are you in a working copy directory?"))
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by
      the VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).
    The patchset id is None if the base files are not uploaded by this
    script (applies only to SVN checkouts).
  """
  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                              "%(lineno)s %(message)s "))
  # Force the C locale so the output of child VCS commands is not localized.
  os.environ['LC_ALL'] = 'C'
  options, args = parser.parse_args(argv[1:])
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)
  vcs = GuessVCS(options)
  if isinstance(vcs, SubversionVCS):
    # base field is only allowed for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    base = vcs.GuessBase(options.download_base)
  else:
    base = None
  if not base and options.download_base:
    # NOTE(review): this assignment is a no-op (download_base is already
    # true on this path); kept as-is to preserve behavior.
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  if options.issue:
    prompt = "Message describing this patch set: "
  else:
    prompt = "New issue subject: "
  message = options.message or raw_input(prompt).strip()
  if not message:
    ErrorExit("A non-empty message is required")
  rpc_server = GetRpcServer(options)
  form_fields = [("subject", message)]
  if base:
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    # Crude validation: requires exactly one dot after the "@".
    # NOTE(review): this rejects valid multi-part domains (e.g. example.co.uk).
    for reviewer in options.reviewers.split(','):
      if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    # Same crude validation as for reviewers above.
    for cc in options.cc.split(','):
      if "@" in cc and not cc.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % cc)
    form_fields.append(("cc", options.cc))
  description = options.description
  if options.description_file:
    if options.description:
      ErrorExit("Can't specify description and description_file")
    file = open(options.description_file, 'r')
    description = file.read()
    file.close()
  if description:
    form_fields.append(("description", description))
  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5.new(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  # If we're uploading base files, don't send the email before the uploads, so
  # that it contains the file status.
  if options.send_mail and options.download_base:
    form_fields.append(("send_mail", "1"))
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    # Expected success response: message line, patchset id line, then one
    # "<patch id> <filename>" line per file (as parsed below).
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the last path segment of the URL in the status message.
  issue = msg[msg.rfind("/")+1:]

  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result

  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
  if options.send_mail:
    rpc_server.Send("/" + issue + "/mail", payload="")
  return issue, patchset
def main():
  """Command-line entry point; exits with status 1 on Ctrl-C."""
  try:
    RealMain(sys.argv)
  except KeyboardInterrupt:
    # Bare print emits a newline so the status message starts on its own line.
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)
# Standard script entry-point guard: run main() only when executed directly.
if __name__ == "__main__":
  main()