Coverage for ingadhoc-odoo-saas-adhoc / saas_provider_upgrade / UpgradeScript.py: 15%

586 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-03-09 20:33 +0000

1#!/usr/bin/env python 

2import argparse 

3import getpass 

4import gzip 

5import hashlib 

6import json 

7import logging 

8import os 

9import re 

10import shutil 

11import ssl 

12import subprocess 

13import sys 

14import tempfile 

15import time 

16import zipfile 

17from datetime import datetime, timedelta 

18from operator import itemgetter 

19 

# Python 2/3 compatibility: expose a single `urlrequest` handle for HTTP calls
try:
    import urllib.request

    urlrequest = urllib.request
except ImportError:  # Python 2
    import urllib2

    urlrequest = urllib2

# `shutil.which` exists on Python 3 only; fall back to distutils on Python 2.
# NOTE(review): on Python 2 the attribute access raises AttributeError, not
# ImportError, so this fallback presumably never triggers there — TODO confirm
# whether Python 2 support is still intended.
try:
    import shutil

    which = shutil.which
except ImportError:
    import distutils.spawn

    which = distutils.spawn.find_executable

# Python 2: make `input` behave like Python 3's (returns a raw string, no eval)
if sys.version_info[0] == 2:
    input = raw_input  # noqa: A001, F821

40 

# Mapping each subcommand to its required external dependencies
COMMAND_DEPENDENCIES = {
    "log": {"ssh-keygen"},
    "status": {"ssh-keygen"},
    "restore": {"ssh-keygen", "rsync", "createdb", "pg_restore"},
    "test": {"ssh-keygen", "rsync", "psql", "createdb", "pg_restore", "pg_dump"},
    "production": {"ssh-keygen", "rsync", "psql", "createdb", "pg_restore", "pg_dump"},
    "wipe": {"ssh-keygen"},
}

# Mapping subcommand arguments to the dependencies they make unnecessary
EXCLUDED_DEPENDENCIES = {
    "no_restore": {"createdb", "pg_restore"},
    "dump": {"pg_dump"},
}

# os.getuid is POSIX-only; fall back to the user name on other platforms
UID = os.getuid() if hasattr(os, "getuid") else getpass.getuser()
# per-user temp files so concurrent users on one machine do not collide
DEFAULT_SSH_KEY_NAME = os.path.join(tempfile.gettempdir(), f"{UID}_upgrade_ssh_key")
KNOWN_HOSTS_NAME = os.path.join(tempfile.gettempdir(), f"{UID}_upgrade_known_hosts")

UPGRADE_SERVER_NAME = os.environ.get("UPGRADE_SERVER_NAME", "https://upgrade.odoo.com")
DATA_SERVER_USER = "odoo"
DATA_SERVER_PATH = "/data"
SSH_KEY_NAME = os.environ.get("SSH_KEY_NAME", DEFAULT_SSH_KEY_NAME)
# SSL verification is on unless the env var is explicitly "0"/"off"/"no"
SSL_VERIFICATION = os.environ.get("SSL_VERIFICATION", "1").strip().lower() not in {
    "0",
    "off",
    "no",
}

# naming conventions for the uploaded dump and downloaded filestore
ORIGIN_DUMP_BASE_NAME = "origin"
ORIGIN_DUMP_NAME = "origin.dump"
EXPECTED_DUMP_EXTENSIONS = [".sql", ".dump", ".zip", ".sql.gz"]
POSTGRES_TABLE_OF_CONTENTS = "toc.dat"
FILESTORE_NAME = "filestore"
FILESTORE_PATH = os.path.expanduser("~/.local/share/Odoo/filestore")

DB_TIMESTAMP_FORMAT = "%Y_%m_%d_%H_%M"

# timings (seconds) and parallelism defaults
REQUEST_TIMEOUT = 60
STATUS_MONITORING_PERIOD = 5
LOG_REFRESH_PERIOD = 5
CORE_COUNT = 4

# one shared SSL context for every HTTP(S) call to the upgrade server
ssl_context = ssl.create_default_context() if SSL_VERIFICATION else ssl._create_unverified_context()

86 

87 

class UpgradeError(Exception):
    """Generic exception used to handle any kind of upgrade error uniformly."""

90 

91 

class StateMachine:
    """
    Simple state machine, with:
    * `run` method that will start executing handlers methods from a starting state.
    * `<state>_handler` methods corresponding to each non terminal state. Each state
      handler must return the next state.
    * `context` (internal data) which may be updated by the state handlers.
    * `get_context_data` method that must be used to get data from the context in the
      handlers. It verifies that the data is present in the context and raises an error
      on missing data.
    """

    class Error(Exception):
        pass

    def __init__(self):
        self.context = {}
        self.state = None

    def get_context_data(self, *keys):
        """
        Return the context values for *keys* (a single value for one key, a
        tuple for several). Raise StateMachine.Error when any key is missing.
        """
        missing = [key for key in keys if key not in self.context]
        if missing:
            # BUG FIX: the message args were previously passed unformatted to the
            # exception constructor (logging-style), so the placeholders were
            # never interpolated in the error message.
            raise StateMachine.Error("State %r: missing data in context %s" % (self.state, missing))
        return itemgetter(*keys)(self.context)

    def run(self, from_state, additional_context=None):
        """
        Execute the state machine from `from_state` with an optional additional context.
        If an additional context is specified, the current context will be updated.
        """
        if not hasattr(self, from_state + "_handler"):
            raise StateMachine.Error(f"The state {from_state!r} is not a valid state.")

        if additional_context is not None:
            self.context.update(additional_context)

        # keep stepping until a state with no handler (a terminal state) is reached
        self.state = from_state
        while hasattr(self, self.state + "_handler"):
            self.state = getattr(self, self.state + "_handler")()

    def init_handler(self):
        """Create the upgrade request, dump the DB if needed, save the token."""
        input_source, target, aim, core_count, env_vars, ssh_key, token_name, contract = self.get_context_data(
            "input_source", "target", "aim", "core_count", "env_vars", "ssh_key", "token_name", "contract"
        )

        if input_source == "db":
            dbname = self.get_context_data("dbname")
            db_contract = get_db_contract(dbname, contract)  # never returns an empty contract
            if contract and contract != db_contract:
                logging.warning(
                    "The subscription code found in the database %s differs from the one provided as `--contract` parameter %s. "
                    "Continuing with the contract found in the database: %s.",
                    db_contract,
                    contract,
                    db_contract,
                )
            # the code found in the database always wins over the CLI argument
            contract = db_contract
            self.context["contract"] = contract

        if ssh_key == DEFAULT_SSH_KEY_NAME:
            generate_default_ssh_keys()

        response = create_upgrade_request(contract, target, aim, env_vars, ssh_key + ".pub")

        if input_source == "db":
            dump_database(dbname, ORIGIN_DUMP_NAME, core_count)

        # store the token in a file to be able to resume the request in case of interruption
        save_token(token_name, target, aim, response["token"])

        # make sure that the request is resumed from the correct node
        set_servers_info(response["assigned_host_uri"])
        self.context.update(response)

        return "new"

    def new_handler(self):
        """Upload the dump to the assigned host and start the processing."""
        input_source, token, ssh_key, data_server_name, data_server_user, data_server_path = self.get_context_data(
            "input_source", "token", "ssh_key", "data_server_name", "data_server_user", "data_server_path"
        )
        restore_db = not self.get_context_data("no_restore")

        if input_source == "db":
            dump_path = ORIGIN_DUMP_NAME
            dest_dump_name = ORIGIN_DUMP_NAME
        else:
            dump_path, dump_ext = self.get_context_data("host_dump_upload_path", "dump_ext")
            dest_dump_name = ORIGIN_DUMP_BASE_NAME + dump_ext

        info = start_transfer(token, ssh_key + ".pub", "upload")
        store_known_hosts(info["known_hosts"])
        upload_dump(
            dump_path,
            data_server_name,
            info["ssh_port"],
            data_server_user,
            data_server_path,
            ssh_key,
            dest_dump_name,
        )
        stop_transfer(token)

        # if the postgres version used for the upgrade is not compatible with the client postgres
        # version used to dump the database, deactivate the upgraded database restoring.
        if not process_upgrade_request(token):
            logging.warning(
                "Your postgres version is lower than the minimal required version to restore your upgraded database."
            )
            if restore_db:
                logging.warning("The upgraded dump will be downloaded but not restored.")
                self.context["no_restore"] = True

        return "pending"

    def pending_handler(self):
        # nothing to do while pending: fall through to progress monitoring
        return "progress"

    def progress_handler(self):
        """Block while monitoring the server-side processing; return its outcome."""
        token = self.get_context_data("token")
        status, reason = monitor_request_processing(token)
        self.context["reason"] = reason

        return status

    def failed_handler(self):
        reason = self.get_context_data("reason")
        logging.error("The upgrade request has failed%s", (": " + reason) if reason else "")

        return "terminal"

    def cancelled_handler(self):
        logging.info("The upgrade request has been cancelled")

        return "terminal"

    def done_handler(self):
        """Download the upgraded dump and optionally restore DB + filestore."""
        (
            input_source,
            token,
            ssh_key,
            core_count,
            aim,
            data_server_name,
            data_server_user,
            data_server_path,
            no_restore,
            dump_dest_path,
        ) = self.get_context_data(
            "input_source",
            "token",
            "ssh_key",
            "core_count",
            "aim",
            "data_server_name",
            "data_server_user",
            "data_server_path",
            "no_restore",
            "host_dump_download_path",
        )

        info = start_transfer(token, ssh_key + ".pub", "download")
        store_known_hosts(info["known_hosts"])
        download_dump(
            data_server_name,
            info["ssh_port"],
            data_server_user,
            data_server_path,
            info["dump_name"],
            ssh_key,
            dump_dest_path,
        )
        stop_transfer(token)

        if no_restore:
            logging.info(
                "The upgraded database and filestore have been downloaded as %s.\n"
                "Skipping the restore of the upgraded dump and the merge of the filestore.",
                info["dump_name"],
            )
        else:
            upgraded_db_name = self.get_context_data("upgraded_db_name")
            db_name = self.get_context_data("dbname") if input_source == "db" else None

            restore_database(upgraded_db_name, info["dump_name"], core_count)
            restore_filestore(db_name, upgraded_db_name)
            clean_dump(info["dump_name"])

        return "terminal"

280 

281 

282STATE_MACHINE = StateMachine() 

283 

284 

def set_servers_info(host_uri):
    """Point both the API endpoint and the data-transfer host at *host_uri*."""
    STATE_MACHINE.context.update(
        {
            "upgrade_server_name": "https://" + host_uri,
            "data_server_name": host_uri,
        }
    )

288 

289 

290# --------------------------------------------------------------------------------- 

291# Common functions 

292# --------------------------------------------------------------------------------- 

293 

294 

def user_confirm(negative_answer="n"):
    """Read a line from stdin; return True unless it starts with *negative_answer*."""
    answer = input().lower().lstrip()
    return not answer.startswith(negative_answer)

297 

298 

def check_binaries_exist(args):
    """
    Verify that the external binaries required by *args.command* are installed.

    Dependencies made unnecessary by the given arguments (EXCLUDED_DEPENDENCIES)
    are skipped. When some are missing, ask the user whether to proceed anyway
    and exit when they decline.
    """
    skipped = set()
    for arg_name, deps in EXCLUDED_DEPENDENCIES.items():
        if getattr(args, arg_name, False):
            skipped |= deps

    missing = [cmd for cmd in COMMAND_DEPENDENCIES[args.command] - skipped if not which(cmd)]
    if not missing:
        return

    logging.error(
        "It seems we cannot find some binaries needed for the requested action:\n"
        "- %s\n\n"
        "Please ensure they are present in your system, perhaps you need to install some packages.\n"
        "If you wish you can continue, even though something may fail later. "
        "Do you want to proceed? [y/N]",
        "\n- ".join(missing),
    )
    # default answer is "no": only an explicit non-"y" answer continues
    if user_confirm(negative_answer="y"):
        sys.exit(1)

315 

316 

def run_command(command, stream_output=False):
    """
    Run an external command; raise UpgradeError when it exits non-zero.

    With ``stream_output`` the command's output goes straight to the console
    (check_call); otherwise it is captured and returned (check_output).
    """
    runner = subprocess.check_call if stream_output else subprocess.check_output
    try:
        return runner(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as exc:
        message = f"The {exc.cmd[0]!r} command has failed"
        if not stream_output:
            captured = exc.output.decode("utf-8", "replace").rstrip("\n")
            message += " with the following output:\n {}".format(captured)
        raise UpgradeError(message)

328 

329 

330# This is an advanced version of os.path.basename in python 2, which can get both dir and file basename from path 

def get_path_basename(path):
    """Basename that also works for directory paths ending with a separator."""
    trimmed = path[:-1] if path.endswith(os.sep) else path
    return os.path.basename(trimmed)

333 

334 

335# --------------------------------------------------------------------------------- 

336# Data transfer functions 

337# --------------------------------------------------------------------------------- 

338 

339 

def clean_default_ssh_keys():
    """Remove the generated default SSH key pair, if present."""
    for key_file in (DEFAULT_SSH_KEY_NAME, DEFAULT_SSH_KEY_NAME + ".pub"):
        if os.path.isfile(key_file):
            os.remove(key_file)

346 

347 

def generate_default_ssh_keys():
    """
    Generate the default public/private SSH key pair when either half is missing.
    """
    private_exists = os.path.isfile(DEFAULT_SSH_KEY_NAME)
    public_exists = os.path.isfile(DEFAULT_SSH_KEY_NAME + ".pub")
    if private_exists and public_exists:
        return
    logging.info("Generating temporary public/private SSH key pair")
    # remove any half-present pair before regenerating
    clean_default_ssh_keys()
    run_command(["ssh-keygen", "-t", "rsa", "-N", "", "-f", DEFAULT_SSH_KEY_NAME])

356 

357 

def upload_dump(dump_path, server, port, user, path, ssh_key, dest_dump_name=None):
    """
    Upload the database dump (file or directory) to the server over rsync/SSH.
    """
    destination = f"{path}/{dest_dump_name}" if dest_dump_name else path
    server_string = "{}@{}:{}".format(user, server, destination)
    ssh = (
        f"ssh -p {port} -o IdentitiesOnly=yes -o StrictHostKeyChecking=no"
        f" -o UserKnownHostsFile={KNOWN_HOSTS_NAME} -i {ssh_key}"
    )
    # windows: rsync would interpret a full path (C:\...) as a remote path, because it contains ':'
    dump_path = os.path.relpath(dump_path)
    # if the --dump dir is passed, then transfer only the content of the dir, and not the directory itself
    if os.path.isdir(dump_path) and not dump_path.endswith(os.sep):
        dump_path += os.sep

    logging.info("Upload the database dump.")
    rsync_command = [
        "rsync",
        "--chmod=u+rwx,g+rwx,o+r",
        "--info=progress2",
        "--delete-after",
        "-are",
        ssh,
        dump_path,
        server_string,
    ]
    try:
        run_command(rsync_command, stream_output=True)
    except Exception:
        logging.error(
            "The upload failed. If you see a message like 'ssh: [..]: Connection refused/timed out' above,\n"
            "this is most commonly caused by a restrictive firewall. Please check if your firewall is configured to\n"
            "block outgoing connections to TCP ports in the range between 32768 and 60999. If so, please add an\n"
            "exception for the target host %s. Then, please re-run the script and when prompted, choose to resume.",
            server,
        )
        sys.exit(1)

398 

399 

def download_dump(server, port, user, dump_path, dump_name, ssh_key, dump_dest_path="."):
    """
    Download a database dump and its filestore from the server through SSH,
    together with the upgrade report and the upgrade log.
    """
    ssh = f"ssh -p {port} -o IdentitiesOnly=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile={KNOWN_HOSTS_NAME} -i {ssh_key}"
    server = f"{user}@{server}:{dump_path}"
    server_dump_path = os.path.join(server, dump_name)
    server_fs_path = os.path.join(server, FILESTORE_NAME)
    reports_path = os.path.join(server, "upgrade-report.html")
    logs_path = os.path.join(server, "upgrade.log")

    logging.info(
        "Downloading the database dump and its filestore from %s.",
        server,
    )
    try:
        run_command(
            [
                "rsync",
                "--info=progress2",
                "-are",
                ssh,
                # some sources (report, log) may legitimately be absent
                "--ignore-missing-args",
                server_dump_path,
                server_fs_path,
                reports_path,
                logs_path,
                dump_dest_path,
            ],
            stream_output=True,
        )
    except Exception:
        # FIX: removed the duplicated word ("been been") from the user-facing message
        logging.error(
            "The connection may have been closed because you reached the 5 minutes timeout. Please, re-run the script and resume."
        )
        raise

436 

437 

438# --------------------------------------------------------------------------------- 

439# DB management functions 

440# --------------------------------------------------------------------------------- 

441 

442 

def get_upgraded_db_name(dbname, target, aim):
    """Build the timestamped name of the locally restored database."""
    timestamp = datetime.now().strftime(DB_TIMESTAMP_FORMAT)
    suffix = f"backup_{timestamp}" if aim == "production" else f"test_{target}_{timestamp}"
    return f"{dbname}_{suffix}"

450 

451 

def dump_database(db_name, dump_name, core_count):
    """
    Dump *db_name* as the directory-format dump *dump_name*, using
    *core_count* parallel jobs to reduce the dumping time.
    """
    logging.info("Dump the database '%s' as '%s'", db_name, dump_name)

    # remove any leftover dump from a previous run
    clean_dump(dump_name)

    pg_dump_command = [
        "pg_dump",
        "--no-owner",
        "--format",
        "d",
        "--jobs",
        str(core_count),
        "--file",
        dump_name,
        db_name,
    ]
    try:
        run_command(pg_dump_command)
    except Exception as e:
        logging.error(
            "Generating the dump of your database has failed. %s\n"
            "\nHint: ensure this script is run by the same system user running the Odoo process "
            "(by default user 'odoo'), to avoid permission and operational issues. "
            "The current user should have at least the necessary permissions to access "
            "the Postgres database you are aiming to upgrade.",
            e,
        )
        sys.exit(1)

484 

485 

def restore_database(db_name, dump_name, core_count):
    """
    Create *db_name* locally and restore the directory-format dump
    *dump_name* into it, using *core_count* parallel jobs.
    """
    logging.info("Restore the dump file '%s' as the database '%s'", dump_name, db_name)

    pg_restore_command = [
        "pg_restore",
        "--no-owner",
        "--format",
        "d",
        dump_name,
        "--dbname",
        db_name,
        "--jobs",
        str(core_count),
    ]
    try:
        run_command(["createdb", db_name])
        run_command(pg_restore_command)
    except Exception as e:
        logging.error(
            "Restoring the upgraded database has failed:\n %s \n\n"
            "You can run the following command to retry restoring the upgraded database yourself:\n"
            "pg_restore --no-owner --format d --jobs %s --dbname %s %s",
            e,
            core_count,
            db_name,
            dump_name,
        )
        sys.exit(1)

518 

519 

def restore_filestore(origin_db_name, upgraded_db_name):
    """
    Restore the new filestore by merging it with the old one, in a folder
    named after the upgraded database.

    When the original filestore cannot be located, the downloaded filestore
    must be restored manually.
    """
    if not origin_db_name:
        logging.warning(
            "The original filestore location could not be determined."
            " The filestore of the upgrade database should be restored manually."
        )
        return

    origin_fs_path = os.path.join(FILESTORE_PATH, origin_db_name)
    if not os.path.exists(origin_fs_path):
        logging.warning(
            "The original filestore of '%s' has not been found in %s. "
            "The filestore of the upgrade database should be restored manually.",
            origin_db_name,
            FILESTORE_PATH,
        )
        return

    new_fs_path = os.path.join(FILESTORE_PATH, upgraded_db_name)
    logging.info("Merging the new filestore with the old one in %s ...", new_fs_path)
    # start from a copy of the original filestore ...
    shutil.copytree(origin_fs_path, new_fs_path)
    if os.path.isdir(FILESTORE_NAME):
        # ... then overlay the files downloaded from the upgrade server
        run_command(["rsync", "-a", FILESTORE_NAME + os.sep, new_fs_path])
        shutil.rmtree(FILESTORE_NAME)

550 

551 

def clean_dump(dump_name):
    """Remove a leftover dump (plain file or directory-format), if present."""
    removal = None
    if os.path.isdir(dump_name):
        removal = shutil.rmtree
    elif os.path.isfile(dump_name):
        removal = os.remove
    if removal is None:
        return
    try:
        removal(dump_name)
    except PermissionError:
        logging.error(
            "Cleaning leftover dump has failed: the user executing the script does not have "
            "enough permissions to remove the old dump, likely used for another upgrade request in the past. "
            "Check ownership of '%s'.",
            dump_name,
        )
        sys.exit(1)

567 

568 

def get_db_contract(dbname, fallback_contract=None):
    """
    Read the subscription (contract) code stored in the database.

    Fall back to *fallback_contract* when the database cannot be queried or
    holds no code; raise UpgradeError when no code can be determined at all.
    """
    query = "SELECT value FROM ir_config_parameter WHERE key = 'database.enterprise_code'"
    try:
        output = subprocess.check_output(
            ["psql", dbname, "--no-psqlrc", "--tuples-only", "--command", query]
        )
        contract = output.decode().strip()
        if contract:
            return contract
    except Exception:
        # best effort: any psql failure simply falls through to the fallback
        pass

    if fallback_contract:
        logging.info(
            "No subscription code found in the database. Fallback to the one given as parameter: %s", fallback_contract
        )
        return fallback_contract

    raise UpgradeError(
        "Unable to get the subscription code of your database. Your database must be registered to be "
        "eligible for an upgrade. See https://www.odoo.com/documentation/user/administration/maintain/on_premise.html for more info. "
        "Alternatively, you can specify the subscription code using the `--contract` argument."
    )

598 

599 

def get_dump_basename_and_format(dump):
    """
    Return the (basename, extension) of a dump path, or (None, None) when the
    path does not look like a supported dump.
    """
    for ext in EXPECTED_DUMP_EXTENSIONS:
        if dump.endswith(ext):
            return os.path.basename(dump)[: -len(ext)], ext
    # a directory is assumed to be a postgres directory-format dump
    if os.path.isdir(dump):
        return get_path_basename(dump), ".dump"
    return None, None

611 

612 

def is_zip_dump_valid(dump_file):
    """
    Check that *dump_file* is a readable zip archive whose content is intact
    and contains at least the mandatory ``dump.sql`` entry.
    """

    def check_zip_integrity(f):
        # testzip() returns the name of the first corrupt member, or None when OK
        try:
            return f.testzip() is None
        except Exception:
            return False

    try:
        # FIX: a non-zip file previously skipped every check and was reported
        # valid; it is now rejected explicitly.
        if not zipfile.is_zipfile(dump_file):
            return False
        with zipfile.ZipFile(dump_file) as zipf:
            # FIX: the integrity result was previously computed and discarded,
            # so corrupted archives were accepted.
            if not check_zip_integrity(zipf):
                return False
            # check that the archive contains at least the mandatory content
            if "dump.sql" not in zipf.namelist():
                return False
    except Exception:
        return False
    return True

632 

633 

def is_gz_dump_valid(dump_path):
    """Return True when *dump_path* can be opened and read as a gzip file."""
    try:
        with gzip.open(dump_path) as compressed:
            # reading a couple of bytes is enough to trigger header validation
            compressed.read(2)
    except OSError:
        return False
    else:
        return True

641 

642 

643# --------------------------------------------------------------------------------- 

644# API management functions 

645# --------------------------------------------------------------------------------- 

646 

647 

def send_json_request(endpoint, params):
    """
    Send a JSONRPC request to the upgrade server and return its response as a dictionary
    """

    request_url = "{}/{}".format(STATE_MACHINE.context["upgrade_server_name"], endpoint)

    # build the JSONRPC request ("method"/"id" are placeholders the server ignores)
    jsonrpc_payload = {
        "jsonrpc": "2.0",
        "method": "not_used",
        "params": params,
        "id": "not_used",
    }

    request_payload = json.dumps(jsonrpc_payload).encode("utf-8")

    # build the HTTP request
    req = urlrequest.Request(request_url, request_payload, headers={"Content-type": "application/json"})

    # send it and parse the response content
    try:
        response = urlrequest.urlopen(req, timeout=REQUEST_TIMEOUT, context=ssl_context)
        info = response.info()

        if "Content-Length" in info and int(info["Content-Length"]) > 0:
            response_data = response.read().decode("utf-8")

            # JSONRPC response
            if "application/json" in info["Content-Type"]:
                resp_payload = json.loads(response_data)

                if "result" in resp_payload:
                    # application-level errors are reported inside "result"
                    if "error" in resp_payload["result"]:
                        raise UpgradeError(resp_payload["result"]["error"])
                    return resp_payload["result"]
                else:
                    # protocol-level error: surface the server message when present
                    error = resp_payload.get("error", {}).get("data", {}).get("message")
                    error = error or "Upgrade server bad JSONRPC response"
                    raise UpgradeError(f"Error: {error}")

            # static file response
            if "text/html" in info["Content-Type"]:
                return response_data

        # empty response
        return []

    except (urlrequest.HTTPError, urlrequest.URLError) as e:
        # urllib.request re-exports HTTPError/URLError from urllib.error
        raise UpgradeError("Upgrade server communication error: '%s'" % e)

    except TimeoutError:
        raise UpgradeError("Upgrade server communication timeout")

701 

702 

def check_response_format(response, keys):
    """Raise UpgradeError when any of the expected *keys* is absent from *response*."""
    missing = [key for key in keys if key not in response]
    if missing:
        raise UpgradeError(
            f"The response received from the upgrade server does not have the expected format (missing data: {missing})"
        )

713 

def create_upgrade_request(contract, target, aim, env_vars, ssh_key):
    """
    Create a new upgrade request through the upgrade API and return the
    server response (request_id, token, assigned host, ...).
    """
    logging.info("Creating new upgrade request")

    with open(ssh_key) as fd:
        public_key = fd.read()

    payload = {
        "contract": contract,
        "target": target,
        "aim": aim,
        "actuator": "cli",
        "env_vars": env_vars,
        "ssh_key": public_key,
        "api_version": "0.2",
    }
    response = send_json_request("upgrade/request/create", payload)

    check_response_format(response, ("request_id", "token", "assigned_host_uri"))
    if response.get("info"):
        logging.warning(response["info"])
    logging.info("Assigned host's server uri '%s'", response["assigned_host_uri"])
    logging.info("The secret token is '%s'", response["token"])
    return response

740 

741 

def process_upgrade_request(token):
    """
    Start the upgrade request processing through the upgrade API.

    Return whether the original DB PG version is compatible with the minimum
    PG version for the Odoo target version.
    """
    logging.info("Processing the upgrade request")
    response = send_json_request("upgrade/request/process", {"token": token})
    check_response_format(response, ("is_pg_version_compatible",))
    return response["is_pg_version_compatible"]

754 

755 

def start_transfer(token, ssh_key, transfer_type):
    """
    Open an SSH transfer window ("upload" or "download") for the request and
    return the connection info (ssh_port, known_hosts, plus dump_name when
    downloading).
    """
    if not os.path.isfile(ssh_key):
        raise UpgradeError(f"The SSH key {ssh_key!r} does not exist.")

    params = {"token": token, "transfer_type": transfer_type}
    response = send_json_request("upgrade/request/transfer/start", params)
    check_response_format(response, ("ssh_port", "known_hosts"))

    if transfer_type == "download":
        # downloads additionally tell us which dump to fetch
        check_response_format(response, ("dump_name",))

    return response

773 

774 

def stop_transfer(token):
    """Close the SSH transfer window opened by start_transfer()."""
    send_json_request("upgrade/request/transfer/stop", {"token": token})

777 

778 

def store_known_hosts(known_hosts):
    """
    Write the server's known_hosts entries so rsync's SSH connection can
    authenticate the data server.
    """
    try:
        with open(KNOWN_HOSTS_NAME, "w") as f:
            f.write(known_hosts or "")
    except PermissionError:
        # the file was created by another system user in a previous run
        logging.error(
            "The current user is not the owner of the file '%s'.\n"
            "Hint: in your next attempt, answer N when asked if you want to resume.",
            KNOWN_HOSTS_NAME,
        )
        sys.exit(1)

794 

795 

def get_logs(token, from_byte=0):
    """
    Fetch the upgrade log content starting at *from_byte*, using an HTTP
    Range header for incremental streaming.
    """
    base_url = STATE_MACHINE.context["upgrade_server_name"]
    request_url = "{}/{}?token={}".format(base_url, "upgrade/request/logs", token)
    req = urlrequest.Request(request_url, headers={"Range": f"bytes={from_byte:d}-"})
    response = urlrequest.urlopen(req, timeout=REQUEST_TIMEOUT, context=ssl_context)
    return response.read().decode("utf-8", "replace")

807 

808 

def get_request_status(token):
    """
    Return the request processing status, its host uri, and the optional
    reason/archived information.
    """
    response = send_json_request("upgrade/request/status", {"token": token})
    check_response_format(response, ("status", "host_uri"))
    return (
        response["status"],
        response["host_uri"],
        response.get("reason"),
        response.get("archived"),
    )

817 

818 

def request_data_wipe(contract, token):
    """
    Wipe the request data related to a contract.

    Return "success", "partial" or "failed" depending on how many of the
    requests could actually be wiped.
    """
    response = send_json_request("upgrade/request/wipe", {"contract": contract, "token": token})
    # consistency fix: validate the response shape like every other API helper,
    # instead of failing with a bare KeyError on a malformed response
    check_response_format(response, ("wiped", "failed"))

    wiped = response["wiped"]
    failed = response["failed"]
    if wiped:
        logging.info("The data for requests %s was successfully deleted", wiped)
    if failed:
        logging.info("The data for requests %s could not be deleted", failed)
    # "partial" when some requests were wiped while others failed
    status = "success" if not failed else "partial" if wiped and failed else "failed"
    return status

833 

834 

835# --------------------------------------------------------------------------------- 

836# Token functions (for recovering) 

837# --------------------------------------------------------------------------------- 

838 

839 

def get_token_file(token_name, target, aim):
    """Path of the temporary file storing the token of an upgrade request."""
    basename = f"odoo-upgrade-{aim}-{token_name}-{target}"
    return os.path.join(tempfile.gettempdir(), basename)

842 

843 

def save_token(token_name, target, aim, token):
    """Persist the request token so an interrupted run can be resumed."""
    token_file = get_token_file(token_name, target, aim)
    with open(token_file, "w") as f:
        f.write(token)

852 

853 

def get_saved_token(token_name, target, aim):
    """Return the previously saved token for this request, or None."""
    token_file = get_token_file(token_name, target, aim)
    try:
        with open(token_file) as f:
            return f.readline()
    except Exception:
        # no saved token (or unreadable file): nothing to resume
        return None

865 

866 

def remove_saved_token(token_name, target, aim):
    """Delete the saved token file of a finished request, if any."""
    token_file = get_token_file(token_name, target, aim)
    if os.path.isfile(token_file):
        os.remove(token_file)

871 

872 

873# --------------------------------------------------------------------------------- 

874# Main functions 

875# --------------------------------------------------------------------------------- 

876 

877 

def monitor_request_processing(token):
    """
    Monitor the request processing status and display logs at the same time
    """
    # ignore host_uri/archived: only status and reason matter here
    status, _, reason = get_request_status(token)[:3]
    displayed_log_bytes = 0
    last_check_time = datetime.now()

    while status in ("progress", "pending"):
        # status monitoring: re-poll the server at most every STATUS_MONITORING_PERIOD seconds
        if datetime.now() > last_check_time + timedelta(seconds=STATUS_MONITORING_PERIOD):
            status, _, reason = get_request_status(token)[:3]
            last_check_time = datetime.now()

        # logs streaming: fetch and display only the part not shown yet
        if status == "progress":
            logs = get_logs(token, displayed_log_bytes)
            if logs.strip():
                logging.info(logs.strip())
            # NOTE(review): advances by len(logs) - 1, presumably to re-fetch
            # the last character and avoid dropping data between polls; len()
            # counts characters while get_logs requests a byte Range — TODO
            # confirm behavior when the log contains multi-byte characters.
            displayed_log_bytes += len(logs) - 1

        time.sleep(LOG_REFRESH_PERIOD)

    return status, reason

902 

903 

def parse_command_line():
    """
    Parse command-line arguments and return them.

    Builds one sub-parser per command (test, production, restore, status,
    log, wipe) and performs the cross-argument validation that argparse
    cannot express (a contract is mandatory when uploading a raw dump).

    :return: the parsed ``argparse.Namespace``
    """

    def add_upgrade_arguments(subparser):
        # A request needs exactly one input: a live database or a dump file.
        dbname_or_dump = subparser.add_mutually_exclusive_group(required=True)
        dbname_or_dump.add_argument(
            "-d",
            "--dbname",
            help="The name of a database to dump and upgrade",
        )
        dbname_or_dump.add_argument(
            "-i",
            "--dump",
            help=f"The database dump to upgrade (.sql, .dump, .sql.gz, .zip or a psql dump directory with {POSTGRES_TABLE_OF_CONTENTS} file)",
        )

        subparser.add_argument(
            "-r",
            "--restore-name",
            help="The new name of the local database into which the upgraded dump will be restored. Do not create it manually, it will be done automatically.",
        )
        subparser.add_argument(
            "-c",
            "--contract",
            help="The contract number associated to the database (by default taken from the DB if it already has one, mandatory when sending a dump file with --dump)",
        )
        subparser.add_argument("-t", "--target", required=True, help="The upgraded database version")
        subparser.add_argument(
            "-e",
            "--env",
            action="append",
            help="Set an environment variable, in the format VAR=VAL",
        )
        subparser.add_argument(
            "--env-file",
            type=argparse.FileType("r"),
            help="Read in a file of environment variables, one per line, in the format VAR=VAL",
        )

    def add_pg_arguments(subparser):
        subparser.add_argument(
            "-x",
            "--no-restore",
            action="store_true",
            help="Download the upgraded database dump without restoring it",
        )

    def add_common_arguments(subparser):
        subparser.add_argument(
            "-s",
            "--ssh-key",
            help="The ssh key to use for data transfer (default: %(default)s)",
            default=SSH_KEY_NAME,
        )
        subparser.add_argument(
            "-j",
            "--core-count",
            help="The number of core to use to dump/restore a database (default: %(default)s)",
            default=CORE_COUNT,
        )
        subparser.add_argument(
            "-n",
            "--data-server-name",
            help=argparse.SUPPRESS,  # Deprecated: not used anymore, ignored
        )
        subparser.add_argument(
            "-u",
            "--data-server-user",
            help="The server user where to download/upload dumps (default: %(default)s)",
            default=DATA_SERVER_USER,
        )
        subparser.add_argument(
            "-p",
            "--data-server-path",
            help="The path on the server where to download/upload dumps (default: %(default)s)",
            default=DATA_SERVER_PATH,
        )

    def add_token_argument(subparser):
        subparser.add_argument(
            "-t",
            "--token",
            required=True,
            help=(
                # Fixed: the two adjacent literals previously produced a
                # doubled space ("the  `test`").
                "The token ID of the request. It can be found in the output of the "
                "`test` and `production` commands or in the file `upgrade.log`."
            ),
        )

    # When the script is piped from curl, sys.argv[0] is not a real file:
    # show the canonical invocation in --help instead of the temp name.
    prog = "python <(curl -s https://upgrade.odoo.com/upgrade)" if not os.path.isfile(sys.argv[0]) else None

    parser = argparse.ArgumentParser(
        prog=prog,
        epilog=(
            "Some options require access rights to connect to a database and generate a dump.\n"
            "Make sure that you are running this script with the correct user.\n"
            "Running as root is not advised."
        ),
    )
    parser.add_argument("--debug", action="store_true", help="activate debug traces")

    subparsers = parser.add_subparsers(dest="command")
    subparsers.required = True

    # sub-parser for the 'test' command
    parser_test = subparsers.add_parser("test", help="upgrade a database for test purpose")
    add_upgrade_arguments(parser_test)
    add_pg_arguments(parser_test)
    add_common_arguments(parser_test)

    # sub-parser for the 'production' command
    parser_prod = subparsers.add_parser("production", help="upgrade a database for production purpose")
    add_upgrade_arguments(parser_prod)
    add_pg_arguments(parser_prod)
    add_common_arguments(parser_prod)

    # sub-parser for the 'restore' command
    parser_restore = subparsers.add_parser("restore", help="download and restore the upgraded database")
    add_pg_arguments(parser_restore)
    add_token_argument(parser_restore)
    parser_restore.add_argument(
        "-d",
        "--dbname",
        required=True,
        help="The local database name to retrieve the original filestore",
    )
    parser_restore.add_argument(
        "-r",
        "--restored-name",
        required=True,
        help="The database name to restore the upgraded dump",
    )
    parser_restore.add_argument(
        "--production",
        action="store_true",
        help="Indicates that it's not a test database but a production database",
    )
    add_common_arguments(parser_restore)

    # sub-parser for the 'status' command
    parser_status = subparsers.add_parser("status", help="show the upgrade request status")
    add_token_argument(parser_status)

    # sub-parser for the 'log' command
    parser_log = subparsers.add_parser("log", help="show the upgrade request log")
    add_token_argument(parser_log)
    parser_log.add_argument(
        "-f",
        "--from-byte",
        type=int,
        default=0,
        # Fixed: the option is a byte offset, the help text said "line".
        help="From which byte to start retrieving the log (0=from the beginning)",
    )

    # sub-parser for the 'wipe' command
    parser_wipe = subparsers.add_parser(
        "wipe",
        help="Remove all database dumps for the upgrade requests on the platform related to your Odoo contract number.",
        epilog=(
            "Since this command is a destructive action both a token and the associated contract are mandatory. "
            "All associated requests of the contract will be wiped out. Use the token of the last request."
        ),
    )
    add_token_argument(parser_wipe)
    parser_wipe.add_argument(
        "-c",
        "--contract",
        required=True,
        help="The contract number related to the data you wish to remove.",
    )
    parser_wipe.add_argument(
        "-y",
        "--yes",
        action="store_true",
        help="Pre-approve the data removal",
    )

    args = parser.parse_args()

    # Cross-argument validation that argparse cannot express declaratively.
    if args.command in ("test", "production") and args.dump and not args.contract:
        parser.error("A contract number must be provided when the --dump argument is used")

    return args

1089 

1090 

def get_env_vars(env_vars, env_file):
    """
    Collect and validate VAR=VAL environment variable definitions.

    :param env_vars: list of "VAR=VAL" strings from repeated --env options
                     (or None); the caller's list is not mutated
    :param env_file: an open file-like object (iterable of lines) from
                     --env-file, or None; blank lines and lines whose first
                     non-whitespace character is '#' are ignored as comments
    :return: the combined, validated list of "VAR=VAL" strings
    :raises ValueError: if any entry is not in the form VAR=VAL
    """
    # Copy instead of aliasing so the caller's list is never mutated.
    env_vars = list(env_vars) if env_vars else []
    if env_file is not None:
        for line in env_file:
            line = line.strip()
            # Skip blank lines (previously they raised a ValueError) and
            # comments, even when the '#' is preceded by whitespace.
            if not line or line.startswith("#"):
                continue
            env_vars.append(line)
    # Check that args are correctly formatted in the form VAR=VAL
    for var in env_vars:
        if not re.match(r"^\w+=", var):
            raise ValueError("The following environment variable option is badly formatted: " + var)
    return env_vars

1102 

1103 

def process_upgrade_command(dbname, upgraded_db_name, dump, contract, target, aim, env_vars):
    """
    Run (or resume) an upgrade request for the `test`/`production` commands.

    Exactly one of ``dbname`` / ``dump`` is provided — the CLI parser puts
    them in a required mutually exclusive group.

    :param dbname: name of a local database to dump and upgrade, or None
    :param upgraded_db_name: name for the restored upgraded DB (db mode only);
                             when falsy a default name is derived
    :param dump: path to an existing dump file/directory to upload, or None
    :param contract: the Odoo contract number associated to the request
    :param target: the version to upgrade to
    :param aim: "test" or "production" (the sub-command name)
    :param env_vars: validated "VAR=VAL" strings forwarded to the platform
    :raises UpgradeError: if the dump is missing, has an unsupported format,
                          or is a corrupted .zip/.sql.gz archive
    """
    start_state = "init"
    additional_context = {
        "target": target,
        "aim": aim,
        "contract": contract,
        "env_vars": env_vars,
    }

    # update the context when a database is upgraded
    if dbname:
        # Token names are keyed on the input so an interrupted request for
        # the same DB can be found again and resumed.
        token_name = "db_" + dbname
        additional_context.update(
            {
                "input_source": "db",
                "dbname": dbname,
                "upgraded_db_name": upgraded_db_name if upgraded_db_name else get_upgraded_db_name(dbname, target, aim),
                "token_name": token_name,
            }
        )

    # update the context when a dump is upgraded
    if dump:
        if not os.path.exists(dump):
            raise UpgradeError(f"Dump {dump!r} not found.")

        dump_absolute_path = os.path.abspath(dump)
        dump_basename, dump_ext = get_dump_basename_and_format(dump)
        # A directory dump is only valid when it contains the postgres
        # table-of-contents file; any other unrecognized input is rejected.
        if dump_ext is None or (
            os.path.isdir(dump_absolute_path)
            and not os.path.isfile(os.path.join(dump_absolute_path, POSTGRES_TABLE_OF_CONTENTS))
        ):
            raise UpgradeError(
                (
                    "The database dump must be in one of the following formats: {}. "
                    "It can also be a directory dump (containing the file {})."
                ).format(", ".join(EXPECTED_DUMP_EXTENSIONS), POSTGRES_TABLE_OF_CONTENTS)
            )

        # Cheap integrity checks before uploading potentially large archives.
        if dump_ext == ".zip" and not is_zip_dump_valid(dump):
            raise UpgradeError(
                "The zip dump archive is not valid (either corrupted or does not contain, at least, a dump.sql file)"
            )
        if dump_ext == ".sql.gz" and not is_gz_dump_valid(dump):
            raise UpgradeError(f"The dump {dump!r} is not valid (either corrupted or has the wrong extension)")

        token_name = get_token_name(dump_absolute_path)
        additional_context.update(
            {
                "input_source": "dump",
                "token_name": token_name,
                "dump_basename": dump_basename,
                "dump_ext": dump_ext,
                # Dump mode never restores locally: there is no source DB.
                "no_restore": True,
            }
        )

    # if this upgrade request has been interrupted, try to resume it
    saved_token = get_saved_token(token_name, target, aim)

    if saved_token is not None:
        req_state, host_uri, reason, archived = get_request_status(saved_token)
        # An archived request cannot be resumed; silently start over.
        if not archived:
            logging.info("This upgrade request seems to have been interrupted. Do you want to resume it? [Y/n]")
            if user_confirm():
                logging.info("Resuming the upgrade request")

                # Skip the states already completed server-side.
                start_state = req_state
                additional_context.update({"token": saved_token, "reason": reason})

                # make sure that the request is resumed from the correct node
                set_servers_info(host_uri)
            else:
                logging.info("Restarting the upgrade request from the beginning")

    # run the upgrade
    STATE_MACHINE.run(start_state, additional_context)

    # cleaning
    if dbname:
        # Only db mode produced a local origin dump worth removing.
        clean_dump(ORIGIN_DUMP_NAME)
    remove_saved_token(token_name, target, aim)

1186 

1187 

def get_token_name(dump_absolute_path):
    """
    Derive a stable token name identifying a dump input.

    The name is "dump_" plus a SHA-256 digest over cheap fingerprints of
    the input (path, size, creation time, current user), so the same dump
    maps to the same token across runs and an interrupted request can be
    located again. For a directory dump, the postgres table-of-contents
    file inside it is fingerprinted instead.
    """
    if os.path.isdir(dump_absolute_path):
        fingerprint_file = os.path.join(dump_absolute_path, POSTGRES_TABLE_OF_CONTENTS)
    else:
        fingerprint_file = dump_absolute_path

    try:
        username = getpass.getuser()
    except Exception:
        # The user name is a best-effort component; fall back to empty.
        username = ""

    digest = hashlib.sha256()
    fingerprints = (
        fingerprint_file,
        os.path.getsize(fingerprint_file),
        os.path.getctime(fingerprint_file),
        username,
    )
    for fingerprint in fingerprints:
        # \x1e (record separator) keeps adjacent fields from colliding.
        digest.update(str(fingerprint).encode() + b"\x1e")
    return "dump_" + digest.hexdigest()

1210 

1211 

def process_restore_command(token, dbname, aim, restored_name):
    """
    Download and restore a finished upgrade request locally.

    Does nothing unless the request status is "done". The state machine is
    entered directly at the "done" state, which covers the download/restore
    steps only.
    """
    status, host_uri = get_request_status(token)[:2]
    # Point subsequent transfers at the node that holds this request's data.
    set_servers_info(host_uri)
    if status != "done":
        return
    restore_context = {
        "token": token,
        "aim": aim,
        "dbname": dbname,
        "upgraded_db_name": restored_name,
        "input_source": None,
    }
    STATE_MACHINE.run("done", restore_context)

1226 

1227 

def process_status_command(token):
    """Log the current status of an upgrade request, with its failure reason if any."""
    status, _, reason = get_request_status(token)[:3]
    reason_suffix = f" ({reason})" if reason else ""
    logging.info("Request status: %s%s", status.upper(), reason_suffix)

1231 

1232 

def process_log_command(token, from_byte):
    """Fetch the upgrade request log starting at *from_byte* and print it line by line."""
    host_uri = get_request_status(token)[1]
    set_servers_info(host_uri)
    log_lines = get_logs(token, from_byte).split("\n")
    # The payload ends with a newline, so drop the trailing empty entry.
    for line in log_lines[:-1]:
        logging.info(line)

1239 

1240 

def process_wipe_command(contract, token, confirmed):
    """
    Remove every dump stored on the platform for *contract*.

    Prompts for an interactive "yes" unless *confirmed* is already True
    (the --yes flag), then reports whether the wipe fully succeeded,
    partially succeeded, or failed.
    """
    logging.info(
        "This command will remove all dumps, original or upgraded, in our platform for the contract '%s'. "
        "This will make it impossible to get any support for what happened during the upgrade.",
        contract,
    )
    if not confirmed:
        logging.info("Type 'yes' to confirm")
        confirmed = input().lower().lstrip() == "yes"
    if not confirmed:
        logging.info("Wipe process aborted")
        return
    logging.info("Wipe process starting")
    status = request_data_wipe(contract, token)
    if status == "success":
        result_message = "were successfully"
    elif status == "partial":
        result_message = "were partially"
    else:
        result_message = "failed to be"
    logging.info("The dumps associated to the contract '%s' %s wiped out", contract, result_message)
    if status != "success":
        logging.info("You can try to run the command again or contact support.")

1261 

1262 

def check_restore(args):
    """
    Refuse to run if the requested restore target database already exists.

    Queries the local postgres cluster for database names and exits with
    status 1 on a collision: the script only ever restores into a database
    it creates itself.
    """
    if "restore_name" not in args or not args.restore_name:
        return
    listing = subprocess.check_output(
        [
            "psql",
            "postgres",
            "--no-psqlrc",
            "--tuples-only",
            "--csv",
            "--command",
            "SELECT datname FROM pg_database",
        ],
    )
    existing_databases = listing.decode("utf-8", "ignore").splitlines()
    if args.restore_name in existing_databases:
        logging.error(
            "Refusing to restore the dump into DB '%s' since it already exists.\n"
            "This script will only restore the upgraded dump into a new DB.\n"
            "You can rerun it providing a name that doesn't collide with an already existing DB. This script will create the DB for you.\n",
            args.restore_name,
        )
        sys.exit(1)

1284 

1285 

def set_download_paths(args):
    """
    Store the local upload/download paths for the dump in the state machine.

    When no dump argument is involved both paths default to the current
    directory.
    """
    if "dump" in args and args.dump:
        dump_absolute_path = os.path.abspath(args.dump)

        # If the table of contents path is passed, change the directory and
        # path to the parent of the table of contents so that rsync can send
        # the whole directory without any issues.
        if get_path_basename(dump_absolute_path) == POSTGRES_TABLE_OF_CONTENTS:
            upload_path = os.path.abspath(os.path.realpath(dump_absolute_path) + "/..")
            download_path = os.path.abspath(os.path.realpath(dump_absolute_path) + "/../..")
            args.dump = upload_path
        else:
            upload_path = dump_absolute_path
            download_path = os.path.abspath(os.path.realpath(dump_absolute_path) + "/..")
    else:
        upload_path = "."
        download_path = "."

    STATE_MACHINE.context.update(
        {
            "host_dump_upload_path": upload_path,
            "host_dump_download_path": download_path,
        }
    )

1311 

1312 

def set_common_context(args):
    """
    Validate the SSH key and push the shared transfer settings into the
    state machine context.

    Only applies to the commands that transfer data (test, production,
    restore); exits with status 1 when an existing SSH key file is unusable
    by the current user.
    """
    if args.command not in ("test", "production", "restore"):
        return

    if os.path.isfile(args.ssh_key):
        try:
            # ssh-keygen -y fails if the key is unreadable or malformed.
            run_command(["ssh-keygen", "-y", "-f", args.ssh_key])
        except UpgradeError as exc:
            logging.error(
                "The current user is not able to use the SSH key file '%s'.\nError: %s\n"
                "Hint: each upgrade request is associated with a SSH key. If the key is lost, this\n"
                "script will create another one, but you will have to request a new upgrade.\n"
                "In such case, if asked to resume the upgrade, answer 'n'.",
                args.ssh_key,
                exc,
            )
            sys.exit(1)

    common_context = {
        "ssh_key": args.ssh_key,
        "core_count": args.core_count,
        "data_server_user": args.data_server_user,
        "data_server_path": args.data_server_path,
        "no_restore": args.no_restore,
    }
    STATE_MACHINE.context.update(common_context)

1340 

1341 

def main():
    """
    Entry point: parse arguments, validate the environment, then dispatch
    to the handler of the selected sub-command.
    """
    args = parse_command_line()
    log_level = logging.DEBUG if args.debug else logging.INFO
    logging.basicConfig(
        format="%(asctime)s %(levelname)s: %(message)s",
        datefmt="%Y-%m-%d %I:%M:%S",
        level=log_level,
    )

    # Dumps travel over this connection: plain http is never acceptable.
    if not UPGRADE_SERVER_NAME.startswith("https://"):
        logging.error("Must use https for the upgrade platform.")
        sys.exit(1)
    STATE_MACHINE.context["upgrade_server_name"] = UPGRADE_SERVER_NAME

    # Pre-flight checks and shared context, before any work starts.
    check_binaries_exist(args)
    check_restore(args)
    set_download_paths(args)
    set_common_context(args)

    command = args.command
    try:
        if command in ("test", "production"):
            process_upgrade_command(
                args.dbname,
                args.restore_name,
                args.dump,
                args.contract,
                args.target,
                command,
                get_env_vars(args.env, args.env_file),
            )
        elif command == "restore":
            restore_aim = "production" if args.production else "test"
            process_restore_command(args.token, args.dbname, restore_aim, args.restored_name)
        elif command == "status":
            process_status_command(args.token)
        elif command == "log":
            process_log_command(args.token, args.from_byte)
        elif command == "wipe":
            process_wipe_command(args.contract, args.token, args.yes)
    except (UpgradeError, StateMachine.Error) as exc:
        logging.error("Error: %s", exc)
    except KeyboardInterrupt:
        # A manual interruption is not an error: exit quietly.
        pass

1391 

1392 

# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()