# upload_debug_symbols_to_s3.py
# Copyright Materialize, Inc. and contributors. All rights reserved.
#
# Use of this software is governed by the Business Source License
# included in the LICENSE file at the root of this repository.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.
  9. from pathlib import Path
  10. import boto3
  11. from materialize import elf
  12. # The S3 bucket in which to store debuginfo.
  13. DEBUGINFO_S3_BUCKET = "materialize-debuginfo"
  14. # The binaries for which debuginfo should be uploaded to S3 and Polar Signals.
  15. DEBUGINFO_BINS = {"environmentd", "clusterd", "balancerd", "materialized"}
  16. def upload_debuginfo_to_s3(bin_path: Path, dbg_path: Path, is_tag_build: bool) -> str:
  17. s3 = boto3.client("s3")
  18. with open(bin_path, "rb") as exe, open(dbg_path, "rb") as dbg:
  19. build_id = elf.get_build_id(exe)
  20. assert build_id.isalnum()
  21. assert len(build_id) > 0
  22. dbg_build_id = elf.get_build_id(dbg)
  23. assert build_id == dbg_build_id
  24. for fileobj, name in [
  25. (exe, "executable"),
  26. (dbg, "debuginfo"),
  27. ]:
  28. key = f"buildid/{build_id}/{name}"
  29. print(f"Uploading {name} to s3://{DEBUGINFO_S3_BUCKET}/{key}...")
  30. fileobj.seek(0)
  31. s3.upload_fileobj(
  32. Fileobj=fileobj,
  33. Bucket=DEBUGINFO_S3_BUCKET,
  34. Key=key,
  35. ExtraArgs={
  36. "Tagging": f"ephemeral={'false' if is_tag_build else 'true'}",
  37. },
  38. )
  39. return build_id