From 7aaf348f3d0111b70d67bf0a526b07a3ebfe0442 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Wed, 13 Dec 2023 09:43:22 -0600 Subject: [PATCH 01/15] Change version naming on PackageUpload task to use the predicted version number and a jinja2 template expression --- cumulusci/tasks/salesforce/package_upload.py | 29 +++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/cumulusci/tasks/salesforce/package_upload.py b/cumulusci/tasks/salesforce/package_upload.py index 15c6392938..1570f2bc22 100644 --- a/cumulusci/tasks/salesforce/package_upload.py +++ b/cumulusci/tasks/salesforce/package_upload.py @@ -1,5 +1,6 @@ from datetime import datetime +from jinja2.sandbox import ImmutableSandboxedEnvironment from cumulusci.cli.ui import CliTable from cumulusci.core.dependencies.resolvers import get_static_dependencies from cumulusci.core.exceptions import ( @@ -9,12 +10,17 @@ ) from cumulusci.tasks.salesforce import BaseSalesforceApiTask +jinja2_env = ImmutableSandboxedEnvironment() +DEFAULT_VERSION_NAME_TEMPLATE = ( + "{{ project_config.project__package__name }} - {{ major_version }}.{{ minor_version }}" + "{% if not production %} Beta {{ build_number }}{% endif %}" +) class PackageUpload(BaseSalesforceApiTask): name = "PackageUpload" api_version = "48.0" task_options = { - "name": {"description": "The name of the package version.", "required": True}, + "name": {"description": "The name of the package version."}, "production": { "description": "If True, uploads a production release. Defaults to uploading a beta" }, @@ -44,6 +50,10 @@ class PackageUpload(BaseSalesforceApiTask): "description": "The desired minor version number for the uploaded package. Defaults to next available minor version for the current major version.", "required": False, }, + "version_name_template": { + "description": f"A jinja2 template expression for the version name. 
Defaults to:\n{DEFAULT_VERSION_NAME_TEMPLATE}", + "required": False, + } } def _init_options(self, kwargs): @@ -57,9 +67,15 @@ def _init_options(self, kwargs): if "namespace" not in self.options: self.options["namespace"] = self.project_config.project__package__namespace + # Set the default version_name_template option if not already set + if "version_name_template" not in self.options: + self.options["version_name_template"] = DEFAULT_VERSION_NAME_TEMPLATE def _run_task(self): self._validate_versions() + self._set_version_name() + self.logger.info(f"Generated version name: {self.options['name']}") + return self._set_package_id() self._set_package_info() @@ -84,6 +100,7 @@ def _validate_versions(self): SELECT MajorVersion, MinorVersion, PatchVersion, + BuildNumber, ReleaseState FROM MetadataPackageVersion ORDER BY @@ -133,12 +150,22 @@ def _validate_versions(self): else: if version["ReleaseState"] == "Beta": self.options["minor_version"] = str(version["MinorVersion"]) + self.options["build_number"] = str(version["BuildNumber"] + 1) else: self.options["minor_version"] = str(version["MinorVersion"] + 1) else: if "minor_version" not in self.options: self.options["minor_version"] = "0" + def _set_version_name(self): + """ Sets self.options["name"] using the jinja2 template expression in self.options["version_name_template"] """ + # Add self.project_config and self.options keys to the jinja2 environment + jinja2_context = {"project_config": self.project_config} + jinja2_context.update(self.options) + template = jinja2_env.from_string(self.options["version_name_template"]) + value = template.render(**jinja2_context) + self.options["name"] = value + def _set_package_info(self): if not self.package_id: self._set_package_id() From e5dbbd6e2c33b30f010c6256af0a459e245a3caa Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Wed, 13 Dec 2023 10:18:34 -0600 Subject: [PATCH 02/15] Simple plugin framework that uses a top level plugins: key in cumulusci.yml to list python modules to load. If a plugin is specified and not available for import, an error will be thrown. Plugin packages are scanned for a cumulusci.yml in their root directory and if found, it is added to the merge stack for cumulusci.yml before the project config. --- cumulusci/core/config/project_config.py | 38 ++++++++++++++++++++++--- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/cumulusci/core/config/project_config.py b/cumulusci/core/config/project_config.py index 856bca6401..98d86a1095 100644 --- a/cumulusci/core/config/project_config.py +++ b/cumulusci/core/config/project_config.py @@ -16,6 +16,7 @@ from cumulusci.core.config.base_config import BaseConfig from cumulusci.core.debug import get_debug_mode +from cumulusci.core.utils import import_global from cumulusci.core.versions import PackageVersionNumber from cumulusci.utils.version_strings import LooseVersion @@ -165,6 +166,29 @@ def _load_config(self): if project_config: self.config_project.update(project_config) + self.config_plugins = {} + for plugin in project_config.get("plugins", []): + # Look for a cumulusci.yml file in the plugin's python package root directory + # and load it if found + try: + __import__(plugin) + plugin_config_path = ( + Path(sys.modules[plugin].__file__).parent / "cumulusci.yml" + ) + except Exception as exc: + raise ConfigError(f"Could not load plugin {plugin}: {exc}. 
Please make sure the plugin is installed.") + if plugin_config_path.is_file(): + plugin_config = cci_safe_load( + str(plugin_config_path), logger=self.logger + ) + plugin_config = plugin_config + if plugin_config: + self.config_plugins[plugin] = plugin_config + self.logger.info(f"Loaded plugin {plugin}") + else: + self.logger.info(f"Loaded plugin {plugin} but no cumulusci.yml found") + + # Load the local project yaml config file if it exists if self.config_project_local_path: local_config = cci_safe_load( @@ -183,15 +207,21 @@ def _load_config(self): if additional_yaml_config: self.config_additional_yaml.update(additional_yaml_config) - self.config = merge_config( - { + merge_stack = { "universal_config": self.config_universal, "global_config": self.config_global, + } + if self.config_plugins: + for plugin, plugin_config in self.config_plugins.items(): + merge_stack[plugin] = plugin_config + + merge_stack.update({ "project_config": self.config_project, "project_local_config": self.config_project_local, "additional_yaml": self.config_additional_yaml, - } - ) + }) + + self.config = merge_config(merge_stack) self._validate_config() From 1873eddd8d92bbd555dc83a6657680540458f671 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Wed, 13 Dec 2023 10:30:00 -0600 Subject: [PATCH 03/15] Add support for outputting markdown of test results including a default integration for Job Summary Reports in GitHub Actions runtime environments --- cumulusci/tasks/apex/testrunner.py | 66 ++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/cumulusci/tasks/apex/testrunner.py b/cumulusci/tasks/apex/testrunner.py index 5accf25144..965e925a3b 100644 --- a/cumulusci/tasks/apex/testrunner.py +++ b/cumulusci/tasks/apex/testrunner.py @@ -3,6 +3,7 @@ import html import io import json +import os import re from cumulusci.core.exceptions import ( @@ -151,6 +152,11 @@ class RunApexTests(BaseSalesforceApiTask): "json_output": { "description": "File name for json output. Defaults to test_results.json" }, + "markdown_output": { + "description": ("If set, outputs GitHub flavored markdown output for test" + "results that can be included as GitHub Actions job summaries." + " By default, uses $GITHUB_STEP_SUMMARY if available in environment"), + }, "retry_failures": { "description": "A list of regular expression patterns to match against " "test failures. 
If failures match, the failing tests are retried in " @@ -199,6 +205,12 @@ def _init_options(self, kwargs): "json_output", "test_results.json" ) + if ( + self.options.get("markdown_output") is None + and "GITHUB_STEP_SUMMARY" in os.environ + ): + self.options["markdown_output"] = os.environ["GITHUB_STEP_SUMMARY"] + self.options["retry_failures"] = process_list_arg( self.options.get("retry_failures", []) ) @@ -735,3 +747,57 @@ def _write_output(self, test_results): if json_output: with io.open(json_output, mode="w", encoding="utf-8") as f: f.write(str(json.dumps(test_results, indent=4))) + markdown_output = self.options.get("markdown_output") + if markdown_output: + with io.open(markdown_output, mode="w+", encoding="utf-8") as f: + + # Test Results + f.write("# Test Results\n") + f.write("| Status | Count |\n") + f.write("| :--- | ---: |\n") + if self.counts["Pass"]: + f.write(f"| 🟢 **Passed** | {self.counts['Pass']}|\n") + if self.counts["Fail"]: + f.write(f"| ❌ **Failed** | {self.counts['Fail']}|\n") + if self.counts["CompileFail"]: + f.write(f"| ❗ **Compile Failed** | {self.counts['CompileFail']}|\n") + if self.counts["Retriable"]: + f.write(f"| ♻ **Retriable** | {self.counts['Retriable']}|\n") + if self.counts["Skip"]: + f.write(f"| 🚫 **Skipped** | {self.counts['Skip']}|\n") + f.write("\n") + + results_failed = [] + results = [] + for result in test_results: + duration = result.get("Stats", {}).get("duration") + if duration: + duration = str(duration) + test_class = result.get("ClassName") + test_method = result.get("Method") + outcome = result.get("Outcome") + results.append( + f"| {test_class} | {test_method} | {outcome} | {duration} |" + ) + if outcome in ["Fail", "CompileFail"]: + results_failed.append(result) + + if results_failed: + f.write("\n## Test Failures\n") + for result in results_failed: + message = result.get("Message") + stacktrace = result.get("StackTrace") + f.write(f"**{result['ClassName']}.{result['Method']}**\n") + f.write("```") + if message: + f.write(f"Message: {message}\n") + if result["StackTrace"]: + f.write(f"\nStack Trace: {stacktrace}") + f.write(f"{result['StackTrace']}\n") + f.write("```\n\n") + + if results: + f.write("\n## Individual Test Results\n") + f.write("| Class | Method | Outcome | Duration |\n") + f.write("| :--- | :--- | ---: | ---: |\n") + f.write("\n".join(results)) From 22de0cc67b322b5080827352f1536543a3a0d061 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Wed, 13 Dec 2023 11:07:08 -0600 Subject: [PATCH 04/15] Remove testing return --- cumulusci/tasks/salesforce/package_upload.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cumulusci/tasks/salesforce/package_upload.py b/cumulusci/tasks/salesforce/package_upload.py index 1570f2bc22..a54de7d0ea 100644 --- a/cumulusci/tasks/salesforce/package_upload.py +++ b/cumulusci/tasks/salesforce/package_upload.py @@ -74,8 +74,6 @@ def _run_task(self): self._validate_versions() self._set_version_name() - self.logger.info(f"Generated version name: {self.options['name']}") - return self._set_package_id() self._set_package_info() @@ -165,6 +163,7 @@ def _set_version_name(self): template = jinja2_env.from_string(self.options["version_name_template"]) value = template.render(**jinja2_context) self.options["name"] = value + self.logger.info(f"Generated version name: {self.options['name']}") def _set_package_info(self): if not self.package_id: From a1d0527c82cf3dd746cea9cf0d38960cc2737d11 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Sun, 17 Dec 2023 14:58:47 -0600 Subject: [PATCH 
05/15] Working parent branch resolver, need to write test coverage --- cumulusci/core/dependencies/resolvers.py | 74 +++++++++++++++++++ .../core/dependencies/tests/test_resolvers.py | 37 ++++++++++ cumulusci/cumulusci.yml | 2 + cumulusci/utils/git.py | 6 ++ 4 files changed, 119 insertions(+) diff --git a/cumulusci/core/dependencies/resolvers.py b/cumulusci/core/dependencies/resolvers.py index afa0d04e31..4296ee37e0 100644 --- a/cumulusci/core/dependencies/resolvers.py +++ b/cumulusci/core/dependencies/resolvers.py @@ -38,6 +38,7 @@ get_feature_branch_name, get_release_identifier, is_release_branch_or_child, + is_child_branch, ) @@ -46,10 +47,12 @@ class DependencyResolutionStrategy(StrEnum): STATIC_TAG_REFERENCE = "tag" COMMIT_STATUS_EXACT_BRANCH = "commit_status_exact_branch" + COMMIT_STATUS_PARENT_BRANCH = "commit_status_parent_branch" COMMIT_STATUS_RELEASE_BRANCH = "commit_status_release_branch" COMMIT_STATUS_PREVIOUS_RELEASE_BRANCH = "commit_status_previous_release_branch" COMMIT_STATUS_DEFAULT_BRANCH = "commit_status_default_branch" UNLOCKED_EXACT_BRANCH = "unlocked_exact_branch" + UNLOCKED_PARENT_BRANCH = "unlocked_parent_branch" UNLOCKED_RELEASE_BRANCH = "unlocked_release_branch" UNLOCKED_PREVIOUS_RELEASE_BRANCH = "unlocked_previous_release_branch" UNLOCKED_DEFAULT_BRANCH = "unlocked_default_branch" @@ -304,6 +307,57 @@ def resolve( ) return (None, None) +class AbstractGitHubParentBranchResolver( + AbstractGitHubCommitStatusPackageResolver, abc.ABC +): + """Abstract base class for resolvers that use commit statuses on child branches to find refs on parent branches.""" + + branch_offset_start = 0 + branch_offset_end = 0 + + def is_valid_repo_context(self, context: BaseProjectConfig) -> bool: + return is_child_branch( + context.repo_branch, context.project__git__prefix_feature # type: ignore + ) + + def get_branches( + self, + dep: BaseGitHubDependency, + context: BaseProjectConfig, + ) -> List[Branch]: + repo = context.get_repo_from_url(dep.github) + if not repo: + raise DependencyResolutionError( + f"Unable to access GitHub repository for {dep.github}" + ) + + try: + remote_branch_prefix = find_repo_feature_prefix(repo) + except Exception: + context.logger.info( + f"Could not find feature branch prefix or commit-status context for {repo.clone_url}. Unable to resolve packages." + ) + return [] + + # Construct a list of all parent branches for a child with format feature/foo__bar__baz + parents = [] + branch_parts = context.repo_branch[len(remote_branch_prefix) :].split("__") + for i in range(1, len(branch_parts)): + parents.append( + f"{remote_branch_prefix}{'__'.join(branch_parts[:i])}" + ) + + # We will check at least the release branch corresponding to our release id. + # We may be configured to check backwards on release branches. 
+ branches = [] + for parent in parents: + try: + branches.append(repo.branch(parent)) + except NotFoundError: + context.logger.info(f"Remote branch {parent} not found") + pass + + return branches class AbstractGitHubReleaseBranchResolver( AbstractGitHubCommitStatusPackageResolver, abc.ABC @@ -451,6 +505,14 @@ class GitHubExactMatch2GPResolver(AbstractGitHubExactMatchCommitStatusResolver): commit_status_context = "2gp_context" commit_status_default = "Build Feature Test Package" +class GitHubParentBranch2GPResolver(AbstractGitHubParentBranchResolver): + """Resolver that identifies a ref by finding a 2GP package version + in a commit status on a branch whose name matches the local branch.""" + + name = "GitHub Parent Branch Commit Status Resolver" + commit_status_context = "2gp_context" + commit_status_default = "Build Feature Test Package" + class GitHubExactMatchUnlockedCommitStatusResolver( AbstractGitHubExactMatchCommitStatusResolver @@ -462,6 +524,16 @@ class GitHubExactMatchUnlockedCommitStatusResolver( commit_status_context = "unlocked_context" commit_status_default = "Build Unlocked Test Package" +class GitHubParentBranchUnlockedCommitStatusResolver( + AbstractGitHubParentBranchResolver +): + """Resolver that identifies a ref by finding an unlocked package version + in a commit status on a parent branch of the local branch.""" + + name = "GitHub Parent Branch Unlocked Commit Status Resolver" + commit_status_context = "unlocked_context" + commit_status_default = "Build Unlocked Test Package" + class AbstractGitHubDefaultBranchCommitStatusResolver( AbstractGitHubCommitStatusPackageResolver, abc.ABC @@ -496,6 +568,7 @@ class GitHubDefaultBranchUnlockedCommitStatusResolver( RESOLVER_CLASSES = { DependencyResolutionStrategy.STATIC_TAG_REFERENCE: GitHubTagResolver, DependencyResolutionStrategy.COMMIT_STATUS_EXACT_BRANCH: GitHubExactMatch2GPResolver, + DependencyResolutionStrategy.COMMIT_STATUS_PARENT_BRANCH: GitHubParentBranch2GPResolver, DependencyResolutionStrategy.COMMIT_STATUS_RELEASE_BRANCH: GitHubReleaseBranchCommitStatusResolver, DependencyResolutionStrategy.COMMIT_STATUS_PREVIOUS_RELEASE_BRANCH: GitHubPreviousReleaseBranchCommitStatusResolver, DependencyResolutionStrategy.COMMIT_STATUS_DEFAULT_BRANCH: GitHubDefaultBranch2GPResolver, @@ -503,6 +576,7 @@ class GitHubDefaultBranchUnlockedCommitStatusResolver( DependencyResolutionStrategy.RELEASE_TAG: GitHubReleaseTagResolver, DependencyResolutionStrategy.UNMANAGED_HEAD: GitHubUnmanagedHeadResolver, DependencyResolutionStrategy.UNLOCKED_EXACT_BRANCH: GitHubExactMatchUnlockedCommitStatusResolver, + DependencyResolutionStrategy.UNLOCKED_PARENT_BRANCH: GitHubParentBranchUnlockedCommitStatusResolver, DependencyResolutionStrategy.UNLOCKED_RELEASE_BRANCH: GitHubReleaseBranchUnlockedResolver, DependencyResolutionStrategy.UNLOCKED_PREVIOUS_RELEASE_BRANCH: GitHubPreviousReleaseBranchUnlockedResolver, DependencyResolutionStrategy.UNLOCKED_DEFAULT_BRANCH: GitHubDefaultBranchUnlockedCommitStatusResolver, diff --git a/cumulusci/core/dependencies/tests/test_resolvers.py b/cumulusci/core/dependencies/tests/test_resolvers.py index acd3eed745..d2d2797008 100644 --- a/cumulusci/core/dependencies/tests/test_resolvers.py +++ b/cumulusci/core/dependencies/tests/test_resolvers.py @@ -22,6 +22,7 @@ GitHubBetaReleaseTagResolver, GitHubDefaultBranch2GPResolver, GitHubExactMatch2GPResolver, + GitHubParentBranch2GPResolver, GitHubReleaseBranchCommitStatusResolver, GitHubReleaseTagResolver, GitHubTagResolver, @@ -328,6 +329,42 @@ def 
test_unmanaged_head_resolver(self, project_config): assert resolver.resolve(dep, project_config) == ("commit_sha", None) +class ConcreteGitHubParentBranchResolver(AbstractGitHubReleaseBranchResolver): + def resolve( + self, dep: GitHubDynamicDependency, context: BaseProjectConfig + ) -> Tuple[Optional[str], Optional[StaticDependency]]: + return (None, None) + +#class TestGitHubParentBranchResolver: +# def test_is_valid_repo_context(self): +# pc = BaseProjectConfig(UniversalConfig()) +# +# pc.repo_info["branch"] = "feature/parent__child" +# pc.project__git["prefix_feature"] = "feature/" +# assert ConcreteGitHubParentBranchResolver().is_valid_repo_context(pc) +# +# pc.repo_info["branch"] = "feature/parent__child__grandchild" +# assert ConcreteGitHubParentBranchResolver().is_valid_repo_context(pc) +# +# pc.repo_info["branch"] = "feature/parent" +# assert not ConcreteGitHubParentBranchResolver().is_valid_repo_context(pc) +# +# def test_can_resolve(self): +# pc = BaseProjectConfig(UniversalConfig()) +# +# pc.repo_info["branch"] = "feature/parent__child" +# pc.project__git["prefix_feature"] = "feature/" +# +# gh = ConcreteGitHubParentBranchResolver() +# +# assert gh.can_resolve( +# GitHubDynamicDependency(github="https://github.com/SFDO-Tooling/Test"), +# pc, +# ) +# +# assert not gh.can_resolve(ConcreteDynamicDependency(), pc) + + class ConcreteGitHubReleaseBranchResolver(AbstractGitHubReleaseBranchResolver): def resolve( diff --git a/cumulusci/cumulusci.yml b/cumulusci/cumulusci.yml index bcdd32f69e..9fe275bb95 100644 --- a/cumulusci/cumulusci.yml +++ b/cumulusci/cumulusci.yml @@ -1503,12 +1503,14 @@ project: resolution_strategies: unlocked: - unlocked_exact_branch + - unlocked_parent_branch - unlocked_release_branch - unlocked_previous_release_branch - unlocked_default_branch commit_status: - tag - commit_status_exact_branch + - commit_status_parent_branch - commit_status_release_branch - commit_status_previous_release_branch - commit_status_default_branch diff --git a/cumulusci/utils/git.py b/cumulusci/utils/git.py index cc4b7c7f86..d730f560a1 100644 --- a/cumulusci/utils/git.py +++ b/cumulusci/utils/git.py @@ -42,6 +42,12 @@ def is_release_branch_or_child(branch_name: str, prefix: str) -> bool: parts = branch_name[len(prefix) :].split("__") return len(parts) >= 1 and parts[0].isdigit() +def is_child_branch(branch_name: str, prefix: str) -> bool: + if not branch_name.startswith(prefix): + return False + parts = branch_name[len(prefix) :].split("__") + return len(parts) >= 1 + def get_feature_branch_name(branch_name: str, prefix: str) -> Optional[str]: if branch_name.startswith(prefix): From 73c931bf56f893a877d62de4657c4dd57655b934 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Mon, 1 Jan 2024 19:34:45 -0600 Subject: [PATCH 06/15] Added support for `--load-yml` option on cci task and cci flow commands including full test coverage and docs. 
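
As a minimal usage sketch (not exhaustive of the new option), the example `additional.yml` added in this patch defines `my_custom_task` and `my_custom_flow`, which only become visible to a command when the file is loaded explicitly:

```console
$ cci task list --load-yml additional.yml
$ cci task run my_custom_task --load-yml additional.yml
$ cci flow info my_custom_flow --load-yml additional.yml
```

The extra yaml is merged on top of the project config only for the single invocation, so these definitions do not appear in `cci task list` or `cci flow list` when `--load-yml` is omitted.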
--- additional.yml | 14 ++++++ cumulusci/cli/cci.py | 18 ++++++- cumulusci/cli/flow.py | 25 ++++++++-- cumulusci/cli/task.py | 22 +++++++-- cumulusci/cli/tests/test_cci.py | 84 +++++++++++++++++++++++++++++++- cumulusci/cli/tests/test_task.py | 4 +- docs/config.md | 22 +++++++++ 7 files changed, 176 insertions(+), 13 deletions(-) create mode 100644 additional.yml diff --git a/additional.yml b/additional.yml new file mode 100644 index 0000000000..7a78b7a871 --- /dev/null +++ b/additional.yml @@ -0,0 +1,14 @@ +flows: + my_custom_flow: + description: A custom flow loaded via --load-yml + group: Loaded YAML + steps: + 1: + task: my_custom_task +tasks: + my_custom_task: + description: A custom task loaded via --load-yml + group: Loaded YAML + class_path: cumulusci.tasks.util.Sleep + options: + seconds: 1 diff --git a/cumulusci/cli/cci.py b/cumulusci/cli/cci.py index adc50c49df..d950fc6591 100644 --- a/cumulusci/cli/cci.py +++ b/cumulusci/cli/cci.py @@ -1,5 +1,6 @@ import code import contextlib +import os import pdb import runpy import sys @@ -52,6 +53,7 @@ def main(args=None): This wraps the `click` library in order to do some initialization and centralized error handling. """ + with contextlib.ExitStack() as stack: args = args or sys.argv @@ -71,13 +73,27 @@ def main(args=None): logger, tempfile_path = get_tempfile_logger() stack.enter_context(tee_stdout_stderr(args, logger, tempfile_path)) + context_kwargs = {} + + # Allow commands to load additional yaml configuration from a file + if "--load-yml" in args: + yml_path_index = args.index("--load-yml") + 1 + try: + load_yml_path = args[yml_path_index] + except IndexError: + raise CumulusCIUsageError("No path specified for --load-yml") + if not os.path.isfile(load_yml_path): + raise CumulusCIUsageError(f"File not found: {load_yml_path}") + with open(load_yml_path, "r") as f: + context_kwargs["additional_yaml"] = f.read() + debug = "--debug" in args if debug: args.remove("--debug") with set_debug_mode(debug): try: - runtime = CliRuntime(load_keychain=False) + runtime = CliRuntime(load_keychain=False, **context_kwargs) except Exception as e: handle_exception(e, is_error_command, tempfile_path, debug) sys.exit(1) diff --git a/cumulusci/cli/flow.py b/cumulusci/cli/flow.py index 96bd8db9cf..8733331129 100644 --- a/cumulusci/cli/flow.py +++ b/cumulusci/cli/flow.py @@ -24,8 +24,12 @@ def flow(): @click.option( "--project", "project", is_flag=True, help="Include project-specific flows only" ) +@click.option( + "--load-yml", + help="If set, loads the specified yml file into the the project config as additional config", +) @pass_runtime(require_project=False, require_keychain=True) -def flow_doc(runtime, project=False): +def flow_doc(runtime, project=False, load_yml=None): flow_info_path = Path(__file__, "..", "..", "..", "docs", "flows.yml").resolve() with open(flow_info_path, "r", encoding="utf-8") as f: flow_info = load_yaml_data(f) @@ -79,8 +83,12 @@ def flow_doc(runtime, project=False): @flow.command(name="list", help="List available flows for the current context") @click.option("--plain", is_flag=True, help="Print the table using plain ascii.") @click.option("--json", "print_json", is_flag=True, help="Print a json string") +@click.option( + "--load-yml", + help="If set, loads the specified yml file into the the project config as additional config", +) @pass_runtime(require_project=False) -def flow_list(runtime, plain, print_json): +def flow_list(runtime, plain, print_json, load_yml=None): plain = plain or runtime.universal_config.cli__plain_output 
flows = runtime.get_available_flows() if print_json: @@ -106,8 +114,12 @@ def flow_list(runtime, plain, print_json): @flow.command(name="info", help="Displays information for a flow") @click.argument("flow_name") +@click.option( + "--load-yml", + help="If set, loads the specified yml file into the the project config as additional config", +) @pass_runtime(require_keychain=True) -def flow_info(runtime, flow_name): +def flow_info(runtime, flow_name, load_yml=None): try: coordinator = runtime.get_flow(flow_name) output = coordinator.get_summary(verbose=True) @@ -141,9 +153,12 @@ def flow_info(runtime, flow_name): is_flag=True, help="Disables all prompts. Set for non-interactive mode use such as calling from scripts or CI systems", ) +@click.option( + "--load-yml", + help="If set, loads the specified yml file into the the project config as additional config", +) @pass_runtime(require_keychain=True) -def flow_run(runtime, flow_name, org, delete_org, debug, o, no_prompt): - +def flow_run(runtime, flow_name, org, delete_org, debug, o, no_prompt, load_yml=None): # Get necessary configs org, org_config = runtime.get_org(org) if delete_org and not org_config.scratch: diff --git a/cumulusci/cli/task.py b/cumulusci/cli/task.py index cfbe749b91..1ee0ee7df3 100644 --- a/cumulusci/cli/task.py +++ b/cumulusci/cli/task.py @@ -25,8 +25,12 @@ def task(): @task.command(name="list", help="List available tasks for the current context") @click.option("--plain", is_flag=True, help="Print the table using plain ascii.") @click.option("--json", "print_json", is_flag=True, help="Print a json string") +@click.option( + "--load-yml", + help="If set, loads the specified yml file into the the project config as additional config", +) @pass_runtime(require_project=False) -def task_list(runtime, plain, print_json): +def task_list(runtime, plain, print_json, load_yml=None): tasks = runtime.get_available_tasks() plain = plain or runtime.universal_config.cli__plain_output @@ -60,8 +64,12 @@ def task_list(runtime, plain, print_json): is_flag=True, help="If true, write output to a file (./docs/project_tasks.rst or ./docs/cumulusci_tasks.rst)", ) +@click.option( + "--load-yml", + help="If set, loads the specified yml file into the the project config as additional config", +) @pass_runtime(require_project=False) -def task_doc(runtime, project=False, write=False): +def task_doc(runtime, project=False, write=False, load_yml=None): if project and runtime.project_config is None: raise click.UsageError( "The --project option can only be used inside a project." 
@@ -95,8 +103,12 @@ def task_doc(runtime, project=False, write=False): @task.command(name="info", help="Displays information for a task") @click.argument("task_name") +@click.option( + "--load-yml", + help="If set, loads the specified yml file into the the project config as additional config", +) @pass_runtime(require_project=False, require_keychain=True) -def task_info(runtime, task_name): +def task_info(runtime, task_name, load_yml=None): task_config = ( runtime.project_config.get_task(task_name) if runtime.project_config is not None @@ -126,6 +138,10 @@ class RunTaskCommand(click.MultiCommand): "help": "Drops into the Python debugger at task completion.", "is_flag": True, }, + "load-yml": { + "help": "If set, loads the specified yml file into the the project config as additional config", + "is_flag": False, + }, } def list_commands(self, ctx): diff --git a/cumulusci/cli/tests/test_cci.py b/cumulusci/cli/tests/test_cci.py index ccb06adaea..e5a01712f3 100644 --- a/cumulusci/cli/tests/test_cci.py +++ b/cumulusci/cli/tests/test_cci.py @@ -1,3 +1,4 @@ +import builtins import contextlib import io import os @@ -8,6 +9,8 @@ from unittest import mock import click +from click import Command +from click.testing import CliRunner import pkg_resources import pytest from requests.exceptions import ConnectionError @@ -15,10 +18,11 @@ import cumulusci from cumulusci.cli import cci +from cumulusci.cli.task import task_list from cumulusci.cli.tests.utils import run_click_command from cumulusci.cli.utils import get_installed_version from cumulusci.core.config import BaseProjectConfig -from cumulusci.core.exceptions import CumulusCIException +from cumulusci.core.exceptions import CumulusCIException, CumulusCIUsageError from cumulusci.utils import temporary_dir MagicMock = mock.MagicMock() @@ -209,6 +213,83 @@ def test_main__CliRuntime_error(CliRuntime, get_tempfile_logger, tee): tempfile.unlink() +@mock.patch("cumulusci.cli.cci.init_logger") # side effects break other tests +@mock.patch("cumulusci.cli.cci.get_tempfile_logger") +@mock.patch("cumulusci.cli.cci.tee_stdout_stderr") +@mock.patch("sys.exit") +def test_cci_load_yml__missing( + exit, tee_stdout_stderr, get_tempfile_logger, init_logger +): + # get_tempfile_logger doesn't clean up after itself which breaks other tests + get_tempfile_logger.return_value = mock.Mock(), "" + runner = CliRunner() + # Mock the contents of the yaml file + with pytest.raises(CumulusCIUsageError): + cci.main( + [ + "cci", + "task", + "list", + "--load-yml", + ], + ) + + +@mock.patch("cumulusci.cli.cci.init_logger") # side effects break other tests +@mock.patch("cumulusci.cli.cci.get_tempfile_logger") +@mock.patch("cumulusci.cli.cci.tee_stdout_stderr") +@mock.patch("sys.exit") +# @mock.patch("cumulusci.cli.cci.CliRuntime") +def test_cci_load_yml__notfound( + exit, tee_stdout_stderr, get_tempfile_logger, init_logger +): + # get_tempfile_logger doesn't clean up after itself which breaks other tests + get_tempfile_logger.return_value = mock.Mock(), "" + runner = CliRunner() + with pytest.raises(CumulusCIUsageError): + cci.main( + [ + "cci", + "task", + "list", + "--load-yml", + "/path/that/does/not/exist/anywhere", + ], + ) + + +@mock.patch("cumulusci.cli.cci.init_logger") # side effects break other tests +@mock.patch("cumulusci.cli.cci.get_tempfile_logger") +@mock.patch("cumulusci.cli.cci.tee_stdout_stderr") +@mock.patch("sys.exit") +@mock.patch("cumulusci.cli.cci.CliRuntime") +def test_cci_load_yml( + CliRuntime, exit, tee_stdout_stderr, get_tempfile_logger, init_logger +): + 
# get_tempfile_logger doesn't clean up after itself which breaks other tests + get_tempfile_logger.return_value = mock.Mock(), "" + + load_yml_path = [cumulusci.__path__[0][: -len("/cumulusci")]] + load_yml_path.append("additional.yml") + load_yml = os.path.join(*load_yml_path) + + cci.main( + [ + "cci", + "org", + "default", + "--load-yml", + load_yml, + ] + ) + + # Check that CliRuntime was called with the correct arguments + with open(load_yml, "r") as f: + CliRuntime.assert_called_once_with( + load_keychain=False, additional_yaml=f.read() + ) + + @mock.patch("cumulusci.cli.cci.init_logger") # side effects break other tests @mock.patch("cumulusci.cli.cci.get_tempfile_logger") @mock.patch("cumulusci.cli.cci.tee_stdout_stderr") @@ -217,7 +298,6 @@ def test_main__CliRuntime_error(CliRuntime, get_tempfile_logger, tee): def test_handle_org_name( CliRuntime, tee_stdout_stderr, get_tempfile_logger, init_logger ): - # get_tempfile_logger doesn't clean up after itself which breaks other tests get_tempfile_logger.return_value = mock.Mock(), "" diff --git a/cumulusci/cli/tests/test_task.py b/cumulusci/cli/tests/test_task.py index 398fb1b8b1..6b88aeaef1 100644 --- a/cumulusci/cli/tests/test_task.py +++ b/cumulusci/cli/tests/test_task.py @@ -126,10 +126,10 @@ def test_format_help(runtime): def test_get_default_command_options(): opts = task.RunTaskCommand()._get_default_command_options(is_salesforce_task=False) - assert len(opts) == 4 + assert len(opts) == 5 opts = task.RunTaskCommand()._get_default_command_options(is_salesforce_task=True) - assert len(opts) == 5 + assert len(opts) == 6 assert any([o.name == "org" for o in opts]) diff --git a/docs/config.md b/docs/config.md index 99ca4fc416..865202cff9 100644 --- a/docs/config.md +++ b/docs/config.md @@ -908,3 +908,25 @@ how a task or flow is _currently_ configured. The information output by these commands change as you make further customizations to your project's `cumulusci.yml` file. ``` + +## Loading additional yaml configuration from a file + +CumulusCI supports loading in an additional yaml file from the command line with the `--load-yml ` option on `cci task` and `cci flow` commands. This can be useful if you have one-off automation configurations that you want to keep out of the main project's configuration and load only in special cases. + +A good example of this is upgrade or migration scripts to do things like enabling a new feature from a release. If you create a lot of these upgrade scripts as their own flows. All those flows and all their one-off custom tasks would show up for everyone in the `cci task list` and `cci flow list`. Instead, you could create a directory of yaml files such as `migrations/1.2.yml` and `migrations/1.3.yml` where each file contains a set of custom tasks and flows only needed for their migration logic. + +You could inspect the new commands added on top of the existing project config defined in cumulusci.yml with the following commands: + +```console +# Tasks +$ cci task list --load-yml migrations/1.2.yml +$ cci task info custom_task_for_1.2 --load-yml migrations/1.2.yml +$ cci task run custom_task_for_1.2 --load-yml migrations/1.2.yml + +# Flows +$ cci flow list --load-yml migrations/1.2.yml +$ cci flow info custom_flow_for_1.2 --load-yml migrations/1.2.yml +$ cci flow run custom_flow_for_1.2 --load-yml migrations/1.2.yml +``` + +Behind the scenes, CumulusCI is merging the yaml file specified by `--load-yml` on top of the project config. 
This means any customizations you could make in cumulusci.yml can also be made in a file loaded via `--load-yml` \ No newline at end of file From a636dd1e0515ae7b3572cee839ec5f30eef2f151 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Mon, 1 Jan 2024 19:37:41 -0600 Subject: [PATCH 07/15] Update docs --- docs/config.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/config.md b/docs/config.md index 865202cff9..3de230be5b 100644 --- a/docs/config.md +++ b/docs/config.md @@ -929,4 +929,4 @@ $ cci flow info custom_flow_for_1.2 --load-yml migrations/1.2.yml $ cci flow run custom_flow_for_1.2 --load-yml migrations/1.2.yml ``` -Behind the scenes, CumulusCI is merging the yaml file specified by `--load-yml` on top of the project config. This means any customizations you could make in cumulusci.yml can also be made in a file loaded via `--load-yml` \ No newline at end of file +Behind the scenes, CumulusCI is merging the yaml file specified by `--load-yml` on top of the project config only for the single command being run. This means any customizations you could make in cumulusci.yml can also be made in a file loaded via `--load-yml`, and they won't have any impact on any other commands you run without `--load-yml` or with a different yaml file path provided. \ No newline at end of file From 952fdf53bedbea5545e8097c35503dc47003da99 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Mon, 8 Jan 2024 14:24:56 -0600 Subject: [PATCH 08/15] Add BuildNumber to soql order by when retrieving package versions and update template to match default version numbering for betas --- cumulusci/tasks/salesforce/package_upload.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/cumulusci/tasks/salesforce/package_upload.py b/cumulusci/tasks/salesforce/package_upload.py index a54de7d0ea..fbf7d57736 100644 --- a/cumulusci/tasks/salesforce/package_upload.py +++ b/cumulusci/tasks/salesforce/package_upload.py @@ -13,9 +13,10 @@ jinja2_env = ImmutableSandboxedEnvironment() DEFAULT_VERSION_NAME_TEMPLATE = ( "{{ project_config.project__package__name }} - {{ major_version }}.{{ minor_version }}" - "{% if not production %} Beta {{ build_number }}{% endif %}" + "{% if not production %} (Beta {{ build_number }}){% endif %}" ) + class PackageUpload(BaseSalesforceApiTask): name = "PackageUpload" api_version = "48.0" @@ -53,7 +54,7 @@ class PackageUpload(BaseSalesforceApiTask): "version_name_template": { "description": f"A jinja2 template expression for the version name. 
Defaults to:\n{DEFAULT_VERSION_NAME_TEMPLATE}", "required": False, - } + }, } def _init_options(self, kwargs): @@ -70,8 +71,8 @@ def _init_options(self, kwargs): # Set the default version_name_template option if not already set if "version_name_template" not in self.options: self.options["version_name_template"] = DEFAULT_VERSION_NAME_TEMPLATE - def _run_task(self): + def _run_task(self): self._validate_versions() self._set_version_name() self._set_package_id() @@ -105,6 +106,7 @@ def _validate_versions(self): MajorVersion DESC, MinorVersion DESC, PatchVersion DESC, + BuildNumber DESC, ReleaseState DESC LIMIT 1 """ @@ -156,7 +158,7 @@ def _validate_versions(self): self.options["minor_version"] = "0" def _set_version_name(self): - """ Sets self.options["name"] using the jinja2 template expression in self.options["version_name_template"] """ + """Sets self.options["name"] using the jinja2 template expression in self.options["version_name_template"]""" # Add self.project_config and self.options keys to the jinja2 environment jinja2_context = {"project_config": self.project_config} jinja2_context.update(self.options) From 739908b1597769c2ac36c5eda0c5aa6dbb5a4021 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Tue, 19 Mar 2024 10:31:29 -0500 Subject: [PATCH 09/15] Update create_package_version.py --- cumulusci/tasks/create_package_version.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cumulusci/tasks/create_package_version.py b/cumulusci/tasks/create_package_version.py index 4104e2c0f7..a26f3def04 100644 --- a/cumulusci/tasks/create_package_version.py +++ b/cumulusci/tasks/create_package_version.py @@ -151,6 +151,7 @@ class CreatePackageVersion(BaseSalesforceApiTask): "Defaults to False." }, } + api_version = "52.0" def _init_options(self, kwargs): super()._init_options(kwargs) @@ -199,7 +200,7 @@ def _init_task(self): self.tooling = get_simple_salesforce_connection( self.project_config, get_devhub_config(self.project_config), - api_version=self.project_config.project__package__api_version, + api_version=self.api_version, base_url="tooling", ) self.context = TaskContext(self.org_config, self.project_config, self.logger) From 216820fc2998f89c9f2689f202f2973d37051975 Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Thu, 9 May 2024 07:29:39 -0500 Subject: [PATCH 10/15] Never set in 2gp package generation manifests Change pulled from https://github.com/SFDO-Tooling/CumulusCI/pull/3748 --- cumulusci/tasks/create_package_version.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cumulusci/tasks/create_package_version.py b/cumulusci/tasks/create_package_version.py index a26f3def04..7d72d40057 100644 --- a/cumulusci/tasks/create_package_version.py +++ b/cumulusci/tasks/create_package_version.py @@ -230,8 +230,7 @@ def _run_task(self): with convert_sfdx_source( self.project_config.default_package_path, None - if self.package_config.package_type == PackageTypeEnum.unlocked - else self.package_config.package_name, + None, self.logger, ) as path: package_zip_builder = MetadataPackageZipBuilder( From 2fa70a948e08d6539ea6110026c3f76acadaedc8 Mon Sep 17 00:00:00 2001 From: Alex Date: Thu, 9 May 2024 11:30:05 -0230 Subject: [PATCH 11/15] fix: added comma, removed additional None param --- cumulusci/tasks/create_package_version.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cumulusci/tasks/create_package_version.py b/cumulusci/tasks/create_package_version.py index 7d72d40057..d203db2b93 100644 --- a/cumulusci/tasks/create_package_version.py +++ 
b/cumulusci/tasks/create_package_version.py @@ -229,7 +229,6 @@ def _run_task(self): package_zip_builder = None with convert_sfdx_source( self.project_config.default_package_path, - None None, self.logger, ) as path: From 126da0fd4805e369c0def48677e85341b5b2134f Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Thu, 9 May 2024 09:07:14 -0500 Subject: [PATCH 12/15] Update create_package_version.py --- cumulusci/tasks/create_package_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cumulusci/tasks/create_package_version.py b/cumulusci/tasks/create_package_version.py index 7d72d40057..df768fe653 100644 --- a/cumulusci/tasks/create_package_version.py +++ b/cumulusci/tasks/create_package_version.py @@ -229,7 +229,7 @@ def _run_task(self): package_zip_builder = None with convert_sfdx_source( self.project_config.default_package_path, - None + None, None, self.logger, ) as path: From 31daa4ce12ec7874363504bc5d0010ed3cd388bf Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Thu, 9 May 2024 09:23:32 -0500 Subject: [PATCH 13/15] Set package name to None --- cumulusci/tasks/create_package_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cumulusci/tasks/create_package_version.py b/cumulusci/tasks/create_package_version.py index d203db2b93..b077f09b4c 100644 --- a/cumulusci/tasks/create_package_version.py +++ b/cumulusci/tasks/create_package_version.py @@ -234,7 +234,7 @@ def _run_task(self): ) as path: package_zip_builder = MetadataPackageZipBuilder( path=path, - name=self.package_config.package_name, + name=None, options=options, context=self.context, ) From 4f5f1f0d24539f137464c54e02332f214979b9ab Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Tue, 17 Sep 2024 17:39:40 -0500 Subject: [PATCH 14/15] Initial CumulusCI task for managing Scratch Org Snapshots via the OrgSnapshot object in the DevHub --- cumulusci/tasks/snapshot.py | 990 ++++++++++++++++++++++++++++++++++++ 1 file changed, 990 insertions(+) create mode 100644 cumulusci/tasks/snapshot.py diff --git a/cumulusci/tasks/snapshot.py b/cumulusci/tasks/snapshot.py new file mode 100644 index 0000000000..54f1c7b90b --- /dev/null +++ b/cumulusci/tasks/snapshot.py @@ -0,0 +1,990 @@ +import json +import os +import time +from datetime import datetime, timedelta +from dateutil.parser import parse +from cumulusci.core.exceptions import CumulusCIException, SalesforceException +from cumulusci.core.sfdx import sfdx +from cumulusci.core.utils import process_bool_arg +from cumulusci.salesforce_api.utils import get_simple_salesforce_connection +from cumulusci.tasks.salesforce import BaseSalesforceTask +from cumulusci.tasks.github.base import BaseGithubTask +from github3 import GitHubError +from pydantic import BaseModel, Field, validator +from simple_salesforce import Salesforce +from simple_salesforce.exceptions import SalesforceResourceNotFound +from rich.console import Console +from rich.table import Table + +ORG_SNAPSHOT_FIELDS = [ + "Id", + "SnapshotName", + "Description", + "Status", + "SourceOrg", + "CreatedDate", + "LastModifiedDate", + "ExpirationDate", + "Error", +] + + +import time +from rich.console import Console +from rich.progress import ( + Progress, + SpinnerColumn, + TextColumn, + BarColumn, + TimeElapsedColumn, +) +from rich.panel import Panel +from rich.live import Live + + +class SnapshotNameValidator(BaseModel): + base_name: str = Field(..., max_length=13) + + @classmethod + @validator("base_name") + def validate_name(cls, name): + if len(name) > 13: + raise ValueError("Snapshot name 
cannot exceed 13 characters") + if not name.isalnum(): + raise ValueError("Snapshot name must only contain alphanumeric characters") + return name + + +class SnapshotManager: + def __init__(self, devhub, logger): + self.devhub = devhub + self.logger = logger + self.existing_active_snapshot_id = None + self.temporary_snapshot_name = None + self.console = Console() + + def generate_temp_name(self, base_name: str, max_length: int = 14) -> str: + temp_name = f"{base_name}0" + if len(temp_name) > max_length: + temp_name = temp_name[:max_length] + self.logger.info(f"Generated temporary snapshot name: {temp_name}") + return temp_name + + def query_existing_active_snapshot(self, snapshot_name: str): + self.logger.info( + f"Checking for existing active snapshot with name: {snapshot_name}" + ) + query = f"SELECT Id FROM OrgSnapshot WHERE Status = 'Active' AND SnapshotName = '{snapshot_name}'" + result = self.devhub.query(query) + + if result["totalSize"] > 0: + self.existing_active_snapshot_id = result["records"][0]["Id"] + self.logger.info( + f"Found existing active snapshot: {self.existing_active_snapshot_id}" + ) + else: + self.logger.info(f"No active snapshot found with name {snapshot_name}") + + def query_and_delete_in_progress_snapshot(self, snapshot_name: str): + self.logger.info( + f"Checking for in-progress snapshot with name: {snapshot_name}" + ) + query = f"SELECT Id FROM OrgSnapshot WHERE Status in ('Active','InProgress') AND SnapshotName = '{snapshot_name}'" + result = self.devhub.query(query) + + if result["totalSize"] > 0: + snapshot_id = result["records"][0]["Id"] + self.logger.info( + f"Found in-progress snapshot {snapshot_id}, deleting it..." + ) + self.devhub.OrgSnapshot.delete(snapshot_id) + self.logger.info(f"Deleted in-progress snapshot: {snapshot_id}") + else: + self.logger.info(f"No in-progress snapshot found with name {snapshot_name}") + + def create_org_snapshot( + self, snapshot_name: str, description: str, source_org: str + ): + self.logger.info(f"Creating new org snapshot: {snapshot_name}") + snapshot_body = { + "Description": description, + "SnapshotName": snapshot_name, + "SourceOrg": source_org, + "Content": "metadatadata", + } + try: + snapshot_result = self.devhub.OrgSnapshot.create(snapshot_body) + snapshot_id = snapshot_result["id"] + self.logger.info(f"Org snapshot {snapshot_id} created.") + return snapshot_id + except SalesforceException as e: + if "NOT_FOUND" in str(e): + raise SnapshotError( + "Org snapshot feature is not enabled for this Dev Hub." + ) from e + raise + + def poll_for_completion( + self, + snapshot_id: str, + progress, + task, + timeout: int = 1200, + initial_poll_interval: int = 10, + ): + poll_interval = initial_poll_interval + start_time = time.time() + end_time = start_time + timeout + + while time.time() < end_time: + try: + snapshot = self.devhub.OrgSnapshot.get(snapshot_id) + except SalesforceResourceNotFound as exc: + raise SnapshotError( + "Snapshot not found. This usually happens because another build deleted the snapshot while it was being built." + ) from exc + + status = snapshot.get("Status") + progress.update( + task, description=f"[cyan]Creating snapshot... Status: {status}" + ) + + if status == "Active": + progress.update(task, completed=100) + self.logger.info(f"Snapshot {snapshot_id} completed successfully.") + return snapshot + if status == "Error": + progress.update(task, completed=100) + raise SnapshotError( + f"Snapshot {snapshot_id} failed to complete. 
Error: {snapshot.get('Error')}" + ) + + time.sleep(poll_interval) + elapsed = time.time() - start_time + progress.update(task, completed=min(int((elapsed / timeout) * 100), 100)) + poll_interval = min(poll_interval * 1.5, 30) + + raise TimeoutError( + f"Snapshot {snapshot_id} did not complete within {timeout} seconds." + ) + + def delete_snapshot(self, snapshot_id: str = None): + snapshot_id = snapshot_id or self.existing_active_snapshot_id + if snapshot_id: + self.logger.info(f"Deleting snapshot: {snapshot_id}") + self.devhub.OrgSnapshot.delete(snapshot_id) + self.logger.info(f"Deleted snapshot: {snapshot_id}") + + def rename_snapshot(self, snapshot_id: str, new_name: str): + self.logger.info(f"Renaming snapshot {snapshot_id} to {new_name}") + update_body = {"SnapshotName": new_name} + self.devhub.OrgSnapshot.update(snapshot_id, update_body) + self.logger.info(f"Snapshot {snapshot_id} renamed to {new_name}") + + def update_snapshot_from_org( + self, base_name: str, description: str, source_org: str, wait: bool = True + ): + with Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + BarColumn(), + TextColumn("[progress.percentage]{task.percentage:>3.0f}%"), + TimeElapsedColumn(), + console=self.console, + ) as progress: + task = progress.add_task("[green]Creating snapshot", total=100) + + # Step 1: Generate temporary name (5% progress) + progress.update( + task, advance=5, description="[green]Generating temporary name" + ) + temp_name = self.generate_temp_name(base_name) + + # Step 2: Check for existing snapshots (10% progress) + progress.update( + task, advance=5, description="[green]Checking for existing snapshots" + ) + self.query_existing_active_snapshot(base_name) + self.query_and_delete_in_progress_snapshot(temp_name) + + # Step 3: Create new snapshot (10% progress) + progress.update( + task, advance=10, description="[green]Creating new snapshot" + ) + snapshot_id = self.create_org_snapshot(temp_name, description, source_org) + + if not wait: + snapshot = self.devhub.OrgSnapshot.get(snapshot_id) + return snapshot + + # Step 4: Wait for snapshot to complete (60% progress) + progress.update(task, description="[green]Waiting for snapshot to complete") + snapshot = self.poll_for_completion(snapshot_id, progress, task) + + # Step 5: Finalize snapshot (10% progress) + progress.update(task, advance=10, description="[green]Finalizing snapshot") + self.delete_snapshot() # Deletes the existing active snapshot if it exists + self.rename_snapshot(snapshot_id, base_name) + + self.console.print( + Panel( + f"Snapshot {snapshot_id} created successfully!", + title="Snapshot Creation", + border_style="green", + ) + ) + self.logger.info(f"Snapshot management complete for {snapshot_id}") + return snapshot + + def finalize_temp_snapshot( + self, snapshot_name: str, description: str, snapshot_id: str + ): + with Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + BarColumn(), + TextColumn("[progress.percentage]{task.percentage:>3.0f}%"), + TimeElapsedColumn(), + console=self.console, + ) as progress: + task = progress.add_task("[green]Creating snapshot", total=100) + + # Step 1: Check for existing snapshots (10% progress) + progress.update( + task, + advance=10, + description=f"[green]Checking for existing active snapshot named {snapshot_name}", + ) + self.query_existing_active_snapshot(snapshot_name) + + # Step 2: Wait for snapshot to complete (60% progress) + progress.update(task, description="[green]Waiting for snapshot to 
complete") + snapshot = self.poll_for_completion(snapshot_id, progress, task) + + # Step 3: Finalize snapshot (30% progress) + progress.update(task, advance=30, description="[green]Finalizing snapshot") + self.delete_snapshot() + self.rename_snapshot(snapshot_id, snapshot_name) + return snapshot + + +class BaseDevhubTask(BaseSalesforceTask): + """Base class for tasks that need DevHub access.""" + + def _init_task(self): + super()._init_task() + self.devhub = self._get_devhub_api() + + def _get_devhub_api(self, base_url=None): + self.logger.info("Getting Dev Hub access token") + p = sfdx("config get target-dev-hub --json") + try: + devhub_username = json.loads(p.stdout_text.read())["result"][0]["value"] + except (json.JSONDecodeError, KeyError): + raise SnapshotError( + f"Failed to get Dev Hub username from sfdx: {p.stdout_text.read()}" + ) + + p = sfdx( + f"force:org:display --json", + username=devhub_username, + log_note="Getting Dev Hub org info", + ) + try: + devhub_info = json.loads(p.stdout_text.read())["result"] + except (json.JSONDecodeError, KeyError): + raise SnapshotError( + f"Failed to get Dev Hub information from sfdx: {p.stdout_text.read()}" + ) + + devhub = DevHubOrgConfig( + access_token=devhub_info["accessToken"], + instance_url=devhub_info["instanceUrl"], + ) + return get_simple_salesforce_connection( + project_config=self.project_config, + org_config=devhub, + api_version=self.api_version, + base_url=base_url, + ) + + +class SnapshotError(CumulusCIException): + pass + + +class DevHubOrgConfig(BaseModel): + access_token: str = Field(..., description="Access token for the Dev Hub org") + instance_url: str = Field(..., description="Instance URL for the Dev Hub org") + + +class BaseDevhubTask(BaseSalesforceTask): + """Base class for tasks that need DevHub access.""" + + def _init_task(self): + """Initialize the task and authenticate to DevHub.""" + super()._init_task() + self.devhub = self._get_devhub_api() + + def _get_devhub_api(self, base_url=None): + self.logger.info("Getting Dev Hub access token") + p = sfdx( + "config get target-dev-hub --json", + ) + try: + result = p.stdout_text.read() + data = json.loads(result) + devhub_username = data["result"][0]["value"] + except json.JSONDecodeError: + raise SnapshotError(f"Failed to parse SFDX output: {p.stdout_text.read()}") + except KeyError: + raise SnapshotError( + f"Failed to get Dev Hub username from sfdx. Please use `sfdx force:config:set target-dev-hub=` to set the target Dev Hub org." + ) + + p = sfdx( + f"force:org:display --json", + username=devhub_username, + log_note="Getting Dev Hub org info", + ) + + try: + devhub_info = json.loads(p.stdout_text.read()) + except json.JSONDecodeError: + raise SnapshotError(f"Failed to parse SFDX output: {p.stdout_text.read()}") + + if "result" not in devhub_info: + raise SnapshotError( + f"Failed to get Dev Hub information from sfdx: {devhub_info}" + ) + devhub = DevHubOrgConfig( + access_token=devhub_info["result"]["accessToken"], + instance_url=devhub_info["result"]["instanceUrl"], + ) + return get_simple_salesforce_connection( + project_config=self.project_config, + org_config=devhub, + api_version=self.api_version, + base_url=base_url, + ) + + +class CreateScratchOrgSnapshot(BaseSalesforceTask): + task_docs = """ + Creates a Scratch Org Snapshot using the Dev Hub org. + + **Requires** *`target-dev-hub` configured globally or for the project, used as the target Dev Hub org for Scratch Org Snapshots*. 
+ + Interacts directly with the OrgSnapshot object in the Salesforce API to fully automate the process of maintaining one active snapshot per snapshot name. + + *Snapshot Creation Process* + + - **Check for an existing `active` OrgSnapshot** with the same name and recording its ID + - **Check for an existing `in-progress` OrgSnapshot** with the same name and delete it, maintaining only one in-progress snapshot build + - **Create a temporary snapshot** under a temporary name with the provided description + - **Poll for completion** of the snapshot creation process + + *On Successful OrgSnapshot Completion* + + - Delete the existing snapshot + - Rename the snapshot to the desired name + - Report the snapshot details including the ID, status, and expiration date + + + """ + + temp_snapshot_suffix = "0" + + task_options = { + "snapshot_name": { + "description": "Name of the snapshot to create", + "required": True, + }, + "description": { + "description": "Description of the snapshot", + "required": False, + }, + } + # Peg to API Version 60.0 for OrgSnapshot object + api_version = "60.0" + salesforce_task = True + + def __init__(self, *args, **kwargs): + self.current_snapshot_id = None + self.temp_snapshot_name = None + self.sf = None + self.snapshot_id = None + self.start_time = None + super().__init__(*args, **kwargs) + + def _init_task(self): + self.sf = self._get_devhub_api() + + def _init_options(self, kwargs): + super()._init_options(kwargs) + max_length = 15 - len(self.temp_snapshot_suffix) + if len(self.options["snapshot_name"]) > max_length: + raise SnapshotError( + f"Snapshot name must be {max_length} characters or less" + ) + self.temp_snapshot_name = ( + f"{self.options['snapshot_name']}{self.temp_snapshot_suffix}" + ) + self.console = Console() + + def _run_task(self): + self.logger.info("Starting scratch org snapshot creation") + self._check_existing_snapshot() + self._create_snapshot() + self._poll() + self._rename_snapshot() + self._report_result() + + def _get_devhub_api(self, base_url=None): + self.logger.info("Getting Dev Hub access token") + p = sfdx( + "config get target-dev-hub --json", + ) + try: + devhub_username = json.loads(p.stdout_text.read())["result"][0]["value"] + except json.JSONDecodeError: + raise SnapshotError(f"Failed to parse SFDX output: {p.stdout_text.read()}") + except KeyError: + raise SnapshotError( + f"Failed to get Dev Hub username from sfdx: {p.stdout_text.read()}" + ) + + p = sfdx( + f"force:org:display --json", + username=devhub_username, + log_note="Getting Dev Hub org info", + ) + + try: + devhub_info = json.loads(p.stdout_text.read()) + except json.JSONDecodeError: + raise SnapshotError(f"Failed to parse SFDX output: {p.stdout_text.read()}") + + if "result" not in devhub_info: + raise SnapshotError( + f"Failed to get Dev Hub information from sfdx: {devhub_info}" + ) + devhub = DevHubOrgConfig( + access_token=devhub_info["result"]["accessToken"], + instance_url=devhub_info["result"]["instanceUrl"], + ) + return get_simple_salesforce_connection( + project_config=self.project_config, + org_config=devhub, + api_version=self.api_version, + base_url=base_url, + ) + + def _check_existing_snapshot(self): + query = f"SELECT Id, Status FROM OrgSnapshot WHERE SnapshotName = '{self.options['snapshot_name']}'" + result = self.sf.query(query) + if result["totalSize"] > 0: + snapshot = result["records"][0] + if snapshot["Status"] == "In Progress": + raise SnapshotError( + f"Snapshot '{self.options['snapshot_name']}' is already being created." 
+                )
+            else:
+                self.current_snapshot_id = snapshot["Id"]
+                self.logger.info(
+                    f"Found existing snapshot '{self.options['snapshot_name']}' with id {self.current_snapshot_id}"
+                )
+        query = f"SELECT Id FROM OrgSnapshot WHERE SnapshotName = '{self.temp_snapshot_name}'"
+        result = self.sf.query(query)
+        if result["totalSize"] > 0:
+            snapshot = result["records"][0]
+            self.logger.info(
+                f"Deleting in-progress snapshot '{self.temp_snapshot_name}'"
+            )
+            self.sf.OrgSnapshot.delete(snapshot["Id"])
+
+    def _create_snapshot(self):
+        self.logger.info(
+            f"Creating snapshot: {self.options['snapshot_name']} as {self.temp_snapshot_name}"
+        )
+        try:
+            snapshot = self.sf.OrgSnapshot.create(
+                {
+                    "SnapshotName": self.temp_snapshot_name,
+                    "Description": self.options.get("description"),
+                    "SourceOrg": self.org_config.org_id,
+                    "Content": "metadatadata",
+                }
+            )
+            self.snapshot_id = snapshot["id"]
+            self.start_time = time.time()
+        except SalesforceException as e:
+            if "NOT_FOUND" in str(e):
+                raise SnapshotError(
+                    "Org snapshot feature is not enabled for this Dev Hub."
+                ) from e
+            raise
+
+    def _rename_snapshot(self):
+        if self.current_snapshot_id:
+            self.logger.info(
+                f"Deleting existing snapshot '{self.options['snapshot_name']}'"
+            )
+            self.sf.OrgSnapshot.delete(self.current_snapshot_id)
+        self.logger.info(f"Renaming snapshot to '{self.options['snapshot_name']}'")
+        self.sf.OrgSnapshot.update(
+            self.snapshot_id, {"SnapshotName": self.options["snapshot_name"]}
+        )
+        self.logger.info(f"Snapshot renamed to '{self.options['snapshot_name']}'")
+
+    def _poll_action(self):
+        try:
+            snapshot = self.sf.OrgSnapshot.get(self.snapshot_id)
+        except SalesforceResourceNotFound as exc:
+            raise SnapshotError(
+                "Snapshot not found. This usually happens because another build deleted the snapshot while it was being built."
+ ) from exc + status = snapshot["Status"] + self.logger.info(f"Snapshot status: {status}") + if status == "Active": + self.poll_complete = True + elif status == "Error": + raise SnapshotError( + f"Snapshot creation failed: {snapshot.get('Error', 'Unknown error')}" + ) + + def _poll_update_interval(self): + super()._poll_update_interval() + if self.poll_interval_s > 30: + self.poll_interval_s = 30 + + def _report_result(self): + result = self.sf.query( + f"SELECT {','.join(ORG_SNAPSHOT_FIELDS)} FROM OrgSnapshot WHERE Id = '{self.snapshot_id}'" + ) + snapshot = result["records"][0] + self._print_snapshot_details(snapshot) + + end_time = time.time() + duration = end_time - self.start_time + duration_minutes = int(duration // 60) + duration_seconds = int(duration % 60) + duration_str = f"{duration_minutes}m and {duration_seconds}s" + self.logger.info(f"Snapshot created in {duration_str}") + + # Output to GitHub Actions Job Summary + summary_file = os.getenv("GITHUB_STEP_SUMMARY") + if summary_file: + with open(summary_file, "a") as f: + f.write(f"## Snapshot Creation Summary\n") + f.write(f"- **Duration**: {duration_str}\n") + f.write(f"- **Snapshot ID**: {self.snapshot_id}\n") + f.write(f"- **Fields**: {', '.join(ORG_SNAPSHOT_FIELDS)}\n") + f.write("\n### Snapshot Details\n") + for field in ORG_SNAPSHOT_FIELDS: + if field in snapshot: + value = snapshot[field] + if field in ["CreatedDate", "LastModifiedDate"]: + value = self._format_datetime(value) + elif field == "ExpirationDate": + value = self._format_date(value) + f.write(f"- **{field}**: {value}\n") + + def _print_snapshot_details(self, snapshot): + table = Table(title="Snapshot Details") + table.add_column("Field", style="cyan") + table.add_column("Value", style="magenta") + + for field in ORG_SNAPSHOT_FIELDS: + if field in snapshot: + value = snapshot[field] + if field in ["CreatedDate", "LastModifiedDate"]: + value = self._format_datetime(value) + elif field == "ExpirationDate": + value = self._format_date(value) + table.add_row(field, str(value)) + + self.console.print(table) + + def _format_datetime(self, date_string): + if date_string is None: + return "N/A" + dt = parse(date_string) + return dt.strftime("%Y-%m-%d %H:%M:%S") + + def _format_date(self, date_string): + if date_string is None: + return "N/A" + return datetime.strptime(date_string, "%Y-%m-%d").strftime("%Y-%m-%d") + + +class GithubPullRequestSnapshot(BaseGithubTask, BaseDevhubTask): + task_docs = """ + Creates a Scratch Org Snapshot for a GitHub Pull Request based on build status and conditions. + """ + + task_options = { + "wait": { + "description": "Whether to wait for the snapshot creation to complete. Defaults to True. If False, the task will return immediately after creating the snapshot. Use for running in a split workflow on GitHub. Looks for the GITHUB_OUTPUT environment variable and outputs SNAPSHOT_ID= to it if found for use in later steps.", + }, + "snapshot_id": { + "description": "The ID of the in-progress snapshot to wait for completion. If set, the task will wait for the snapshot to complete and update the existing snapshot with the new details. Use for the second step of a split workflow on GitHub.", + }, + "project_code": {"description": "Two-character project code", "required": True}, + "base_branch": { + "description": "Base branch for the pull request. Defaults to the default branch", + "required": False, + }, + "build_success": { + "description": "Set to True if the build was successful or False for a failure. 
Defaults to True.", + "required": True, + }, + "build_fail_tests": { + "description": "Whether the build failed due to test failures. Defaults to False", + "required": True, + }, + "commit_status_context": { + "description": "The context for the commit status check containing the snapshot name. If unset, no commit status is created.", + "required": False, + }, + "snapshot_is_packaged": { + "description": "Set to True if the snapshot is for a packaged build or False for unpackaged build", + "required": True, + }, + "snapshot_pr": { + "description": "Whether to create a snapshot for feature branches with PRs", + "required": False, + }, + "snapshot_pr_label": { + "description": "Limit snapshot creation to only PRs with this label", + "required": False, + }, + "snapshot_pr_draft": { + "description": "Whether to create snapshots for draft PRs", + "required": False, + }, + "snapshot_fail_pr": { + "description": "Whether to create snapshots for failed builds on branches with an open PR", + "required": False, + }, + "snapshot_fail_pr_label": { + "description": "Limit failure snapshot creation to only PRs with this label", + "required": False, + }, + "snapshot_fail_pr_draft": { + "description": "Whether to create snapshots for failed draft PR builds", + "required": False, + }, + "snapshot_fail_test_only": { + "description": "Whether to create snapshots only for test failures", + "required": False, + }, + "github_environment_prefix": { + "description": "If set, a GitHub Environment will be created for active snapshots using this prefix in the Environment name. If unset, no environment is created.", + "required": False, + }, + } + api_version = "60.0" + salesforce_task = True + + def _init_options(self, kwargs): + super()._init_options(kwargs) + self.options["build_success"] = process_bool_arg( + self.options.get("build_success", True) + ) + self.options["build_fail_tests"] = process_bool_arg( + self.options.get("build_fail_tests") + ) + self.options["commit_status_context"] = self.options.get( + "commit_status_context" + ) + self.options["wait"] = process_bool_arg(self.options.get("wait", True)) + self.options["snapshot_id"] = self.options.get("snapshot_id") + self.options["snapshot_is_packaged"] = process_bool_arg( + self.options.get("snapshot_is_packaged") + ) + self.options["snapshot_pr"] = process_bool_arg( + self.options.get("snapshot_pr", True) + ) + self.options["snapshot_pr_draft"] = process_bool_arg( + self.options.get("snapshot_pr_draft", False) + ) + self.options["snapshot_fail_pr"] = process_bool_arg( + self.options.get("snapshot_fail_pr", True) + ) + self.options["snapshot_fail_pr_draft"] = process_bool_arg( + self.options.get("snapshot_fail_pr_draft", False) + ) + self.options["snapshot_fail_test_only"] = process_bool_arg( + self.options.get("snapshot_fail_test_only", False) + ) + self.options["snapshot_pr_label"] = self.options.get("snapshot_pr_label") + self.options["snapshot_fail_pr_label"] = self.options.get( + "snapshot_fail_pr_label" + ) + self.options["base_branch"] = self.options.get( + "base_branch", self.project_config.project__git__default_branch + ) + self.options["github_environment_prefix"] = self.options.get( + "github_environment_prefix" + ) + + self.console = Console() + + def _init_task(self): + super()._init_task() + self.repo = self.get_repo() + + def _run_task(self): + pr = self._get_pull_request() + + if self._should_create_snapshot(pr): + snapshot_name = self._generate_snapshot_name(pr) + description = self._generate_snapshot_description(pr) + + snapshot_manager 
= SnapshotManager(self.devhub, self.logger) + try: + if self.options["snapshot_id"]: + snapshot = snapshot_manager.finalize_temp_snapshot( + snapshot_name=snapshot_name, + description=description, + snapshot_id=self.options["snapshot_id"], + ) + else: + snapshot = snapshot_manager.update_snapshot_from_org( + base_name=snapshot_name, + description=description, + source_org=self.org_config.org_id, + wait=self.options["wait"], + ) + except SnapshotError as e: + self.console.print( + Panel( + f"Failed to create snapshot: {str(e)}", + title="Snapshot Creation", + border_style="red", + ) + ) + if self.options["commit_status_context"]: + self._create_commit_status(snapshot_name, "error") + raise + + self.return_values["snapshot_id"] = snapshot.get("Id") + self.return_values["snapshot_name"] = snapshot.get("SnapshotName") + self.return_values["snapshot_description"] = snapshot.get("Description") + self.return_values["snapshot_status"] = snapshot.get("Status") + + self._report_result(snapshot) + if self.options["wait"] is False: + if os.getenv("GITHUB_OUTPUT"): + with open(os.getenv("GITHUB_OUTPUT"), "w") as f: + f.write(f"SNAPSHOT_ID={snapshot['Id']}") + return True + + if self.options["commit_status_context"]: + active = self.return_values["snapshot_status"] == "Active" + self._create_commit_status( + snapshot_name=( + snapshot_name + if active + else f"{snapshot_name} ({self.return_values['snapshot_status']})" + ), + state="success" if active else "error", + ) + if self.options["github_environment_prefix"]: + self._create_github_environment(snapshot_name) + + else: + if self.options.get("snapshot_id"): + self.logger.warning( + "In-progress snapshot does not meet conditions for finalization. Deleting..." + ) + + self.console.print( + Panel( + f"No snapshot creation required based on current conditions. 
{self.return_values.get('skip_reason','')}",
+                    title="Snapshot Creation",
+                    border_style="yellow",
+                )
+            )
+
+    def _get_pull_request(self):
+        res = self.repo.pull_requests()
+        for pr in res:
+            # For some reason, github3.py or the GitHub API is returning non-matching PRs
+            # This is a workaround to ensure we get the correct PR
+            self.logger.info(
+                f"Checking PR: {pr.number} [{pr.state}] {pr.head.ref} -> {pr.base.ref}"
+            )
+            if (
+                pr.state == "open"
+                and pr.head.ref == self.project_config.repo_branch
+                and pr.base.ref == self.options["base_branch"]
+            ):
+                return pr
+
+    def _should_create_snapshot(self, pr):
+        is_pr = pr is not None
+        self.return_values["has_pr"] = is_pr
+        is_draft = pr.draft if is_pr else False
+        self.return_values["pr_is_draft"] = is_draft
+        pr_labels = [label["name"] for label in pr.labels] if is_pr else []
+        has_snapshot_label = self.options["snapshot_pr_label"] in pr_labels
+        has_snapshot_fail_label = self.options["snapshot_fail_pr_label"] in pr_labels
+        self.return_values["pr_has_snapshot_label"] = has_snapshot_label
+        self.return_values["pr_has_snapshot_fail_label"] = has_snapshot_fail_label
+
+        if self.options["build_success"] is True:
+            if not self.options["snapshot_pr"]:
+                self.return_values["skip_reason"] = "snapshot_pr is False"
+                return False
+            elif not is_pr:
+                self.return_values["skip_reason"] = "No pull request on the branch"
+                return False
+            elif self.options["snapshot_pr_label"] and not has_snapshot_label:
+                self.return_values["skip_reason"] = (
+                    "Pull request does not have snapshot label"
+                )
+                return False
+            elif is_draft and not self.options["snapshot_pr_draft"]:
+                self.return_values["skip_reason"] = (
+                    "Pull request is draft and snapshot_pr_draft is False"
+                )
+                return False
+            return True
+        else:
+            if is_pr:
+                return (
+                    self.options["snapshot_fail_pr"]
+                    and (not is_draft or self.options["snapshot_fail_pr_draft"])
+                    and (
+                        not self.options["snapshot_fail_pr_label"]
+                        or has_snapshot_fail_label
+                    )
+                    and (
+                        # Only require a test failure when snapshot_fail_test_only is set
+                        not self.options["snapshot_fail_test_only"]
+                        or self.options["build_fail_tests"]
+                    )
+                )
+            else:
+                return True
+
+    def _generate_snapshot_name(self, pr):
+        project_code = self.options["project_code"]
+        pr_number = pr.number if pr else "NoPR"
+
+        if self.options["build_success"] is True:
+            return f"{project_code}Pr{pr_number}M"
+        else:
+            if self.options["snapshot_fail_test_only"]:
+                return f"{project_code}FTest{pr_number}M"
+            else:
+                return f"{project_code}Fail{pr_number}M"
+
+    def _generate_snapshot_description(self, pr):
+        if self.options["build_success"] is True:
+            return f"Snapshot for PR #{pr.number}: {pr.title} of branch {self.project_config.repo_branch} for commit {self.project_config.repo_commit}"
+        else:
+            return f"Snapshot for failed build on PR #{pr.number}: {pr.title} of branch {self.project_config.repo_branch} for commit {self.project_config.repo_commit}"
+
+    def _report_result(self, snapshot):
+        table = Table(title="Snapshot Details", border_style="cyan")
+        table.add_column("Field", style="cyan")
+        table.add_column("Value", style="magenta")
+
+        for field in [
+            "Id",
+            "SnapshotName",
+            "Status",
+            "Description",
+            "CreatedDate",
+            "ExpirationDate",
+        ]:
+            value = snapshot.get(field, "N/A")
+            if field in ["CreatedDate", "ExpirationDate"]:
+                value = self._format_datetime(value)
+            table.add_row(field, str(value))
+
+        self.console.print(table)
+
+        # Output to GitHub Actions Job Summary
+        summary_file = os.getenv("GITHUB_STEP_SUMMARY")
+        if summary_file:
+            with open(summary_file, "a") as f:
+                f.write(f"## Snapshot Creation 
Summary\n") + f.write(f"- **Snapshot ID**: {snapshot.get('Id')}\n") + f.write(f"- **Snapshot Name**: {snapshot.get('SnapshotName')}\n") + f.write(f"- **Status**: {snapshot.get('Status')}\n") + f.write(f"- **Description**: {snapshot.get('Description')}\n") + f.write( + f"- **Created Date**: {self._format_datetime(snapshot.get('CreatedDate'))}\n" + ) + f.write( + f"- **Expiration Date**: {self._format_datetime(snapshot.get('ExpirationDate'))}\n" + ) + + def _format_datetime(self, date_string): + if date_string is None: + return "N/A" + dt = parse(date_string) + return dt.strftime("%Y-%m-%d %H:%M:%S") + + def _create_commit_status(self, snapshot_name, state): + try: + description = f"Snapshot: {snapshot_name}" + self.repo.create_status( + self.project_config.repo_commit, + state, + target_url=os.environ.get("JOB_URL"), + description=description, + context=self.options["commit_status_context"], + ) + except GitHubError as e: + self.logger.error(f"Failed to create commit status: {str(e)}") + self.console.print( + Panel( + f"Failed to create commit status: {str(e)}", + title="Commit Status", + border_style="red", + ) + ) + + def _create_github_environment(self, snapshot_name): + try: + environment_name = ( + f"{self.options['github_environment_prefix']}{snapshot_name}" + ) + + # Check if environment already exists + resp = self.repo._get(f"{self.repo.url}/environments/{environment_name}") + if resp.status_code == 404: + self.logger.info(f"Creating new environment: {environment_name}") + resp = self.repo._put( + f"{self.repo.url}/environments/{environment_name}", + ) + resp.raise_for_status() + self.logger.info(f"Created new environment: {environment_name}") + else: + self.logger.info(f"Environment '{environment_name}' already exists.") + + environment = resp.json() + + self.console.print( + Panel( + f"GitHub Environment '{environment_name}' created/updated successfully!", + title="Environment Creation", + border_style="green", + ) + ) + + except Exception as e: + self.logger.error(f"Failed to create/update GitHub Environment: {str(e)}") + self.console.print( + Panel( + f"Failed to create/update GitHub Environment: {str(e)}", + title="Environment Creation", + border_style="red", + ) + ) + raise From 726dd81bc1917bdf65a41529f91c78b48774a85e Mon Sep 17 00:00:00 2001 From: Jason Lantz Date: Tue, 17 Sep 2024 18:14:39 -0500 Subject: [PATCH 15/15] Added `create_snapshot` and `github_pull_request_snapshot` tasks to universal config and added `snapshot_context` field to git schema --- cumulusci/cumulusci.yml | 18 ++++++++++++++++++ cumulusci/schema/cumulusci.jsonschema.json | 4 ++++ cumulusci/utils/yaml/cumulusci_yml.py | 1 + 3 files changed, 23 insertions(+) diff --git a/cumulusci/cumulusci.yml b/cumulusci/cumulusci.yml index 0eb32d63da..4c86aa46b2 100644 --- a/cumulusci/cumulusci.yml +++ b/cumulusci/cumulusci.yml @@ -193,6 +193,10 @@ tasks: user_permissions: - PermissionsBulkApiHardDelete - PermissionsCreateAuditFields + create_snapshot: + group: Org Snapshots + description: Create a Scratch Org Snapshot of the current scratch org in the default SF CLI devhub + class_path: cumulusci.tasks.snapshot.CreateScratchOrgSnapshot create_unmanaged_ee_src: description: Modifies the src directory for unmanaged deployment to an EE org class_path: cumulusci.tasks.metadata.ee_src.CreateUnmanagedEESrc @@ -375,6 +379,19 @@ tasks: description: Look up 2gp package dependencies for a version id recorded in a commit status. 
class_path: cumulusci.tasks.github.commit_status.GetPackageDataFromCommitStatus group: GitHub + github_pull_request_snapshot: + description: Automate management of named Scratch Org Snapshots for feature branches with pull requests and additional conditions + class_path: cumulusci.tasks.snapshot.GithubPullRequestSnapshot + group: Org Snapshots + options: + snapshot_is_packaged: False + snapshot_pr: True + snapshot_pr_label: snapshot + snapshot_pr_draft: False + snapshot_fail_pr: False + snapshot_fail_pr_label: snapshot-fail + snapshot_fail_pr_draft: False + wait: True github_pull_requests: description: Lists open pull requests in project Github repository class_path: cumulusci.tasks.github.PullRequests @@ -1506,6 +1523,7 @@ project: push_prefix_production: "Production orgs: " 2gp_context: "Build Feature Test Package" unlocked_context: "Build Unlocked Test Package" + snapshot_context: Snapshot release_notes: parsers: 1: diff --git a/cumulusci/schema/cumulusci.jsonschema.json b/cumulusci/schema/cumulusci.jsonschema.json index 255d2de4d4..7c9ea9d04a 100644 --- a/cumulusci/schema/cumulusci.jsonschema.json +++ b/cumulusci/schema/cumulusci.jsonschema.json @@ -324,6 +324,10 @@ "unlocked_context": { "title": "Unlocked Context", "type": "string" + }, + "snapshot_context": { + "title": "Snapshot Context", + "type": "string" } }, "additionalProperties": false diff --git a/cumulusci/utils/yaml/cumulusci_yml.py b/cumulusci/utils/yaml/cumulusci_yml.py index f8498ed9ea..d0e624dd51 100644 --- a/cumulusci/utils/yaml/cumulusci_yml.py +++ b/cumulusci/utils/yaml/cumulusci_yml.py @@ -110,6 +110,7 @@ class Git(CCIDictModel): release_notes: ReleaseNotes = None two_gp_context: str = Field(None, alias="2gp_context") unlocked_context: Optional[str] = None + snapshot_context: Optional[str] = None class Plan(CCIDictModel): # MetaDeploy plans
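
For illustration only, a project consuming these tasks might override the `github_pull_request_snapshot` defaults in its own cumulusci.yml. The task and option names below come from the task definitions above; the project code, labels, and context values are placeholders, not part of these patches:

    tasks:
        github_pull_request_snapshot:
            options:
                project_code: NP
                snapshot_pr_label: snapshot
                snapshot_fail_pr: True
                snapshot_fail_pr_label: snapshot-fail
                snapshot_fail_test_only: True
                commit_status_context: Snapshot
                github_environment_prefix: "snapshot-"

A CI job for a failed build could then run something like `cci task run github_pull_request_snapshot --build_success False --build_fail_tests True --snapshot_is_packaged False --org dev`, assuming `dev` names the scratch org that was just built.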