diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py
index b81253c38f..ee3dd16185 100644
--- a/samcli/commands/package/command.py
+++ b/samcli/commands/package/command.py
@@ -15,6 +15,7 @@
     kms_key_id_option,
     metadata_option,
     no_progressbar_option,
+    resolve_image_repos_option,
     resolve_s3_option,
     s3_bucket_option,
     s3_prefix_option,
@@ -86,13 +87,14 @@ def resources_and_properties_help_string():
 @use_json_option
 @force_upload_option
 @resolve_s3_option
+@resolve_image_repos_option
 @metadata_option
 @signing_profiles_option
 @no_progressbar_option
 @common_options
 @aws_creds_options
 @save_params_option
-@image_repository_validation(support_resolve_image_repos=False)
+@image_repository_validation(support_resolve_image_repos=True)
 @pass_context
 @track_command
 @check_newer_version
@@ -115,6 +117,7 @@ def cli(
     metadata,
     signing_profiles,
     resolve_s3,
+    resolve_image_repos,
     save_params,
     config_file,
     config_env,
@@ -140,6 +143,7 @@ def cli(
         ctx.region,
         ctx.profile,
         resolve_s3,
+        resolve_image_repos,
     )  # pragma: no cover


@@ -159,17 +163,22 @@ def do_cli(
     region,
     profile,
     resolve_s3,
+    resolve_image_repos,
 ):
     """
     Implementation of the ``cli`` method
     """
+    from samcli.commands.package.exceptions import PackageResolveS3AndS3NotSetError
     from samcli.commands.package.package_context import PackageContext

     if resolve_s3:
         s3_bucket = manage_stack(profile=profile, region=region)
         print_managed_s3_bucket_info(s3_bucket)

+    if resolve_image_repos and not s3_bucket:
+        raise PackageResolveS3AndS3NotSetError()
+
     with PackageContext(
         template_file=template_file,
         s3_bucket=s3_bucket,
@@ -185,5 +194,6 @@ def do_cli(
         region=region,
         profile=profile,
         signing_profiles=signing_profiles,
+        resolve_image_repos=resolve_image_repos,
     ) as package_context:
         package_context.run()
diff --git a/samcli/commands/package/core/options.py b/samcli/commands/package/core/options.py
index 37ea91379e..c0de5e0ed7 100644
--- a/samcli/commands/package/core/options.py
+++ b/samcli/commands/package/core/options.py
@@ -15,6 +15,7 @@
 INFRASTRUCTURE_OPTION_NAMES: List[str] = [
     "s3_prefix",
+    "resolve_image_repos",
     "image_repository",
     "image_repositories",
     "kms_key_id",
diff --git a/samcli/commands/package/package_context.py b/samcli/commands/package/package_context.py
index 4a64981745..1a2a432ba0 100644
--- a/samcli/commands/package/package_context.py
+++ b/samcli/commands/package/package_context.py
@@ -23,6 +23,7 @@
 import click

 from samcli.commands.package.exceptions import PackageFailedError
+from samcli.lib.bootstrap.companion_stack.companion_stack_manager import sync_ecr_stack
 from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable
 from samcli.lib.package.artifact_exporter import Template
 from samcli.lib.package.code_signer import CodeSigner
@@ -71,6 +72,7 @@ def __init__(
         parameter_overrides=None,
         on_deploy=False,
         signing_profiles=None,
+        resolve_image_repos=False,
     ):
         self.template_file = template_file
         self.s3_bucket = s3_bucket
@@ -89,6 +91,7 @@ def __init__(
         self.code_signer = None
         self.signing_profiles = signing_profiles
         self.parameter_overrides = parameter_overrides
+        self.resolve_image_repos = resolve_image_repos
         self._global_parameter_overrides = {IntrinsicsSymbolTable.AWS_REGION: region} if region else {}

     def __enter__(self):
@@ -101,6 +104,14 @@ def run(self):
         """
         Execute packaging based on the argument provided by customers and samconfig.toml.
""" + if self.resolve_image_repos: + template_basename = os.path.splitext(os.path.basename(self.template_file))[0] + stack_name = f"sam-app-{template_basename}" + + self.image_repositories = sync_ecr_stack( + self.template_file, stack_name, self.region, self.s3_bucket, self.s3_prefix, self.image_repositories + ) + stacks, _ = SamLocalStackProvider.get_stacks( self.template_file, global_parameter_overrides=self._global_parameter_overrides, diff --git a/schema/samcli.json b/schema/samcli.json index 84ae577f12..343a22b829 100644 --- a/schema/samcli.json +++ b/schema/samcli.json @@ -1118,7 +1118,7 @@ "properties": { "parameters": { "title": "Parameters for the package command", - "description": "Available parameters for the package command:\n* template_file:\nAWS SAM template which references built artifacts for resources in the template. (if applicable)\n* output_template_file:\nThe path to the file where the command writes the output AWS CloudFormation template. If you don't specify a path, the command writes the template to the standard output.\n* s3_bucket:\nAWS S3 bucket where artifacts referenced in the template are uploaded.\n* image_repository:\nAWS ECR repository URI where artifacts referenced in the template are uploaded.\n* image_repositories:\nMapping of Function Logical ID to AWS ECR Repository URI.\n\nExample: Function_Logical_ID=ECR_Repo_Uri\nThis option can be specified multiple times.\n* s3_prefix:\nPrefix name that is added to the artifact's name when it is uploaded to the AWS S3 bucket.\n* kms_key_id:\nThe ID of an AWS KMS key that is used to encrypt artifacts that are at rest in the AWS S3 bucket.\n* use_json:\nIndicates whether to use JSON as the format for the output AWS CloudFormation template. YAML is used by default.\n* force_upload:\nIndicates whether to override existing files in the S3 bucket. Specify this flag to upload artifacts even if they match existing artifacts in the S3 bucket.\n* resolve_s3:\nAutomatically resolve AWS S3 bucket for non-guided deployments. Enabling this option will also create a managed default AWS S3 bucket for you. If one does not provide a --s3-bucket value, the managed bucket will be used. Do not use --guided with this option.\n* metadata:\nMap of metadata to attach to ALL the artifacts that are referenced in the template.\n* signing_profiles:\nA string that contains Code Sign configuration parameters as FunctionOrLayerNameToSign=SigningProfileName:SigningProfileOwner Since signing profile owner is optional, it could also be written as FunctionOrLayerNameToSign=SigningProfileName\n* no_progressbar:\nDoes not showcase a progress bar when uploading artifacts to S3 and pushing docker images to ECR\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* save_params:\nSave the parameters provided via the command line to the configuration file.", + "description": "Available parameters for the package command:\n* template_file:\nAWS SAM template which references built artifacts for resources in the template. (if applicable)\n* output_template_file:\nThe path to the file where the command writes the output AWS CloudFormation template. 
If you don't specify a path, the command writes the template to the standard output.\n* s3_bucket:\nAWS S3 bucket where artifacts referenced in the template are uploaded.\n* image_repository:\nAWS ECR repository URI where artifacts referenced in the template are uploaded.\n* image_repositories:\nMapping of Function Logical ID to AWS ECR Repository URI.\n\nExample: Function_Logical_ID=ECR_Repo_Uri\nThis option can be specified multiple times.\n* s3_prefix:\nPrefix name that is added to the artifact's name when it is uploaded to the AWS S3 bucket.\n* kms_key_id:\nThe ID of an AWS KMS key that is used to encrypt artifacts that are at rest in the AWS S3 bucket.\n* use_json:\nIndicates whether to use JSON as the format for the output AWS CloudFormation template. YAML is used by default.\n* force_upload:\nIndicates whether to override existing files in the S3 bucket. Specify this flag to upload artifacts even if they match existing artifacts in the S3 bucket.\n* resolve_s3:\nAutomatically resolve AWS S3 bucket for non-guided deployments. Enabling this option will also create a managed default AWS S3 bucket for you. If one does not provide a --s3-bucket value, the managed bucket will be used. Do not use --guided with this option.\n* resolve_image_repos:\nAutomatically create and delete ECR repositories for image-based functions in non-guided deployments. A companion stack containing ECR repos for each function will be deployed along with the template stack. Automatically created image repositories will be deleted if the corresponding functions are removed.\n* metadata:\nMap of metadata to attach to ALL the artifacts that are referenced in the template.\n* signing_profiles:\nA string that contains Code Sign configuration parameters as FunctionOrLayerNameToSign=SigningProfileName:SigningProfileOwner Since signing profile owner is optional, it could also be written as FunctionOrLayerNameToSign=SigningProfileName\n* no_progressbar:\nDoes not showcase a progress bar when uploading artifacts to S3 and pushing docker images to ECR\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* save_params:\nSave the parameters provided via the command line to the configuration file.", "type": "object", "properties": { "template_file": { @@ -1175,6 +1175,11 @@ "type": "boolean", "description": "Automatically resolve AWS S3 bucket for non-guided deployments. Enabling this option will also create a managed default AWS S3 bucket for you. If one does not provide a --s3-bucket value, the managed bucket will be used. Do not use --guided with this option." }, + "resolve_image_repos": { + "title": "resolve_image_repos", + "type": "boolean", + "description": "Automatically create and delete ECR repositories for image-based functions in non-guided deployments. A companion stack containing ECR repos for each function will be deployed along with the template stack. Automatically created image repositories will be deleted if the corresponding functions are removed." 
+              },
               "metadata": {
                 "title": "metadata",
                 "type": "string",
@@ -2202,11 +2207,12 @@
                 "title": "parameter_overrides",
                 "type": [
                   "array",
+                  "object",
                   "string"
                 ],
                 "description": "String that contains AWS CloudFormation parameter overrides encoded as key=value pairs.",
                 "items": {
-                  "type": "string"
+                  "$ref": "#/$defs/parameter_overrides_items"
                 }
               },
               "stack_name": {
@@ -2271,18 +2277,6 @@
             "description": "Available parameters for the list stack outputs command:\n* stack_name:\nName of corresponding deployed stack.\n* output:\nOutput the results from the command in a given output format (json or table).\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.\n* save_params:\nSave the parameters provided via the command line to the configuration file.",
             "type": "object",
             "properties": {
-              "parameter_overrides": {
-                "title": "parameter_overrides",
-                "type": [
-                  "array",
-                  "object",
-                  "string"
-                ],
-                "description": "String that contains AWS CloudFormation parameter overrides encoded as key=value pairs.",
-                "items": {
-                  "$ref": "#/$defs/parameter_overrides_items"
-                }
-              },
               "stack_name": {
                 "title": "stack_name",
                 "type": "string",
diff --git a/tests/integration/package/package_integ_base.py b/tests/integration/package/package_integ_base.py
index 990a4e85bf..725e2bb544 100644
--- a/tests/integration/package/package_integ_base.py
+++ b/tests/integration/package/package_integ_base.py
@@ -99,6 +99,7 @@ def get_command_list(
         image_repository=None,
         image_repositories=None,
         resolve_s3=False,
+        resolve_image_repos=False,
     ):
         command_list = [get_sam_command(), "package"]

@@ -130,6 +131,8 @@ def get_command_list(
             command_list = command_list + ["--image-repositories", str(image_repositories)]
         if resolve_s3:
             command_list = command_list + ["--resolve-s3"]
+        if resolve_image_repos:
+            command_list = command_list + ["--resolve-image-repos"]
         return command_list

     def _method_to_stack_name(self, method_name):
diff --git a/tests/integration/package/test_package_command_image.py b/tests/integration/package/test_package_command_image.py
index ddc0671e2b..0b299581ac 100644
--- a/tests/integration/package/test_package_command_image.py
+++ b/tests/integration/package/test_package_command_image.py
@@ -349,3 +349,33 @@ def test_package_with_nonloadable_image_archive(self, template_file):

         self.assertEqual(1, process.returncode)
         self.assertIn("unexpected EOF", process_stderr.decode("utf-8"))
+
+    @parameterized.expand(
+        [
+            "aws-serverless-function-image.yaml",
+            "aws-lambda-function-image.yaml",
+        ]
+    )
+    def test_package_template_with_resolve_image_repos(self, template_file):
+
+        template_path = self.test_data_path.joinpath(template_file)
+        command_list = PackageIntegBase.get_command_list(
+            s3_bucket=self.bucket_name,
+            template=template_path,
+            resolve_image_repos=True,
+        )
+
+        process = Popen(command_list, stdout=PIPE, stderr=PIPE)
+        try:
+            stdout, stderr = process.communicate(timeout=TIMEOUT)
+        except TimeoutExpired:
+            process.kill()
+            raise
+
+        process_stdout = stdout.strip().decode("utf-8")
+        process_stderr = stderr.strip().decode("utf-8")
+        self.assertEqual(0, process.returncode, f"Command failed. Stderr: {process_stderr}")
+        # Verify ECR repository URI is in the output (auto-created repository)
+        # The output should contain an ECR repository URI pattern
+        ecr_uri_pattern = r"\d+\.dkr\.ecr\.[a-z0-9-]+\.amazonaws\.com/"
+        self.assertRegex(process_stdout, ecr_uri_pattern, "Expected ECR repository URI in packaged template")
diff --git a/tests/unit/commands/package/test_command.py b/tests/unit/commands/package/test_command.py
index 4b83ec35bf..047320c5cc 100644
--- a/tests/unit/commands/package/test_command.py
+++ b/tests/unit/commands/package/test_command.py
@@ -1,7 +1,8 @@
 from unittest import TestCase
 from unittest.mock import patch, Mock

-from samcli.commands.package.command import do_cli
+from samcli.commands.package.command import do_cli, resources_and_properties_help_string
+from samcli.commands.package.exceptions import PackageResolveS3AndS3NotSetError


 class TestPackageCliCommand(TestCase):
@@ -20,6 +21,7 @@ def setUp(self):
         self.region = None
         self.profile = None
         self.resolve_s3 = False
+        self.resolve_image_repos = False
         self.signing_profiles = {"MyFunction": {"profile_name": "ProfileName", "profile_owner": "Profile Owner"}}

     @patch("samcli.commands.package.command.click")
@@ -43,6 +45,7 @@ def test_all_args(self, package_command_context, click_mock):
             region=self.region,
             profile=self.profile,
             resolve_s3=self.resolve_s3,
+            resolve_image_repos=self.resolve_image_repos,
             signing_profiles=self.signing_profiles,
         )

@@ -61,6 +64,7 @@ def test_all_args(self, package_command_context, click_mock):
             region=self.region,
             profile=self.profile,
             signing_profiles=self.signing_profiles,
+            resolve_image_repos=self.resolve_image_repos,
         )

         context_mock.run.assert_called_with()
@@ -89,6 +93,7 @@ def test_all_args_resolve_s3(self, mock_managed_stack, package_command_context,
             region=self.region,
             profile=self.profile,
             resolve_s3=True,
+            resolve_image_repos=False,
             signing_profiles=self.signing_profiles,
         )

@@ -107,7 +112,84 @@ def test_all_args_resolve_s3(self, mock_managed_stack, package_command_context,
             region=self.region,
             profile=self.profile,
             signing_profiles=self.signing_profiles,
+            resolve_image_repos=False,
         )

         context_mock.run.assert_called_with()
         self.assertEqual(context_mock.run.call_count, 1)
+
+    @patch("samcli.commands.package.command.click")
+    @patch("samcli.commands.package.package_context.PackageContext")
+    def test_resolve_image_repos_without_s3_bucket_raises_error(self, package_command_context, click_mock):
+        with self.assertRaises(PackageResolveS3AndS3NotSetError):
+            do_cli(
+                template_file=self.template_file,
+                s3_bucket=None,
+                s3_prefix=self.s3_prefix,
+                image_repository=None,
+                image_repositories=None,
+                kms_key_id=self.kms_key_id,
+                output_template_file=self.output_template_file,
+                use_json=self.use_json,
+                force_upload=self.force_upload,
+                no_progressbar=self.no_progressbar,
+                metadata=self.metadata,
+                region=self.region,
+                profile=self.profile,
+                resolve_s3=False,
+                resolve_image_repos=True,
+                signing_profiles=self.signing_profiles,
+            )
+
+    @patch("samcli.commands.package.command.click")
+    @patch("samcli.commands.package.package_context.PackageContext")
+    def test_all_args_with_resolve_image_repos(self, package_command_context, click_mock):
+        context_mock = Mock()
+        package_command_context.return_value.__enter__.return_value = context_mock
+
+        do_cli(
+            template_file=self.template_file,
+            s3_bucket=self.s3_bucket,
+            s3_prefix=self.s3_prefix,
+            image_repository=None,
+            image_repositories=None,
+            kms_key_id=self.kms_key_id,
+            output_template_file=self.output_template_file,
+            use_json=self.use_json,
+            force_upload=self.force_upload,
+            no_progressbar=self.no_progressbar,
+            metadata=self.metadata,
+            region=self.region,
+            profile=self.profile,
+            resolve_s3=False,
+            resolve_image_repos=True,
+            signing_profiles=self.signing_profiles,
+        )
+
+        package_command_context.assert_called_with(
+            template_file=self.template_file,
+            s3_bucket=self.s3_bucket,
+            s3_prefix=self.s3_prefix,
+            image_repository=None,
+            image_repositories=None,
+            kms_key_id=self.kms_key_id,
+            output_template_file=self.output_template_file,
+            use_json=self.use_json,
+            force_upload=self.force_upload,
+            no_progressbar=self.no_progressbar,
+            metadata=self.metadata,
+            region=self.region,
+            profile=self.profile,
+            signing_profiles=self.signing_profiles,
+            resolve_image_repos=True,
+        )
+
+        context_mock.run.assert_called_with()
+        self.assertEqual(context_mock.run.call_count, 1)
+
+    def test_resources_and_properties_help_string(self):
+        # Test that the help string generator works
+        help_string = resources_and_properties_help_string()
+        self.assertIsInstance(help_string, str)
+        # Should contain resource and location information
+        self.assertTrue(len(help_string) > 0)
diff --git a/tests/unit/commands/package/test_package_context.py b/tests/unit/commands/package/test_package_context.py
index 3608947c11..5449381b4f 100644
--- a/tests/unit/commands/package/test_package_context.py
+++ b/tests/unit/commands/package/test_package_context.py
@@ -387,3 +387,45 @@ def test_multiple_docker_client_accesses_only_validate_once(self, patched_boto,
         self.assertEqual(docker_client1, docker_client2)
         self.assertEqual(docker_client2, docker_client3)
         self.assertEqual(docker_client1, mock_docker_client)
+
+    @patch("samcli.commands.package.package_context.sync_ecr_stack")
+    @patch("samcli.lib.package.ecr_uploader.get_validated_container_client")
+    @patch.object(ResourceMetadataNormalizer, "normalize", MagicMock())
+    @patch.object(Template, "export", MagicMock(return_value={}))
+    @patch("boto3.client")
+    def test_package_with_resolve_image_repos(self, patched_boto, mock_get_validated_client, mock_sync_ecr_stack):
+        # Mock the docker client
+        docker_client_mock = Mock()
+        mock_get_validated_client.return_value = docker_client_mock
+
+        # Mock sync_ecr_stack to return image repositories
+        expected_repos = {"Function1": "123456789012.dkr.ecr.us-east-1.amazonaws.com/repo1"}
+        mock_sync_ecr_stack.return_value = expected_repos
+
+        with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".yaml") as temp_template_file:
+            package_command_context = PackageContext(
+                template_file=temp_template_file.name,
+                s3_bucket="s3-bucket",
+                s3_prefix="s3-prefix",
+                image_repository=None,
+                image_repositories=None,
+                kms_key_id="kms-key-id",
+                output_template_file=None,
+                use_json=True,
+                force_upload=True,
+                no_progressbar=False,
+                metadata={},
+                region="us-east-1",
+                profile=None,
+                resolve_image_repos=True,
+            )
+            package_command_context.run()
+
+        # Verify sync_ecr_stack was called with correct arguments
+        # This proves the resolve_image_repos code path was executed
+        mock_sync_ecr_stack.assert_called_once()
+        call_args = mock_sync_ecr_stack.call_args
+        # Check that template file was passed
+        self.assertEqual(call_args[0][0], temp_template_file.name)
+        # Check that s3_bucket was passed
+        self.assertEqual(call_args[0][3], "s3-bucket")
diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py
index 07457bb14f..5174ae2713 100644
--- a/tests/unit/commands/samconfig/test_samconfig.py
+++ b/tests/unit/commands/samconfig/test_samconfig.py
@@ -855,6 +855,7 @@ def test_package(
             "myregion",
             None,
             False,
+            False,
         )

     @patch("samcli.commands._utils.options.get_template_artifacts_format")
diff --git a/tests/unit/local/layers/test_download_layers.py b/tests/unit/local/layers/test_download_layers.py
index e5b05195ab..d07f339171 100644
--- a/tests/unit/local/layers/test_download_layers.py
+++ b/tests/unit/local/layers/test_download_layers.py
@@ -1,6 +1,7 @@
 import os
+import errno
 from unittest import TestCase
-from unittest.mock import Mock, call, patch
+from unittest.mock import Mock, call, patch, MagicMock
 from botocore.exceptions import NoCredentialsError, ClientError
 from pathlib import Path

@@ -325,3 +326,106 @@ def test_fetch_layer_uri_re_raises_client_error(self):

         with self.assertRaises(ClientError):
             download_layers._fetch_layer_uri(layer=layer)
+
+
+class TestLayerDownloader_create_layer_directory(TestCase):
+    """Test cases for _create_layer_directory race condition handling"""
+
+    @patch("samcli.local.layers.layer_downloader.Path")
+    def test_create_layer_directory_success(self, path_mock):
+        """Test successful directory creation"""
+        layer_path_mock = MagicMock(spec=Path)
+
+        # Simulate successful mkdir
+        layer_path_mock.mkdir.return_value = None
+
+        LayerDownloader._create_layer_directory(layer_path_mock)
+
+        layer_path_mock.mkdir.assert_called_once_with(mode=0o700, parents=True, exist_ok=True)
+
+    @patch("samcli.local.layers.layer_downloader.Path")
+    def test_create_layer_directory_handles_file_exists_error_when_dir_exists(self, path_mock):
+        """Test FileExistsError handling when directory actually exists"""
+        layer_path_mock = MagicMock(spec=Path)
+
+        # Simulate FileExistsError due to race condition
+        layer_path_mock.mkdir.side_effect = FileExistsError("Directory exists")
+        # But directory actually exists
+        layer_path_mock.exists.return_value = True
+
+        # Should not raise - handles race condition gracefully
+        LayerDownloader._create_layer_directory(layer_path_mock)
+
+        layer_path_mock.mkdir.assert_called_once()
+        layer_path_mock.exists.assert_called_once()
+
+    @patch("samcli.local.layers.layer_downloader.Path")
+    def test_create_layer_directory_raises_file_exists_error_when_dir_not_exists(self, path_mock):
+        """Test FileExistsError is re-raised when directory doesn't actually exist"""
+        layer_path_mock = MagicMock(spec=Path)
+
+        # Simulate FileExistsError but directory doesn't exist (deeper issue)
+        layer_path_mock.mkdir.side_effect = FileExistsError("Directory exists")
+        layer_path_mock.exists.return_value = False
+
+        # Should re-raise the exception
+        with self.assertRaises(FileExistsError):
+            LayerDownloader._create_layer_directory(layer_path_mock)
+
+    @patch("samcli.local.layers.layer_downloader.Path")
+    @patch("samcli.local.layers.layer_downloader.errno")
+    def test_create_layer_directory_handles_oserror_eexist_when_dir_exists(self, errno_mock, path_mock):
+        """Test OSError with EEXIST errno when directory exists"""
+        errno_mock.EEXIST = errno.EEXIST  # Use real errno value
+        layer_path_mock = MagicMock(spec=Path)
+
+        # Simulate OSError with EEXIST errno
+        os_error = OSError("File exists")
+        os_error.errno = errno.EEXIST
+        layer_path_mock.mkdir.side_effect = os_error
+        # Directory actually exists
+        layer_path_mock.exists.return_value = True
+
+        # Should not raise - handles race condition gracefully
+        LayerDownloader._create_layer_directory(layer_path_mock)
+
+        layer_path_mock.mkdir.assert_called_once()
+        layer_path_mock.exists.assert_called_once()
+
+    @patch("samcli.local.layers.layer_downloader.Path")
+    @patch("samcli.local.layers.layer_downloader.errno")
+    def test_create_layer_directory_raises_oserror_eexist_when_dir_not_exists(self, errno_mock, path_mock):
+        """Test OSError with EEXIST errno is re-raised when directory doesn't exist"""
+        errno_mock.EEXIST = errno.EEXIST
+        layer_path_mock = MagicMock(spec=Path)
+
+        # Simulate OSError with EEXIST errno but directory doesn't exist
+        os_error = OSError("File exists")
+        os_error.errno = errno.EEXIST
+        layer_path_mock.mkdir.side_effect = os_error
+        layer_path_mock.exists.return_value = False
+
+        # Should re-raise the exception
+        with self.assertRaises(OSError):
+            LayerDownloader._create_layer_directory(layer_path_mock)
+
+    @patch("samcli.local.layers.layer_downloader.Path")
+    @patch("samcli.local.layers.layer_downloader.errno")
+    def test_create_layer_directory_raises_oserror_with_different_errno(self, errno_mock, path_mock):
+        """Test OSError with non-EEXIST errno is re-raised"""
+        errno_mock.EEXIST = errno.EEXIST
+        layer_path_mock = MagicMock(spec=Path)
+
+        # Simulate OSError with different errno (e.g., permission denied)
+        os_error = OSError("Permission denied")
+        os_error.errno = errno.EACCES
+        layer_path_mock.mkdir.side_effect = os_error
+
+        # Should re-raise the exception immediately
+        with self.assertRaises(OSError) as context:
+            LayerDownloader._create_layer_directory(layer_path_mock)
+
+        # Verify it's the same error
+        self.assertEqual(context.exception.errno, errno.EACCES)
+        # exists() should not be called for non-EEXIST errors
+        layer_path_mock.exists.assert_not_called()