Diffstat (limited to 'app-admin/ansible/files/ansible-2.10.0-CVE-2020-25635-6.patch')
-rw-r--r--  app-admin/ansible/files/ansible-2.10.0-CVE-2020-25635-6.patch  |  54
1 file changed, 54 insertions, 0 deletions
diff --git a/app-admin/ansible/files/ansible-2.10.0-CVE-2020-25635-6.patch b/app-admin/ansible/files/ansible-2.10.0-CVE-2020-25635-6.patch
new file mode 100644
index 000000000000..df88be4264ff
--- /dev/null
+++ b/app-admin/ansible/files/ansible-2.10.0-CVE-2020-25635-6.patch
@@ -0,0 +1,54 @@
+From 921bd53103c2b543e95c9e6b863702db3ff54d0c Mon Sep 17 00:00:00 2001
+From: Jill R <4121322+jillr@users.noreply.github.com>
+Date: Fri, 2 Oct 2020 11:37:37 -0700
+Subject: [PATCH] aws_ssm: Namespace S3 buckets and delete transferred files
+ (#237)
+
+Files transferred to instances via the SSM connection plugin should use
+folders within the bucket that are namespaced per-host, to prevent collisions.
+Files should also be deleted from buckets when they are no longer required.
+
+Fixes: #221
+Fixes: #222
+
+Based on work by abeluck
+
+changelog
+---
+ ansible_collections/community/aws/changelogs/fragments/221_222_ssm_bucket_operations.yaml | 2 ++
+ ansible_collections/community/aws/plugins/connection/aws_ssm.py | 6 +++++-
+ 2 files changed, 7 insertions(+), 1 deletion(-)
+ create mode 100644 ansible_collections/community/aws/changelogs/fragments/221_222_ssm_bucket_operations.yaml
+
+diff --git a/ansible_collections/community/aws/changelogs/fragments/221_222_ssm_bucket_operations.yaml b/ansible_collections/community/aws/changelogs/fragments/221_222_ssm_bucket_operations.yaml
+new file mode 100644
+index 00000000..247d5e36
+--- /dev/null
++++ b/ansible_collections/community/aws/changelogs/fragments/221_222_ssm_bucket_operations.yaml
+@@ -0,0 +1,2 @@
++bugfixes:
++ - aws_ssm connection plugin - namespace file uploads to S3 into unique folders per host, to prevent name collisions. Also deletes files from S3 to ensure temp files are not left behind. (https://github.com/ansible-collections/community.aws/issues/221, https://github.com/ansible-collections/community.aws/issues/222)
+diff --git a/ansible_collections/community/aws/plugins/connection/aws_ssm.py b/ansible_collections/community/aws/plugins/connection/aws_ssm.py
+index 7f7d6926..94289eee 100644
+--- a/ansible_collections/community/aws/plugins/connection/aws_ssm.py
++++ b/ansible_collections/community/aws/plugins/connection/aws_ssm.py
+@@ -522,7 +522,8 @@ def _get_boto_client(self, service, region_name=None):
+     def _file_transport_command(self, in_path, out_path, ssm_action):
+         ''' transfer a file from using an intermediate S3 bucket '''
+
+-        s3_path = out_path.replace('\\', '/')
++        path_unescaped = "{0}/{1}".format(self.instance_id, out_path)
++        s3_path = path_unescaped.replace('\\', '/')
+         bucket_url = 's3://%s/%s' % (self.get_option('bucket_name'), s3_path)
+
+         if self.is_windows:
+@@ -546,6 +547,9 @@ def _file_transport_command(self, in_path, out_path, ssm_action):
+                 client.upload_fileobj(data, self.get_option('bucket_name'), s3_path)
+             (returncode, stdout, stderr) = self.exec_command(get_command, in_data=None, sudoable=False)
+
++        # Remove the files from the bucket after they've been transferred
++        client.delete_object(Bucket=self.get_option('bucket_name'), Key=s3_path)
++
+         # Check the return code
+         if returncode == 0:
+             return (returncode, stdout, stderr)
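
The sketch below is not part of the patch; it restates the fix in isolation: upload through a per-host key prefix so concurrent hosts cannot collide on the same remote path, then delete the object once the transfer has completed. It assumes boto3 is installed with working AWS credentials; transfer_via_s3 and its parameters are illustrative names only and do not exist in community.aws.

import boto3

def transfer_via_s3(local_path, remote_path, bucket_name, instance_id):
    """Copy a local file into S3 under a per-host prefix, then remove it."""
    client = boto3.client('s3')
    # Mirror the patch: prefix the key with the instance id and normalise
    # backslashes so Windows-style paths become valid S3 keys.
    s3_key = "{0}/{1}".format(instance_id, remote_path).replace('\\', '/')
    try:
        with open(local_path, 'rb') as data:
            client.upload_fileobj(data, bucket_name, s3_key)
        # ... the real plugin would now have the remote host fetch the object ...
    finally:
        # Clean up so no temporary file is left behind in the bucket
        # (the second half of the fix).
        client.delete_object(Bucket=bucket_name, Key=s3_key)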