author     Cédric Ollivier <cedric.ollivier@orange.com>   2022-03-04 10:01:29 +0100
committer  Cédric Ollivier <cedric.ollivier@orange.com>   2022-03-04 10:02:06 +0100
commit     2ee3d64d2d92e6f9a04c8043aa1ac3210941857f
tree       4abdc51bf41b91ce07232c5379b73acb876700df
parent     a84b4aa3d77d17cb5f798aee136660070d19766e
Protect against Bucket keys containing HTML chars
It allows dumping MTS output files.

Change-Id: I2b35ee81a764573a9d17cb3a225cb5febed5fa46
Signed-off-by: Cédric Ollivier <cedric.ollivier@orange.com>
-rw-r--r--  xtesting/core/campaign.py  25
1 file changed, 14 insertions(+), 11 deletions(-)
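
A minimal sketch (not part of the patch, with a hypothetical key) of what the switch to urllib.parse.unquote_plus guards against: the patch suggests that object keys listed from the bucket may come back percent-encoded when the stored file names contain characters such as '<' or '>', so the keys are decoded before being reused as download sources and local paths.

    import urllib.parse

    # hypothetical key as returned by the bucket listing
    encoded_key = "prefix/build-1/mts%3Csummary%3E.html"
    decoded_key = urllib.parse.unquote_plus(encoded_key)
    print(decoded_key)  # prefix/build-1/mts<summary>.html
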
diff --git a/xtesting/core/campaign.py b/xtesting/core/campaign.py
index 5ca47d6e..daab853b 100644
--- a/xtesting/core/campaign.py
+++ b/xtesting/core/campaign.py
@@ -15,9 +15,9 @@ import logging.config
 import mimetypes
 import os
 import re
+import urllib
 import zipfile
-from urllib.parse import urlparse
 
 import boto3
 from boto3.s3.transfer import TransferConfig
 import botocore
@@ -121,25 +121,28 @@ class Campaign():
             multipart_threshold = 5 * 1024 ** 5 if "google" in os.environ[
                 "S3_ENDPOINT_URL"] else 8 * 1024 * 1024
             tconfig = TransferConfig(multipart_threshold=multipart_threshold)
-            bucket_name = urlparse(dst_s3_url).netloc
+            bucket_name = urllib.parse.urlparse(dst_s3_url).netloc
             s3path = re.search(
-                '^/*(.*)/*$', urlparse(dst_s3_url).path).group(1)
+                '^/*(.*)/*$', urllib.parse.urlparse(dst_s3_url).path).group(1)
             prefix = os.path.join(s3path, build_tag)
             # pylint: disable=no-member
             for s3_object in b3resource.Bucket(bucket_name).objects.filter(
                     Prefix=f"{prefix}/"):
-                path, _ = os.path.split(s3_object.key)
+                path, _ = os.path.split(
+                    urllib.parse.unquote_plus(s3_object.key))
                 lpath = re.sub(f'^{s3path}/*', '', path)
                 if lpath and not os.path.exists(lpath):
                     os.makedirs(lpath)
+                Campaign.__logger.info(
+                    "Downloading %s",
+                    re.sub(f'^{s3path}/*', '',
+                           urllib.parse.unquote_plus(s3_object.key)))
                 # pylint: disable=no-member
                 b3resource.Bucket(bucket_name).download_file(
-                    s3_object.key,
-                    re.sub(f'^{s3path}/*', '', s3_object.key),
+                    urllib.parse.unquote_plus(s3_object.key),
+                    re.sub(f'^{s3path}/*', '',
+                           urllib.parse.unquote_plus(s3_object.key)),
                     Config=tconfig)
-                Campaign.__logger.info(
-                    "Downloading %s",
-                    re.sub(f'^{s3path}/*', '', s3_object.key))
             return Campaign.EX_OK
         except Exception:  # pylint: disable=broad-except
             Campaign.__logger.exception("Cannot publish the artifacts")
@@ -184,9 +187,9 @@ class Campaign():
             multipart_threshold = 5 * 1024 ** 5 if "google" in os.environ[
                 "S3_ENDPOINT_URL"] else 8 * 1024 * 1024
             tconfig = TransferConfig(multipart_threshold=multipart_threshold)
-            bucket_name = urlparse(dst_s3_url).netloc
+            bucket_name = urllib.parse.urlparse(dst_s3_url).netloc
             mime_type = mimetypes.guess_type(f'{build_tag}.zip')
-            path = urlparse(dst_s3_url).path.strip("/")
+            path = urllib.parse.urlparse(dst_s3_url).path.strip("/")
             # pylint: disable=no-member
             b3resource.Bucket(bucket_name).upload_file(
                 f'{build_tag}.zip',
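
For context, a hedged sketch of how both hunks derive the bucket name and key prefix from the destination URL after the change; dst_s3_url is a made-up example, and the s3://<bucket>/<path> form of S3_DST_URL is an assumption, not taken from the patch.

    import re
    import urllib.parse

    dst_s3_url = "s3://artifacts/prefix"  # hypothetical S3_DST_URL value
    parsed = urllib.parse.urlparse(dst_s3_url)
    bucket_name = parsed.netloc                              # 'artifacts'
    s3path = re.search('^/*(.*)/*$', parsed.path).group(1)   # '/prefix' -> 'prefix'
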