Upgrade from "boto" to "boto3"

Michael Mintz 2023-07-18 11:31:12 -04:00
parent ea3788704c
commit eb57c34587
3 changed files with 54 additions and 37 deletions

View File

@@ -24,19 +24,20 @@ class S3LoggingBucket(object):
)
with pip_find_lock:
try:
from boto.s3.connection import S3Connection
import boto3
except Exception:
shared_utils.pip_install("boto", version="2.49.0")
from boto.s3.connection import S3Connection
self.conn = S3Connection(selenium_access_key, selenium_secret_key)
self.bucket = self.conn.get_bucket(log_bucket)
shared_utils.pip_install("boto3")
import boto3
self.conn = boto3.Session(
aws_access_key_id=selenium_access_key,
aws_secret_access_key=selenium_secret_key,
)
self.bucket = log_bucket
self.bucket_url = bucket_url
def get_key(self, file_name):
"""Create a new Key instance with the given name."""
from boto.s3.key import Key
return Key(bucket=self.bucket, name=file_name)
"""Create a new S3 connection instance with the given name."""
return self.conn.resource("s3").Object(self.bucket, file_name)
def get_bucket(self):
"""Return the bucket being used."""
@@ -53,18 +54,19 @@ class S3LoggingBucket(object):
content_type = "image/jpeg"
elif file_name.endswith(".png"):
content_type = "image/png"
upload_key.set_contents_from_filename(
file_path, headers={"Content-Type": content_type}
upload_key.Bucket().upload_file(
file_path,
file_name,
ExtraArgs={"ACL": "public-read", "ContentType": content_type},
)
upload_key.url = upload_key.generate_url(expires_in=3600).split("?")[0]
try:
upload_key.make_public()
except Exception:
pass
def upload_index_file(self, test_address, timestamp):
def upload_index_file(
self, test_address, timestamp, data_path, save_data_to_logs
):
"""Create an index.html file with links to all the log files
that were just uploaded."""
import os
global already_uploaded_files
already_uploaded_files = list(set(already_uploaded_files))
already_uploaded_files.sort()
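
A hedged sketch of the upload call that replaces set_contents_from_filename(), generate_url(), and make_public() in the hunk above; the session, file paths, and bucket name are illustrative stand-ins.

    import boto3

    session = boto3.Session(
        aws_access_key_id="SELENIUM_ACCESS_KEY",
        aws_secret_access_key="SELENIUM_SECRET_KEY",
    )
    s3 = session.resource("s3")
    # One call uploads the file, marks it public, and sets the Content-Type,
    # so a separate make_public() step is no longer needed.
    s3.Bucket("my-log-bucket").upload_file(
        "/tmp/screenshot.png",   # local file path
        "logs/screenshot.png",   # key inside the bucket
        ExtraArgs={"ACL": "public-read", "ContentType": "image/png"},
    )
    # The public URL is simply the bucket URL plus the key, which is why the
    # new code drops the boto 2 generate_url() call.
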
@@ -76,15 +78,19 @@ class S3LoggingBucket(object):
"<a href='" + self.bucket_url + ""
"%s'>%s</a>" % (completed_file, completed_file)
)
index.set_contents_from_string(
"<br>".join(index_str), headers={"Content-Type": "text/html"}
index_page = str("<br>".join(index_str))
save_data_to_logs(index_page, "index.html")
file_path = os.path.join(data_path, "index.html")
index.Bucket().upload_file(
file_path,
file_name,
ExtraArgs={"ACL": "public-read", "ContentType": "text/html"},
)
index.make_public()
return "%s%s" % (self.bucket_url, file_name)
def save_uploaded_file_names(self, files):
"""Keep a record of all file names that have been uploaded. Upload log
files related to each test after its execution. Once done, use
already_uploaded_files to create an index file."""
"""Keep a record of all file names that have been uploaded.
Upload log files related to each test after its execution.
Once done, use already_uploaded_files to create an index file."""
global already_uploaded_files
already_uploaded_files.extend(files)
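
The reworked upload_index_file() now renders the index page to disk (via the save_data_to_logs callback) before uploading it with boto3. A rough standalone sketch of that flow, with hypothetical parameter names and a simplified key, looks like this:

    import os

    def upload_index(s3, bucket, bucket_url, data_path, files, save_data_to_logs):
        # Build one anchor tag per uploaded log file, joined with <br> tags.
        links = ["<a href='%s%s'>%s</a>" % (bucket_url, f, f) for f in files]
        index_page = "<br>".join(links)
        save_data_to_logs(index_page, "index.html")  # write the page locally first
        file_path = os.path.join(data_path, "index.html")
        s3.Bucket(bucket).upload_file(
            file_path,
            "index.html",  # the real code keys the page under the test address
            ExtraArgs={"ACL": "public-read", "ContentType": "text/html"},
        )
        return "%s%s" % (bucket_url, "index.html")
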

View File

@@ -15174,16 +15174,16 @@ class BaseCase(unittest.TestCase):
)
uploaded_files.append(logfile_name)
s3_bucket.save_uploaded_file_names(uploaded_files)
index_file = s3_bucket.upload_index_file(test_id, guid)
print("\n\n*** Log files uploaded: ***\n%s\n" % index_file)
index_file = s3_bucket.upload_index_file(
test_id, guid, self.data_path, self.save_data_to_logs
)
print("\n*** Log files uploaded: ***\n%s\n" % index_file)
logging.info(
"\n\n*** Log files uploaded: ***\n%s\n" % index_file
"\n*** Log files uploaded: ***\n%s\n" % index_file
)
if self.with_db_reporting:
from seleniumbase.core.testcase_manager import (
TestcaseDataPayload,
)
from seleniumbase.core.testcase_manager import (
TestcaseManager,
)
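
As the hunk above shows, upload_index_file() now takes two extra arguments: a data_path where the index page is written locally, and a save_data_to_logs callback that does the writing. A stand-in callback (names and the default folder are illustrative) only needs to persist a string next to the other log files:

    import codecs
    import os

    def save_data_to_logs(data, file_name, data_path="./latest_logs"):
        # Stand-in for the callback passed to upload_index_file(): write the
        # rendered index.html into the local log folder before it is uploaded.
        os.makedirs(data_path, exist_ok=True)
        with codecs.open(os.path.join(data_path, file_name), "w", "utf-8") as f:
            f.write(data)
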

View File

@ -1,8 +1,6 @@
"""The S3 Plugin for uploading test logs to the S3 bucket specified."""
"""S3 Logging Plugin for SeleniumBase tests that run with pynose / nosetests"""
import uuid
import logging
import os
from seleniumbase.core.s3_manager import S3LoggingBucket
from nose.plugins import Plugin
@@ -14,25 +12,38 @@ class S3Logging(Plugin):
"""Get the options."""
super().configure(options, conf)
self.options = options
self.test_id = None
def save_data_to_logs(self, data, file_name):
from seleniumbase.fixtures import page_utils
test_logpath = os.path.join(self.options.log_path, self.test_id)
file_name = str(file_name)
destination_folder = test_logpath
page_utils._save_data_as(data, destination_folder, file_name)
def afterTest(self, test):
"""After each testcase, upload logs to the S3 bucket."""
"""Upload logs to the S3 bucket after tests complete."""
from seleniumbase.core.s3_manager import S3LoggingBucket
self.test_id = test.test.id()
s3_bucket = S3LoggingBucket()
guid = str(uuid.uuid4().hex)
path = os.path.join(self.options.log_path, test.test.id())
path = os.path.join(self.options.log_path, self.test_id)
uploaded_files = []
for logfile in os.listdir(path):
logfile_name = "%s/%s/%s" % (
guid,
test.test.id(),
self.test_id,
logfile.split(path)[-1],
)
s3_bucket.upload_file(logfile_name, os.path.join(path, logfile))
uploaded_files.append(logfile_name)
s3_bucket.save_uploaded_file_names(uploaded_files)
index_file = s3_bucket.upload_index_file(test.id(), guid)
print("\n\n*** Log files uploaded: ***\n%s\n" % index_file)
logging.error("\n\n*** Log files uploaded: ***\n%s\n" % index_file)
index_file = s3_bucket.upload_index_file(
test.id(), guid, path, self.save_data_to_logs
)
print("\n*** Log files uploaded: ***\n%s\n" % index_file)
# If the SB database plugin is also being used,
# attach a link to the logs index database row.
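
Taken together, the plugin's afterTest() hook now works roughly like the sketch below (S3LoggingBucket comes from the diff above; log_path and test_id stand in for the plugin's attributes, and the index-file step is omitted for brevity). It assumes seleniumbase is installed and its S3 settings are configured.

    import os
    import uuid

    from seleniumbase.core.s3_manager import S3LoggingBucket

    def upload_test_logs(log_path, test_id):
        # Each log file is keyed under a fresh GUID plus the test id,
        # then recorded so the index page can link to it later.
        s3_bucket = S3LoggingBucket()
        guid = str(uuid.uuid4().hex)
        path = os.path.join(log_path, test_id)
        uploaded_files = []
        for logfile in os.listdir(path):
            logfile_name = "%s/%s/%s" % (guid, test_id, logfile)
            s3_bucket.upload_file(logfile_name, os.path.join(path, logfile))
            uploaded_files.append(logfile_name)
        s3_bucket.save_uploaded_file_names(uploaded_files)
        return uploaded_files
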