ANSWER:
Ended up writing a Python script to solve this.
We created folders on the Matillion server to hold the SSH keys, and the script references them by path.
Here's the script in case anyone else can benefit:
import paramiko  # using version 2.8.1 - newer versions have an authentication bug
import boto3
# turn on paramiko debugging
# paramiko.common.logging.basicConfig(level=paramiko.common.DEBUG)
# S3 client and resource (boto3 picks up credentials from the environment/instance role)
s3 = boto3.client('s3')
s3_resource = boto3.resource('s3')
bucket = '[s3_bucket_name_goes_here]'
s3_folder = '[s3_subfolder_name_goes_here]'
s3_bucket = s3_resource.Bucket(bucket)
# create ssh client
ssh_client = paramiko.SSHClient()
host="[sftp host name]"
username="[you know what goes here]"
port=22
private_key="[put path/filename of ssh key here]"
private_key_pass="[password of private key goes here]"
# get private key from private key file
pkey = paramiko.RSAKey.from_private_key_file(private_key, private_key_pass)
# print (pkey)
# open SSH session
# auto-accept unknown host keys (consider loading known_hosts in production)
ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh_client.connect(host, port=port, username=username, pkey=pkey, look_for_keys=False)
# open sftp connection
sftp = ssh_client.open_sftp()
# get list of files
remote_path = "[folder on sftp server]/"
directory_list = sftp.listdir(remote_path)
# print (directory_list)
# do something with the files
for file in directory_list:
    if '.CSV' in file:
        print('Processing file: ' + file)
        # download to /tmp so we can decrypt (pgp) before loading
        sftp.get(remote_path + file, '/tmp/' + file)
        ################################################################
        ## to move directly to s3 bucket instead:
        ## get the "file-like" object of each file and put it in S3
        # with sftp.open(remote_path + file, "r") as f:
        #     f.prefetch()
        #     s3_bucket.put_object(Body=f, Key=s3_folder + file)
        ################################################################
        # remove the file from the sftp folder so we don't get it again
        sftp.remove(remote_path + file)
# close session
sftp.close()
ssh_client.close()
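The sftp.get() into /tmp is there because our files are PGP-encrypted and need decrypting before they go anywhere; that step isn't shown above. In case it's useful, here's a rough sketch of how that step could look with the python-gnupg package - the keyring path, passphrase placeholder, and the S3 upload call are illustrative assumptions, not part of the job above:

import gnupg

# hypothetical decrypt-and-upload step, run after the download loop above
gpg = gnupg.GPG(gnupghome='/home/matillion/.gnupg')  # assumed keyring location

for file in directory_list:
    if '.CSV' in file:
        encrypted_path = '/tmp/' + file
        decrypted_path = '/tmp/decrypted_' + file
        # decrypt the downloaded file to a second temp file
        with open(encrypted_path, 'rb') as f:
            result = gpg.decrypt_file(f, passphrase='[pgp passphrase goes here]',
                                      output=decrypted_path)
        if not result.ok:
            print('Decryption failed for ' + file + ': ' + result.status)
            continue
        # push the decrypted file to the same S3 location
        s3.upload_file(decrypted_path, bucket, s3_folder + file)

We decrypt to a second /tmp file rather than in memory so the upload can go through boto3's upload_file, which handles multipart uploads for larger files automatically.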