I'm looping through all object ACLs in a bucket to remove the "Everyone" permissions from each of them. The idea here is to retain all of the other current permissions.
My issue is that the PutACL call doesn't work. In the example below, a new AccessControlList is created, omitting the "everyone" entries. The PutACL call returns successfully, but the object's ACL is unchanged.
Perhaps there is an easier way to identify and remove specific Grants.
AmazonS3Client s3 = new AmazonS3Client();
GetACLRequest aclRequest = new GetACLRequest() { BucketName = "my-bucket", Key = "/dir/protect_me.txt" };
var aclResponse = s3.GetACL(aclRequest);
bool foundEveryonePriv = false; //if found at least one.
S3AccessControlList newAcl = new S3AccessControlList();
foreach (var grant in aclResponse.AccessControlList.Grants)
{
bool grantToEveryone = string.Compare(grant.Grantee.URI, "http://acs.amazonaws.com/groups/global/AllUsers") == 0;
Logger.log.InfoFormat("{0},{1},{2},{3}", aclRequest.BucketName, aclRequest.Key, grant.Permission, (grantToEveryone ? "EVERYONE" : string.Empty));
if (grantToEveryone)
{
foundEveryonePriv = true; // skip this grant so it is omitted from the new ACL
}
else
{
newAcl.AddGrant(grant.Grantee, grant.Permission); // keep all non-public grants
}
}
//modify the items if necessary and requested.
if (foundEveryonePriv)
{
newAcl.Owner = aclResponse.AccessControlList.Owner;
var response = s3.PutACL(new PutACLRequest() { AccessControlList = newAcl, BucketName = aclRequest.BucketName, Key = aclRequest.Key });
}
Try modifying the existing ACL from the GET to remove the public grant. Then send the modified ACL back in a PUT request. Here's what I did and it's working well to retain the original grants and remove the public grant from a given object.
private void RemovePublicAcl(AmazonS3Client client, string bucket, string key)
{
var aclRequest = new GetACLRequest { BucketName = bucket, Key = key };
var aclResponse = client.GetACL(aclRequest);
var acl = aclResponse.AccessControlList;
const string PUBLIC_GRANTEE = "http://acs.amazonaws.com/groups/global/AllUsers";
if (acl.Grants.Any(x =>
!string.IsNullOrWhiteSpace(x.Grantee.URI) &&
x.Grantee.URI.Equals(PUBLIC_GRANTEE)))
{
acl.Grants.RemoveAll(x =>
!string.IsNullOrWhiteSpace(x.Grantee.URI) &&
x.Grantee.URI.Equals(PUBLIC_GRANTEE));
var aclUpdate = new PutACLRequest();
aclUpdate.BucketName = bucket;
aclUpdate.Key = key;
aclUpdate.AccessControlList = acl;
var response = client.PutACL(aclUpdate);
}
}
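To apply this to every object in the bucket (the loop described in the question), here's a minimal sketch, assuming the same synchronous AmazonS3Client and paging through ListObjects:
var listRequest = new ListObjectsRequest { BucketName = "my-bucket" };
ListObjectsResponse listResponse;
do
{
    listResponse = client.ListObjects(listRequest);
    foreach (var obj in listResponse.S3Objects)
    {
        RemovePublicAcl(client, "my-bucket", obj.Key);
        listRequest.Marker = obj.Key; // resume after the last processed key on the next page
    }
} while (listResponse.IsTruncated);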
Trying to make use of the AndroidPublisherService from Play Developer API Client.
I can list active tracks and the releases in those tracks, but when I try to upload a new build there seems to be no way to attach the authentication I already used to read that data.
I've authenticated using var googleCredentials = GoogleCredential.FromStream(keyDataStream).CreateWithUser(serviceUsername); where serviceUsername is the email for my service account.
private static void Execute(string packageName, string aabfile, string credfile, string serviceUsername)
{
var credentialsFilename = credfile;
if (string.IsNullOrWhiteSpace(credentialsFilename))
{
// Check env. var
credentialsFilename =
Environment.GetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS",
EnvironmentVariableTarget.Process);
}
Console.WriteLine($"Using credentials {credfile} with package {packageName} for aab file {aabfile}");
var keyDataStream = File.OpenRead(credentialsFilename);
var googleCredentials = GoogleCredential.FromStream(keyDataStream)
.CreateWithUser(serviceUsername);
var credentials = googleCredentials.UnderlyingCredential as ServiceAccountCredential;
var service = new AndroidPublisherService();
var edit = service.Edits.Insert(new AppEdit { ExpiryTimeSeconds = "3600" }, packageName);
edit.Credential = credentials;
var activeEditSession = edit.Execute();
Console.WriteLine($"Edits started with id {activeEditSession.Id}");
var tracksList = service.Edits.Tracks.List(packageName, activeEditSession.Id);
tracksList.Credential = credentials;
var tracksResponse = tracksList.Execute();
foreach (var track in tracksResponse.Tracks)
{
Console.WriteLine($"Track: {track.TrackValue}");
Console.WriteLine("Releases: ");
foreach (var rel in track.Releases)
Console.WriteLine($"{rel.Name} version: {rel.VersionCodes.FirstOrDefault()} - Status: {rel.Status}");
}
using var fileStream = File.OpenRead(aabfile);
var upload = service.Edits.Bundles.Upload(packageName, activeEditSession.Id, fileStream, "application/octet-stream");
var uploadProgress = upload.Upload();
if (uploadProgress == null || uploadProgress.Exception != null)
{
Console.WriteLine($"Failed to upload. Error: {uploadProgress?.Exception}");
return;
}
Console.WriteLine($"Upload {uploadProgress.Status}");
var tracksUpdate = service.Edits.Tracks.Update(new Track
{
Releases = new List<TrackRelease>(new[]
{
new TrackRelease
{
Name = "Roswell - Grenis Dev Test",
Status = "completed",
VersionCodes = new List<long?>(new[] {(long?) upload?.ResponseBody?.VersionCode})
}
})
}, packageName, activeEditSession.Id, "internal");
tracksUpdate.Credential = credentials;
var trackResult = tracksUpdate.Execute();
Console.WriteLine($"Track {trackResult?.TrackValue}");
var commitResult = service.Edits.Commit(packageName, activeEditSession.Id);
Console.WriteLine($"{commitResult.EditId} has been committed");
}
And as the code points out, all action objects such as tracksList.Credential = credentials; can be given the credentials generated from the service account.
BUT the actual upload action var upload = service.Edits.Bundles.Upload(packageName, activeEditSession.Id, fileStream, "application/octet-stream"); does not expose a .Credential object, and it always fails with:
The service androidpublisher has thrown an exception: Google.GoogleApiException: Google.Apis.Requests.RequestError
Request is missing required authentication credential. Expected OAuth 2 access token, login cookie or other valid authentication credential. See https://developers.google.com/identity/sign-in/web/devconsole-project. [401]
Errors [
Message[Login Required.] Location[Authorization - header] Reason[required] Domain[global]
]
at Google.Apis.Upload.ResumableUpload`1.InitiateSessionAsync(CancellationToken cancellationToken)
at Google.Apis.Upload.ResumableUpload.UploadAsync(CancellationToken cancellationToken)
So, how would I go about providing the actual Upload action with the given credentials here?
Managed to figure this out during the day. I was missing a call to CreateScoped() when creating the GoogleCredential object, as well as a call to InitiateSessionAsync() on the upload object.
var googleCredentials = GoogleCredential.FromStream(keyDataStream)
.CreateWithUser(serviceUsername)
.CreateScoped(AndroidPublisherService.Scope.Androidpublisher);
Once that was done I could then get a valid oauth token by calling
var googleCredentials = GoogleCredential.FromStream(keyDataStream)
.CreateWithUser(serviceUsername)
.CreateScoped(AndroidPublisherService.Scope.Androidpublisher);
var credentials = googleCredentials.UnderlyingCredential as ServiceAccountCredential;
var oauthToken = credentials?.GetAccessTokenForRequestAsync(AndroidPublisherService.Scope.Androidpublisher).Result;
And I can now use that oauth token in the upload request:
upload.OauthToken = oauthToken;
_ = await upload.InitiateSessionAsync();
var uploadProgress = await upload.UploadAsync();
if (uploadProgress == null || uploadProgress.Exception != null)
{
Console.WriteLine($"Failed to upload. Error: {uploadProgress?.Exception}");
return;
}
The full code example for successfully uploading a new .aab file to the Google Play Store internal test track thus looks something like this:
private async Task UploadGooglePlayRelease(string fileToUpload, string changeLogFile, string serviceUsername, string packageName)
{
var serviceAccountFile = ResolveServiceAccountCertificateInfoFile();
if (!serviceAccountFile.Exists)
throw new ApplicationException($"Failed to find the service account certificate file. {serviceAccountFile.FullName}");
var keyDataStream = File.OpenRead(serviceAccountFile.FullName);
var googleCredentials = GoogleCredential.FromStream(keyDataStream)
.CreateWithUser(serviceUsername)
.CreateScoped(AndroidPublisherService.Scope.Androidpublisher);
var credentials = googleCredentials.UnderlyingCredential as ServiceAccountCredential;
var oauthToken = credentials?.GetAccessTokenForRequestAsync(AndroidPublisherService.Scope.Androidpublisher).Result;
var service = new AndroidPublisherService();
var edit = service.Edits.Insert(new AppEdit { ExpiryTimeSeconds = "3600" }, packageName);
edit.Credential = credentials;
var activeEditSession = await edit.ExecuteAsync();
_logger.LogInformation($"Edits started with id {activeEditSession.Id}");
var tracksList = service.Edits.Tracks.List(packageName, activeEditSession.Id);
tracksList.Credential = credentials;
var tracksResponse = await tracksList.ExecuteAsync();
foreach (var track in tracksResponse.Tracks)
{
_logger.LogInformation($"Track: {track.TrackValue}");
_logger.LogInformation("Releases: ");
foreach (var rel in track.Releases)
_logger.LogInformation($"{rel.Name} version: {rel.VersionCodes.FirstOrDefault()} - Status: {rel.Status}");
}
var fileStream = File.OpenRead(fileToUpload);
var upload = service.Edits.Bundles.Upload(packageName, activeEditSession.Id, fileStream, "application/octet-stream");
upload.OauthToken = oauthToken;
_ = await upload.InitiateSessionAsync();
var uploadProgress = await upload.UploadAsync();
if (uploadProgress == null || uploadProgress.Exception != null)
{
Console.WriteLine($"Failed to upload. Error: {uploadProgress?.Exception}");
return;
}
_logger.LogInformation($"Upload {uploadProgress.Status}");
var releaseNotes = await File.ReadAllTextAsync(changeLogFile);
var tracksUpdate = service.Edits.Tracks.Update(new Track
{
Releases = new List<TrackRelease>(new[]
{
new TrackRelease
{
Name = $"{upload?.ResponseBody?.VersionCode}",
Status = "completed",
InAppUpdatePriority = 5,
CountryTargeting = new CountryTargeting { IncludeRestOfWorld = true },
ReleaseNotes = new List<LocalizedText>(new []{ new LocalizedText { Language = "en-US", Text = releaseNotes } }),
VersionCodes = new List<long?>(new[] {(long?) upload?.ResponseBody?.VersionCode})
}
})
}, packageName, activeEditSession.Id, "internal");
tracksUpdate.Credential = credentials;
var trackResult = await tracksUpdate.ExecuteAsync();
_logger.LogInformation($"Track {trackResult?.TrackValue}");
var commitResult = service.Edits.Commit(packageName, activeEditSession.Id);
commitResult.Credential = credentials;
await commitResult.ExecuteAsync();
_logger.LogInformation($"{commitResult.EditId} has been committed");
}
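As an aside, an alternative to setting Credential or OauthToken on each request is to hand the scoped credential to the service itself through BaseClientService.Initializer (the usual pattern in the Google .NET API client libraries). A small sketch; the application name below is just a made-up value:
using Google.Apis.Services;

var service = new AndroidPublisherService(new BaseClientService.Initializer
{
    HttpClientInitializer = googleCredentials, // the scoped GoogleCredential created above
    ApplicationName = "play-upload-tool"       // hypothetical name; anything descriptive works
});
// Requests created from this service, including Edits.Bundles.Upload, should then be
// authorized automatically, so no per-request Credential/OauthToken assignment is needed.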
I am trying to delete all the files inside a folder whose name is basically a date.
Suppose there are 100 files under the folder "08-10-2015"; instead of sending all 100 file names, I want to send just the folder name.
I am trying the code below and it is not working for me.
DeleteObjectsRequest multiObjectDeleteRequest = new DeleteObjectsRequest();
multiObjectDeleteRequest.BucketName = bucketName;
multiObjectDeleteRequest.AddKey(keyName + "/" + folderName + "/");
AmazonS3Config S3Config = new AmazonS3Config()
{
ServiceURL = servicehost
};
using (IAmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(accesskey, secretkey, S3Config))
{
try
{
DeleteObjectsResponse response = client.DeleteObjects(multiObjectDeleteRequest);
Console.WriteLine("Successfully deleted all the {0} items", response.DeletedObjects.Count);
}
catch (DeleteObjectsException e)
{
// Process exception.
}
}
I am using the above code and it is not working.
I think you can delete the entire folder using the following code:
AmazonS3Config cfg = new AmazonS3Config();
cfg.RegionEndpoint = Amazon.RegionEndpoint.EUCentral1;
string bucketName = "your bucket name";
AmazonS3Client s3Client = new AmazonS3Client("your access key", "your secret key", cfg);
S3DirectoryInfo directoryToDelete = new S3DirectoryInfo(s3Client, bucketName, "your folder name or full folder key");
directoryToDelete.Delete(true); // true deletes recursively, including everything inside the folder
I am using Amazon AWSSDK.Core and AWSSDK.S3 version 3.1.0.0 for .NET 3.5.
I hope this helps.
You have to:
List all objects in the folder
Retrieve key for each object
Add this key to a multiple Delete Object Request
Make the request to delete all objects
AmazonS3Config S3Config = new AmazonS3Config()
{
ServiceURL = "s3.amazonaws.com",
CommunicationProtocol = Amazon.S3.Model.Protocol.HTTP,
};
const string AWS_ACCESS_KEY = "xxxxxxxxxxxxxxxx";
const string AWS_SECRET_KEY = "yyyyyyyyyyyyyyyy";
AmazonS3Client client = new AmazonS3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY, S3Config);
DeleteObjectsRequest request2 = new DeleteObjectsRequest();
ListObjectsRequest request = new ListObjectsRequest
{
BucketName = "yourbucketname",
Prefix = "yourprefix"
};
ListObjectsResponse response = await client.ListObjectsAsync(request);
// Process response.
foreach (S3Object entry in response.S3Objects)
{
request2.AddKey(entry.Key);
}
request2.BucketName = "yourbucketname";
DeleteObjectsResponse response2 = await client.DeleteObjectsAsync(request2);
I'm not sure why they didn't keep this method in newer SDKs, but for those interested, here is the implementation of the S3DirectoryInfo.Delete method:
ListObjectsRequest listObjectsRequest = new ListObjectsRequest
{
BucketName = bucket,
Prefix = directoryPrefix
};
DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest
{
BucketName = bucket
};
ListObjectsResponse listObjectsResponse = null;
do
{
listObjectsResponse = s3Client.ListObjects(listObjectsRequest);
foreach (S3Object item in listObjectsResponse.S3Objects.OrderBy((S3Object x) => x.Key))
{
deleteObjectsRequest.AddKey(item.Key);
if (deleteObjectsRequest.Objects.Count == 1000)
{
s3Client.DeleteObjects(deleteObjectsRequest);
deleteObjectsRequest.Objects.Clear();
}
listObjectsRequest.Marker = item.Key;
}
}
while (listObjectsResponse.IsTruncated);
if (deleteObjectsRequest.Objects.Count > 0)
{
s3Client.DeleteObjects(deleteObjectsRequest);
}
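If you want the folder-name-style delete the original question asked for, that loop can be wrapped in a small helper (the method name here is mine), for example:
static void DeleteFolder(IAmazonS3 s3Client, string bucket, string directoryPrefix)
{
    var listRequest = new ListObjectsRequest { BucketName = bucket, Prefix = directoryPrefix };
    var deleteRequest = new DeleteObjectsRequest { BucketName = bucket };
    ListObjectsResponse listResponse;
    do
    {
        listResponse = s3Client.ListObjects(listRequest);
        foreach (var item in listResponse.S3Objects)
        {
            deleteRequest.AddKey(item.Key);
            if (deleteRequest.Objects.Count == 1000) // DeleteObjects accepts at most 1000 keys per call
            {
                s3Client.DeleteObjects(deleteRequest);
                deleteRequest.Objects.Clear();
            }
            listRequest.Marker = item.Key;
        }
    } while (listResponse.IsTruncated);
    if (deleteRequest.Objects.Count > 0)
        s3Client.DeleteObjects(deleteRequest);
}

// Usage for the date folder from the question:
DeleteFolder(client, "yourbucketname", "08-10-2015/");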
I use DotNetOpenAuth.
So... I am getting a good-looking response whose state is Authenticated.
That is fine.
Now I want to get the user profile info, but I always get NULL.
Here is the code.
private ServiceProviderDescription GetServiceDescription()
{
string ValidateTokenEndPoint = ConfigurationManager.AppSettings["identityOAuthValidateTokenEndPointUrl"];
string ValidateAuthorizationHeaderEndPoint = ConfigurationManager.AppSettings["identityOAuthValidateAuthorizationHeaderEndPointUrl"];
string AccessTokenEndPoint = ConfigurationManager.AppSettings["identityOAuthAccessTokenURL"];
bool UseVersion10A = Convert.ToBoolean(ConfigurationManager.AppSettings["identityOAuthUseVersion10a"]);
string RequestTokenStr = ConfigurationManager.AppSettings["identityOAuthRequestTokenURL"];
string UserAuthStr = ConfigurationManager.AppSettings["identityOAuthAuthorizeUserURL"];
string AccessTokenStr = ConfigurationManager.AppSettings["identityOAuthAccessTokenURL"];
string InvalidateTokenStr = ConfigurationManager.AppSettings["identityOAuthRequestInvalidateTokenURL"];
return new ServiceProviderDescription
{
AccessTokenEndpoint = new MessageReceivingEndpoint(AccessTokenStr, HttpDeliveryMethods.PostRequest),
RequestTokenEndpoint = new MessageReceivingEndpoint(RequestTokenStr, HttpDeliveryMethods.PostRequest),
UserAuthorizationEndpoint = new MessageReceivingEndpoint(UserAuthStr, HttpDeliveryMethods.PostRequest),
TamperProtectionElements = new ITamperProtectionChannelBindingElement[] { new HmacSha1SigningBindingElement() },
ProtocolVersion = DotNetOpenAuth.OAuth.ProtocolVersion.V10a
};
}
void GetUserProfile()
{
var tokenManager = TokenManagerFactory.GetTokenManager(TokenManagerType.InMemoryTokenManager);
tokenManager.ConsumerKey = ConfigurationManager.AppSettings["identityOAuthConsumerKey"];
tokenManager.ConsumerSecret = ConfigurationManager.AppSettings["identityOAuthConsumerSecret"];
var serviceDescription = GetServiceDescription();
var consumer = new WebConsumer(serviceDescription, tokenManager);
var result = consumer.ProcessUserAuthorization(response);
if (result != null) // It is always null
{
}
Well, I checked 10 times and I am pretty sure that all the URLs used to create the ServiceProviderDescription are correct.
Any clue?
Well, finally: check your web.config app keys:
<add key="identityOAuthConsumerKey" value="put the correct data here!!!" />
<add key="identityOAuthConsumerSecret" value="put the correct data here!!!" />
And if you use a hosts file, you have to put the correct site name there as well:
127.0.0.1 site1.host1.com
I am using the Google Analytics API to get web property information from my Analytics account.
When I log into Analytics, though, I only have one website, but through the API I get several (old and deleted sites).
My code is like this:
var provider = new WebServerClient(GoogleAuthenticationServer.Description)
{
ClientIdentifier = _appId,
ClientSecret = _appSecret
};
var auth = new OAuth2Authenticator<WebServerClient>(provider, x => new AuthorizationState { AccessToken = token });
var analyticsService = new AnalyticsService(auth);
var accounts = analyticsService.Management.Accounts.List().Fetch();
foreach (var account in accounts.Items)
{
var webProperties = analyticsService.Management.Webproperties.List(account.Id).Fetch();
// todo: determine if web property is still in use?
}
From code how can I tell which ones are still active?
So, after a bit more digging: it seems there is no flag or anything like that indicating a property has been removed, but if you keep digging into the result set you will notice that, at the profile level, a profile that doesn't have child items seems to be a deleted one.
Which makes sense; I guess there wouldn't be a profile associated with those that have been removed.
var provider = new WebServerClient(GoogleAuthenticationServer.Description)
{
ClientIdentifier = _appId,
ClientSecret = _appSecret
};
var auth = new OAuth2Authenticator<WebServerClient>(provider, x => new AuthorizationState { AccessToken = token });
var analyticsService = new AnalyticsService(auth);
var accounts = analyticsService.Management.Accounts.List().Fetch();
var result = new List<Profile>();
foreach (var account in accounts.Items)
{
var webProperties = analyticsService.Management.Webproperties.List(account.Id).Fetch();
foreach (var webProperty in webProperties.Items)
{
var profiles = analyticsService.Management.Profiles.List(account.Id, webProperty.Id).Fetch();
if (profiles.Items != null && profiles.Items.Any())
{
// these are the ones we want
result.AddRange(profiles.Items);
}
}
}
Can someone please show me how to determine whether a certain file/object exists in an S3 bucket, and display a message saying whether it exists or not?
Basically I want it to:
1) Check a bucket on my S3 account such as testbucket
2) Inside of that bucket, look to see if there is a file with the prefix test_ (test_file.txt or test_data.txt).
3) If that file exists, then display a MessageBox (or Console message) that the file exists, or that the file does not exist.
Can someone please show me how to do this?
Using the AWS SDK for .NET, I currently do something along the lines of:
public bool Exists(string fileKey, string bucketName)
{
try
{
_s3Client.GetObjectMetadata(new GetObjectMetadataRequest()
.WithBucketName(bucketName)
.WithKey(fileKey));
return true;
}
catch (Amazon.S3.AmazonS3Exception ex)
{
if (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
return false;
// status wasn't NotFound, so rethrow the exception
throw;
}
}
It kinda sucks, but it works for now.
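For example, hooked up to the console message the question asks for:
if (Exists("test_file.txt", "testbucket"))
    Console.WriteLine("The file exists.");
else
    Console.WriteLine("The file does not exist.");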
Use the S3FileInfo.Exists method:
using (var client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKey, secretKey))
{
S3FileInfo s3FileInfo = new Amazon.S3.IO.S3FileInfo(client, "your-bucket-name", "your-file-name");
if (s3FileInfo.Exists)
{
// file exists
}
else
{
// file does not exist
}
}
Not sure if this applies to .NET Framework, but the .NET Core version of AWS SDK (v3) only supports async requests, so I had to use a slightly different solution:
/// <summary>
/// Determines whether a file exists within the specified bucket
/// </summary>
/// <param name="bucket">The name of the bucket to search</param>
/// <param name="filePrefix">Match files that begin with this prefix</param>
/// <returns>True if the file exists</returns>
public async Task<bool> FileExists(string bucket, string filePrefix)
{
// Set this to your S3 region (of course)
var region = Amazon.RegionEndpoint.USEast1;
using (var client = new AmazonS3Client(region))
{
var request = new ListObjectsRequest {
BucketName = bucket,
Prefix = filePrefix,
MaxKeys = 1
};
var response = await client.ListObjectsAsync(request, CancellationToken.None);
return response.S3Objects.Any();
}
}
And, if you want to search a folder:
/// <summary>
/// Determines whether a file exists within the specified folder
/// </summary>
/// <param name="bucket">The name of the bucket to search</param>
/// <param name="folder">The name of the folder to search</param>
/// <param name="filePrefix">Match files that begin with this prefix</param>
/// <returns>True if the file exists</returns>
public async Task<bool> FileExists(string bucket, string folder, string filePrefix)
{
return await FileExists(bucket, $"{folder}/{filePrefix}");
}
Usage:
var testExists = await FileExists("testBucket", "test_");
// or...
var testExistsInFolder = await FileExists("testBucket", "testFolder/testSubFolder", "test_");
This solves it:
List the bucket for existing objects and use a prefix like so.
var request = new ListObjectsRequest()
.WithBucketName(_bucketName)
.WithPrefix(keyPrefix);
var response = _amazonS3Client.ListObjects(request);
var exists = response.S3Objects.Count > 0;
foreach (var obj in response.S3Objects) {
// act
}
I know this question is a few years old, but the newer SDK handles this beautifully. If anyone is still searching for this: you are looking for the S3DirectoryInfo class.
using (IAmazonS3 s3Client = new AmazonS3Client(accessKey, secretKey))
{
S3DirectoryInfo s3DirectoryInfo = new Amazon.S3.IO.S3DirectoryInfo(s3Client, "testbucket");
if (s3DirectoryInfo.GetFiles("test*").Any())
{
//file exists -- do something
}
else
{
//file doesn't exist -- do something else
}
}
I know this question is a few years old, but the SDK nowadays handles this in an easier manner.
public async Task<bool> ObjectExistsAsync(string prefix)
{
var response = await _amazonS3.GetAllObjectKeysAsync(_awsS3Configuration.BucketName, prefix, null);
return response.Count > 0;
}
Where _amazonS3 is your IAmazonS3 instance and _awsS3Configuration.BucketName is your bucket name.
You can use your complete key as a prefix.
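For example (the key below is only an illustration):
bool exists = await ObjectExistsAsync("testFolder/test_file.txt");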
I used the following code in C# with AWSSDK.S3 version 3.1.5 (.NET 3.5) to check if the bucket exists:
BasicAWSCredentials credentials = new BasicAWSCredentials("accessKey", "secretKey");
AmazonS3Config configurationAmazon = new AmazonS3Config();
configurationAmazon.RegionEndpoint = S3Region.EU; // or you can use ServiceUrl
AmazonS3Client s3Client = new AmazonS3Client(credentials, configurationAmazon);
S3DirectoryInfo directoryInfo = new S3DirectoryInfo(s3Client, bucketName);
bool bucketExists = directoryInfo.Exists; // true if the bucket exists, otherwise false
I used the following code (C#, AWSSDK.S3 version 3.1.5, .NET 3.5) to check whether a file exists.
Option 1:
S3FileInfo info = new S3FileInfo(s3Client, "bucketName", "key");
bool fileExists = info.Exists; // true if the key exists, otherwise false
Option 2:
ListObjectsRequest request = new ListObjectsRequest();
try
{
request.BucketName = "bucketName";
request.Prefix = "prefix"; // or part of the key
request.MaxKeys = 1; // max number of keys to return
ListObjectsResponse response = s3Client.ListObjects(request);
return response.S3Objects.Count > 0;
}
catch (AmazonS3Exception)
{
// handle or log the error as appropriate
return false;
}
I'm not familiar with C#, but I use this method from Java (conversion to C# is straightforward):
public boolean exists(AmazonS3 s3, String bucket, String key) {
ObjectListing list = s3.listObjects(bucket, key);
return list.getObjectSummaries().size() > 0;
}
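A rough C# equivalent (an untested sketch against the synchronous .NET Framework SDK):
public bool Exists(IAmazonS3 s3, string bucket, string key)
{
    var response = s3.ListObjects(new ListObjectsRequest
    {
        BucketName = bucket,
        Prefix = key,
        MaxKeys = 1
    });
    return response.S3Objects.Count > 0;
}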
my 2 cents
public async Task<bool> DoesKeyExistsAsync(string key)
{
ListObjectsResponse response = null;
try
{
response = await _s3Client.ListObjectsAsync(new ListObjectsRequest { BucketName = _settings.BucketName, Prefix = key });
}
catch (Exception ex)
{
_logger.LogError(ex, $"Error while checking key {key}");
return false;
}
return (response?.S3Objects?.Count > 0);
}
(This one is PHP, not C#.)
$s3 = new S3(S3KEY, S3SECRET, false);
$res = $s3->getObjectInfo($bucketName, $filename);
It will return an array if the file exists.
try this one:
NameValueCollection appConfig = ConfigurationManager.AppSettings;
AmazonS3 s3Client = AWSClientFactory.CreateAmazonS3Client(
appConfig["AWSAccessKey"],
appConfig["AWSSecretKey"],
Amazon.RegionEndpoint.USEast1
);
S3DirectoryInfo source = new S3DirectoryInfo(s3Client, "BUCKET_NAME", "Key");
if (source.Exists)
{
// do your stuff
}
using Amazon;
using Amazon.S3;
using Amazon.S3.IO;
using Amazon.S3.Model;
string accessKey = "xxxxx";
string secretKey = "xxxxx";
RegionEndpoint regionEndpoint = RegionEndpoint.EUWest1;
string bucketName = "Bucket1";
string filePath = "https://Bucket1/users/delivery/file.json";
public bool FileExistsOnS3(string filePath)
{
try
{
Uri myUri = new Uri(filePath);
string absolutePath = myUri.AbsolutePath; // /users/delivery/file.json
string key = absolutePath.Substring(1); // users/delivery/file.json
using(var client = AWSClientFactory.CreateAmazonS3Client(accessKey, secretKey, regionEndpoint))
{
S3FileInfo file = new S3FileInfo(client, bucketName, key);
if (file.Exists)
{
return true;
// custom logic
}
else
{
return false;
// custom logic
}
}
}
catch(AmazonS3Exception ex)
{
return false;
}
}
There is an overload of GetFileSystemInfos that takes a search pattern.
Notice this line has filename.*:
var files = s3DirectoryInfo.GetFileSystemInfos("filename.*");
public bool Check()
{
var awsCredentials = new Amazon.Runtime.BasicAWSCredentials("AccessKey", "SecretKey");
using (var client = new AmazonS3Client(awsCredentials, Amazon.RegionEndpoint.USEast1))
{
S3DirectoryInfo s3DirectoryInfo = new S3DirectoryInfo(client, bucketName, "YourFilePath");
var files = s3DirectoryInfo.GetFileSystemInfos("filename.*");
return files.Any(); // true if at least one matching file exists
}
}