Commit 86d0b81

test: improvements to requester pays tests and bucket metadata updates (#2414)

* test: improvements to requester pays tests and bucket metadata updates

* remove console.log statements

* adjust wait time slightly

* add retries to failed tests
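
The recurring patterns in this change are: retry flaky tests via Mocha's `this.retries()`, pause between tests so bucket metadata updates stay under the documented one-update-per-second limit, and replace `.then()` chains with `async`/`await`. A minimal standalone sketch of the first two patterns follows; it assumes the Mocha globals (`describe`, `it`, `beforeEach`) used by the system tests are available, and the suite title and test body are placeholders — only the 1250 ms constant and the retries/delay calls mirror the diff below.

import * as assert from 'assert';

// Mirrors BUCKET_METADATA_UPDATE_WAIT_TIME from the diff: buckets allow at most
// one metadata update per second, so wait slightly longer than that between tests.
const BUCKET_METADATA_UPDATE_WAIT_TIME = 1250; // 1.25s

// A regular function (not an arrow function) is required so that `this` is the
// Mocha context and this.retries() is available.
// eslint-disable-next-line prefer-arrow-callback
describe('rate-limited bucket metadata updates (sketch)', function () {
  // Re-run a failing test up to 3 times before reporting it as failed.
  this.retries(3);

  beforeEach(async () => {
    // Wait out the per-bucket metadata update rate limit between tests.
    await new Promise(resolve =>
      setTimeout(resolve, BUCKET_METADATA_UPDATE_WAIT_TIME)
    );
  });

  it('placeholder: updates bucket metadata', async () => {
    // A real test would perform a metadata update here (e.g. via setMetadata()).
    assert.ok(true);
  });
});

The 1.25 s value leaves a small buffer above the 1 s limit, which is what the "adjust wait time slightly" commit refers to.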
ddelgrosso1 committed Mar 6, 2024
1 parent 55ff53b commit 86d0b81
Showing 1 changed file with 60 additions and 68 deletions: system-test/storage.ts
@@ -60,15 +60,18 @@ const RUNNING_IN_VPCSC = !!process.env['GOOGLE_CLOUD_TESTS_IN_VPCSC'];
 
 const UNIFORM_ACCESS_TIMEOUT = 60 * 1000; // 60s see: https://cloud.google.com/storage/docs/consistency#eventually_consistent_operations
 const UNIFORM_ACCESS_WAIT_TIME = 5 * 1000; // 5s
-const BUCKET_METADATA_UPDATE_WAIT_TIME = 1000; // 1s buckets have a max rate of one metadata update per second.
+const BUCKET_METADATA_UPDATE_WAIT_TIME = 1250; // 1.25s buckets have a max rate of one metadata update per second.
 
 // block all attempts to chat with the metadata server (kokoro runs on GCE)
 nock('http://metadata.google.internal')
   .get(() => true)
   .replyWithError({code: 'ENOTFOUND'})
   .persist();
 
-describe('storage', () => {
+// eslint-disable-next-line prefer-arrow-callback
+describe('storage', function () {
+  this.retries(3);
+
   const USER_ACCOUNT = 'user-spsawchuk@gmail.com';
   const TESTS_PREFIX = `storage-tests-${shortUUID()}-`;
   const RETENTION_DURATION_SECONDS = 10;
@@ -110,23 +113,18 @@ describe('storage', () => {
     },
   };
 
-  before(() => {
-    return bucket
-      .create()
-      .then(() => {
-        return pubsub.createTopic(generateName());
-      })
-      .then(data => {
-        topic = data[0];
-        return topic.iam.setPolicy({
-          bindings: [
-            {
-              role: 'roles/pubsub.editor',
-              members: ['allUsers'],
-            },
-          ],
-        });
-      });
+  before(async () => {
+    await bucket.create();
+    const data = await pubsub.createTopic(generateName());
+    topic = data[0];
+    await topic.iam.setPolicy({
+      bindings: [
+        {
+          role: 'roles/pubsub.editor',
+          members: ['allUsers'],
+        },
+      ],
+    });
   });
 
   after(() => {
@@ -231,8 +229,10 @@ describe('storage', () => {
   describe('buckets', () => {
     // Some bucket update operations have a rate limit.
    // Introduce a delay between tests to avoid getting an error.
-    beforeEach(done => {
-      setTimeout(done, 1000);
+    beforeEach(async () => {
+      await new Promise(resolve =>
+        setTimeout(resolve, BUCKET_METADATA_UPDATE_WAIT_TIME)
+      );
     });
 
     it('should get access controls', async () => {
@@ -296,6 +296,9 @@
         entity: 'allUsers',
         role: 'READER',
       });
+      await new Promise(resolve =>
+        setTimeout(resolve, BUCKET_METADATA_UPDATE_WAIT_TIME)
+      );
       await bucket.acl.delete({entity: 'allUsers'});
     });
 
@@ -319,6 +322,9 @@
     it('should make a bucket private', async () => {
       try {
         await bucket.makePublic();
+        await new Promise(resolve =>
+          setTimeout(resolve, BUCKET_METADATA_UPDATE_WAIT_TIME)
+        );
         await bucket.makePrivate();
         assert.rejects(bucket.acl.get({entity: 'allUsers'}), err => {
           assert.strictEqual((err as ApiError).code, 404);
@@ -1727,40 +1733,31 @@ describe('storage', () => {
     //
     // - file.save()
     //   -> file.createWriteStream()
-    before(() => {
+    before(async () => {
       file = bucketNonAllowList.file(generateName());
 
-      return bucket
-        .enableRequesterPays()
-        .then(() => bucket.iam.getPolicy())
-        .then(data => {
-          const policy = data[0];
-
-          // Allow an absolute or relative path (from project root)
-          // for the key file.
-          let key2 = process.env.GCN_STORAGE_2ND_PROJECT_KEY;
-          if (key2 && key2.charAt(0) === '.') {
-            key2 = `${getDirName()}/../../../${key2}`;
-          }
-
-          // Get the service account for the "second" account (the
-          // one that will read the requester pays file).
-          const clientEmail = JSON.parse(
-            fs.readFileSync(key2!, 'utf-8')
-          ).client_email;
-
-          policy.bindings.push({
-            role: 'roles/storage.admin',
-            members: [`serviceAccount:${clientEmail}`],
-          });
-
-          return bucket.iam.setPolicy(policy);
-        })
-        .then(() => file.save('abc', USER_PROJECT_OPTIONS))
-        .then(() => topic.getMetadata())
-        .then(data => {
-          topicName = data[0].name!;
-        });
+      await bucket.enableRequesterPays();
+      const data = await bucket.iam.getPolicy();
+      const policy = data[0];
+      // Allow an absolute or relative path (from project root)
+      // for the key file.
+      let key2 = process.env.GCN_STORAGE_2ND_PROJECT_KEY;
+      if (key2 && key2.charAt(0) === '.') {
+        key2 = `${getDirName()}/../../../${key2}`;
+      }
+      // Get the service account for the "second" account (the
+      // one that will read the requester pays file).
+      const clientEmail = JSON.parse(
+        fs.readFileSync(key2!, 'utf-8')
+      ).client_email;
+      policy.bindings.push({
+        role: 'roles/storage.admin',
+        members: [`serviceAccount:${clientEmail}`],
+      });
+      await bucket.iam.setPolicy(policy);
+      await file.save('abc', USER_PROJECT_OPTIONS);
+      const data_2 = await topic.getMetadata();
+      topicName = data_2[0].name!;
     });
 
     // This acts as a test for the following methods:
@@ -1788,7 +1785,7 @@ describe('storage', () => {
     type requesterPaysFunction<
       T = {} | typeof USER_PROJECT_OPTIONS,
       R = {} | void,
-    > = (options: T) => Promise<R>;
+    > = (options?: T) => Promise<R>;
 
     /**
      * Accepts a function and runs 2 tests - a test where the requester pays
@@ -1805,20 +1802,15 @@
       const failureMessage =
        'Bucket is a requester pays bucket but no user project provided.';
 
-      let expectedError: unknown = null;
-
-      try {
-        // Should raise an error on requester pays bucket
-        await testFunction({});
-      } catch (e) {
-        expectedError = e;
-      }
-
-      assert(expectedError instanceof Error);
-      assert(
-        expectedError.message.includes(failureMessage),
-        `Expected '${expectedError.message}' to include '${failureMessage}'`
-      );
+      await assert.rejects(testFunction(), err => {
+        assert(
+          (err as Error).message.includes(failureMessage),
+          `Expected '${
+            (err as Error).message
+          }' to include '${failureMessage}'`
+        );
+        return true;
+      });
 
       // Validate the desired functionality
       const results = await testFunction(USER_PROJECT_OPTIONS);
