Fix bucket limit (#5216)

* Fix bucket limit

* Trigger tuist workflows when cloud is changed

---------

Co-authored-by: Daniele Formichelli <df@bendingspoons.com>
Authored by Marek Fořt on 2023-06-17 13:07:12 +02:00; committed by GitHub.
Parent commit: 202cf15eae
Commit: 2c944afa50
44 changed files with 977 additions and 755 deletions

View File

@ -18,6 +18,7 @@ on:
- projects/tuist/fixtures/**
- .package.resolved
- projects/fourier/**
- projects/cloud/**
- .github/workflows/tuist.yml
concurrency:

View File

@ -16,7 +16,7 @@ import DashboardPage from './pages/dashboard/DashboardPage';
import CommandEventDetailPage from './pages/commandEventDetail/CommandEventDetailPage';
import Home from './Home';
import { useMeQuery } from '@/graphql/types';
import RemoteCachePage from './pages/remote-cache/RemoteCachePage';
import { RemoteCachePage } from './pages/remote-cache/RemoteCachePage';
import OrganizationPage from './pages/organization/OrganizationPage';
import TuistCloudAppProvider from './TuistCloudAppProvider';

View File

@ -16,14 +16,17 @@ import {
Link,
Text,
Banner,
Modal,
ContextualSaveBar,
} from '@shopify/polaris';
import RemoteCachePageStore from './RemoteCachePageStore';
import { observer } from 'mobx-react-lite';
import { useApolloClient } from '@apollo/client';
import { HomeStoreContext } from '@/stores/HomeStore';
import { runInAction } from 'mobx';
import { CreateBucketModal, EditBucketForm } from './components';
const RemoteCachePage = observer(() => {
export const RemoteCachePage = observer(() => {
const client = useApolloClient();
const { projectStore } = useContext(HomeStoreContext);
const [remoteCachePageStore] = useState(
@ -34,62 +37,6 @@ const RemoteCachePage = observer(() => {
remoteCachePageStore.load();
}, [projectStore.project]);
const handleSelectChange = useCallback(
(newValue) => {
remoteCachePageStore.handleSelectOption(newValue);
},
[remoteCachePageStore],
);
const handleBucketNameChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.bucketName = newValue;
});
},
[remoteCachePageStore],
);
const handleRegionChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.region = newValue;
});
},
[remoteCachePageStore],
);
const handleAccessKeyIdChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.accessKeyId = newValue;
});
},
[remoteCachePageStore],
);
const handleSecretAccessKeyChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.secretAccessKey = newValue;
});
},
[remoteCachePageStore],
);
const handleRemoveSecretAccessKey = useCallback(() => {
remoteCachePageStore.removeAccessKey();
}, [remoteCachePageStore]);
const handleApplyChangesClicked = useCallback(() => {
if (projectStore.project == undefined) {
return;
}
remoteCachePageStore.applyChangesButtonClicked(
projectStore.project.account.id,
);
}, [remoteCachePageStore, projectStore]);
const clearCacheError =
remoteCachePageStore.remoteCacheStorageCleanError ? (
<Banner
@ -104,97 +51,61 @@ const RemoteCachePage = observer(() => {
</Banner>
) : null;
const handleSelectChange = useCallback(
(newValue) => {
remoteCachePageStore.handleSelectOption(newValue);
},
[remoteCachePageStore],
);
const bucket = remoteCachePageStore.isDefaultBucket ? (
<Text variant="bodyMd" color="subdued" as="p">
Default bucket created by Tuist Cloud. You can configure your
own if you want to own the data.
</Text>
) : (
<EditBucketForm
remoteCachePageStore={remoteCachePageStore}
projectStore={projectStore}
/>
);
return (
<Page title="Remote Cache">
<Card title="S3 Bucket">
<CreateBucketModal
onClose={() => {
runInAction(() => {
remoteCachePageStore.isCreatingBucket = false;
});
}}
onCreateBucket={(bucket) => {
remoteCachePageStore.bucketCreated(bucket);
}}
open={remoteCachePageStore.isCreatingBucket}
/>
<Card
title="S3 Bucket"
actions={[
{
content: 'Create new bucket',
onAction: () => {
runInAction(() => {
remoteCachePageStore.isCreatingBucket = true;
});
},
},
]}
>
<Card.Section>
<FormLayout>
<Stack vertical>
<Select
label="Current"
options={remoteCachePageStore.bucketOptions}
onChange={handleSelectChange}
value={remoteCachePageStore.selectedOption}
/>
{!remoteCachePageStore.isDefaultBucket && (
<TextField
type="text"
label="Bucket name"
value={remoteCachePageStore.bucketName}
onChange={handleBucketNameChange}
autoComplete="off"
/>
)}
{!remoteCachePageStore.isDefaultBucket && (
<TextField
type="text"
label="Region"
value={remoteCachePageStore.region}
onChange={handleRegionChange}
autoComplete="off"
/>
)}
{!remoteCachePageStore.isDefaultBucket && (
<TextField
type="text"
label="Access key ID"
value={remoteCachePageStore.accessKeyId}
onChange={handleAccessKeyIdChange}
autoComplete="off"
/>
)}
{!remoteCachePageStore.isDefaultBucket && (
<Stack alignment="trailing" distribution="fill">
<TextField
disabled={
remoteCachePageStore.isSecretAccessKeyTextFieldDisabled
}
type="password"
label="Secret access key"
value={remoteCachePageStore.secretAccessKey}
onChange={handleSecretAccessKeyChange}
autoComplete="password"
/>
{remoteCachePageStore.isCreatingBucket === false && (
<Button onClick={handleRemoveSecretAccessKey}>
Remove access key
</Button>
)}
</Stack>
)}
{!remoteCachePageStore.isDefaultBucket && (
<Button
primary
loading={
remoteCachePageStore.isApplyChangesButtonLoading
}
disabled={
remoteCachePageStore.isApplyChangesButtonDisabled
}
onClick={handleApplyChangesClicked}
>
{remoteCachePageStore.isCreatingBucket
? 'Create bucket'
: 'Edit bucket'}
</Button>
)}
{remoteCachePageStore.isDefaultBucket && (
<Text variant="bodyMd" color="subdued" as="p">
Default bucket created by Tuist Cloud. You can
configure your own if you want to own the data.
</Text>
)}
{!remoteCachePageStore.isDefaultBucket && (
<FooterHelp>
Learn more about getting{' '}
<Link
external={true}
url="https://docs.aws.amazon.com/powershell/latest/userguide/pstools-appendix-sign-up.html"
>
access key to your bucket
</Link>
</FooterHelp>
)}
</FormLayout>
{bucket}
</Stack>
</Card.Section>
<Card.Section title="Clear cache">
<Stack spacing="tight" vertical>
@ -236,5 +147,3 @@ const RemoteCachePage = observer(() => {
</Page>
);
});
export default RemoteCachePage;

View File

@ -1,8 +1,6 @@
import {
ChangeRemoteCacheStorageDocument,
ChangeRemoteCacheStorageMutation,
CreateS3BucketDocument,
CreateS3BucketMutation,
S3BucketsDocument,
S3BucketsQuery,
UpdateS3BucketMutation,
@ -22,12 +20,12 @@ class RemoteCachePageStore {
accessKeyId = '';
secretAccessKey = '';
region = '';
isDefaultBucket = false;
s3Buckets: S3Bucket[] = [];
isApplyChangesButtonLoading = false;
isCopyProjectButtonLoading = false;
isRemoteCacheStorageCleanLoading = false;
remoteCacheStorageCleanError: string | null = null;
isCreatingBucket = false;
client: ApolloClient<object>;
projectStore: ProjectStore;
@ -41,28 +39,29 @@ class RemoteCachePageStore {
makeAutoObservable(this);
}
get isDefaultBucket() {
return this.projectStore.project?.remoteCacheStorage == null;
}
get isSecretAccessKeyTextFieldDisabled(): boolean {
if (this.projectStore.project == null) {
return true;
}
return (
this.selectedOption !== 'new' &&
this.secretAccessKey ===
this.projectStore.project.remoteCacheStorage?.secretAccessKey
this.projectStore.project.remoteCacheStorage?.secretAccessKey
);
}
get bucketOptions(): SelectOption[] {
return [
{
label: 'Create new bucket',
value: 'new',
label: 'Default bucket',
value: 'default',
},
...this.s3Buckets.map((s3Bucket) => {
return {
label: s3Bucket.isDefault
? 'Default bucket'
: s3Bucket.name,
label: s3Bucket.name,
value: s3Bucket.name,
};
}),
@ -74,7 +73,7 @@ class RemoteCachePageStore {
this.projectStore.project == null ||
this.projectStore.project.remoteCacheStorage == null
) {
return 'new';
return 'default';
}
return this.projectStore.project.remoteCacheStorage.name;
}
@ -96,7 +95,7 @@ class RemoteCachePageStore {
async clearCache() {
this.remoteCacheStorageCleanError = null;
if (!this.projectStore.project?.remoteCacheStorage) {
if (!this.projectStore.project) {
return;
}
this.isRemoteCacheStorageCleanLoading = true;
@ -106,7 +105,7 @@ class RemoteCachePageStore {
mutation: ClearRemoteCacheStorageDocument,
variables: {
input: {
id: this.projectStore.project.remoteCacheStorage.id,
projectSlug: this.projectStore.project.slug,
},
},
});
@ -130,17 +129,14 @@ class RemoteCachePageStore {
}
async changeRemoteCacheStorage() {
if (
this.projectStore.project == null ||
this.projectStore.project.remoteCacheStorage == null
) {
if (this.projectStore.project == null) {
return;
}
await this.client.mutate<ChangeRemoteCacheStorageMutation>({
mutation: ChangeRemoteCacheStorageDocument,
variables: {
input: {
id: this.projectStore.project.remoteCacheStorage.id,
id: this.projectStore.project.remoteCacheStorage?.id,
projectId: this.projectStore.project.id,
},
},
@ -151,18 +147,11 @@ class RemoteCachePageStore {
if (this.projectStore.project == null) {
return;
}
if (option == 'new') {
this.projectStore.project.remoteCacheStorage = null;
this.bucketName = '';
this.accessKeyId = '';
this.secretAccessKey = '';
this.region = '';
this.isDefaultBucket = false;
}
const s3Bucket = this.s3Buckets.find(
(s3Bucket) => s3Bucket.name === option,
);
this.projectStore.project.remoteCacheStorage = s3Bucket ?? null;
this.changeRemoteCacheStorage();
if (s3Bucket == null) {
return;
}
@ -170,8 +159,6 @@ class RemoteCachePageStore {
this.accessKeyId = s3Bucket.accessKeyId;
this.secretAccessKey = s3Bucket.secretAccessKey;
this.region = s3Bucket.region;
this.isDefaultBucket = s3Bucket.isDefault;
this.changeRemoteCacheStorage();
}
get isApplyChangesButtonDisabled() {
@ -183,10 +170,6 @@ class RemoteCachePageStore {
);
}
get isCreatingBucket() {
return this.selectedOption === 'new';
}
async load() {
if (this.projectStore.project == null) {
return;
@ -212,86 +195,63 @@ class RemoteCachePageStore {
this.region = '';
return;
}
const {
name,
accessKeyId,
secretAccessKey,
region,
isDefault,
} = this.projectStore.project.remoteCacheStorage;
const { name, accessKeyId, secretAccessKey, region } =
this.projectStore.project.remoteCacheStorage;
this.bucketName = name;
this.accessKeyId = accessKeyId;
this.secretAccessKey = secretAccessKey;
this.region = region;
this.isDefaultBucket = isDefault;
});
}
bucketCreated(bucket: S3Bucket) {
this.s3Buckets.push(bucket);
this.isCreatingBucket = false;
this.bucketName = bucket.name;
this.accessKeyId = bucket.accessKeyId;
this.secretAccessKey = bucket.secretAccessKey;
this.region = bucket.region;
}
async applyChangesButtonClicked(accountId: string) {
this.isApplyChangesButtonLoading = true;
if (this.isCreatingBucket) {
const { data } =
await this.client.mutate<CreateS3BucketMutation>({
mutation: CreateS3BucketDocument,
variables: {
input: {
name: this.bucketName,
accessKeyId: this.accessKeyId,
secretAccessKey: this.secretAccessKey,
region: this.region,
accountId,
},
},
});
if (this.projectStore.project == null || data == null) {
return;
}
const s3Bucket = mapS3Bucket(data.createS3Bucket);
runInAction(() => {
this.isApplyChangesButtonLoading = false;
if (this.projectStore.project != null) {
this.projectStore.project.remoteCacheStorage = s3Bucket;
}
this.s3Buckets.push(s3Bucket);
});
} else {
if (this.projectStore.project?.remoteCacheStorage == null) {
return;
}
const { data } =
await this.client.mutate<UpdateS3BucketMutation>({
mutation: UpdateS3BucketDocument,
variables: {
input: {
id: this.projectStore.project.remoteCacheStorage.id,
name: this.bucketName,
accessKeyId: this.accessKeyId,
secretAccessKey: this.secretAccessKey,
region: this.region,
},
},
});
if (data == null) {
return;
}
const s3Bucket = mapS3Bucket(data.updateS3Bucket);
runInAction(() => {
this.isApplyChangesButtonLoading = false;
if (
this.projectStore.project == null ||
this.projectStore.project.remoteCacheStorage == null
) {
return;
}
const previousId =
this.projectStore.project.remoteCacheStorage.id;
this.s3Buckets = this.s3Buckets.filter(
(bucket) => bucket.id !== previousId,
);
this.s3Buckets.push(s3Bucket);
this.projectStore.project.remoteCacheStorage = s3Bucket;
});
if (this.projectStore.project?.remoteCacheStorage == null) {
return;
}
const { data } = await this.client.mutate<UpdateS3BucketMutation>(
{
mutation: UpdateS3BucketDocument,
variables: {
input: {
id: this.projectStore.project.remoteCacheStorage.id,
name: this.bucketName,
accessKeyId: this.accessKeyId,
secretAccessKey: this.secretAccessKey,
region: this.region,
},
},
},
);
if (data == null) {
return;
}
const s3Bucket = mapS3Bucket(data.updateS3Bucket);
runInAction(() => {
this.isApplyChangesButtonLoading = false;
if (
this.projectStore.project == null ||
this.projectStore.project.remoteCacheStorage == null
) {
return;
}
const previousId =
this.projectStore.project.remoteCacheStorage.id;
this.s3Buckets = this.s3Buckets.filter(
(bucket) => bucket.id !== previousId,
);
this.s3Buckets.push(s3Bucket);
this.projectStore.project.remoteCacheStorage = s3Bucket;
});
}
}
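
The heart of this refactor is that the store stops keeping an isDefaultBucket flag in sync by hand: it derives it from whether the project has a remoteCacheStorage attached, and the select options start with a fixed "Default bucket" entry instead of the old "Create new bucket" one. The following is only an illustrative sketch of that derived state (not part of the commit), with simplified stand-in types for the real ProjectStore and S3Bucket models:

import { makeAutoObservable } from 'mobx';

interface SketchS3Bucket {
  id: string;
  name: string;
}

interface SketchProjectStore {
  project: { remoteCacheStorage: SketchS3Bucket | null } | null;
}

interface SketchSelectOption {
  label: string;
  value: string;
}

export class BucketSelectionSketch {
  s3Buckets: SketchS3Bucket[] = [];
  projectStore: SketchProjectStore;

  constructor(projectStore: SketchProjectStore) {
    this.projectStore = projectStore;
    makeAutoObservable(this);
  }

  // Derived, not stored: the project is on the default bucket whenever it has
  // no explicit remote cache storage attached.
  get isDefaultBucket(): boolean {
    return this.projectStore.project?.remoteCacheStorage == null;
  }

  // 'Default bucket' replaces the old 'Create new bucket' entry; user-owned
  // buckets follow by name.
  get bucketOptions(): SketchSelectOption[] {
    return [
      { label: 'Default bucket', value: 'default' },
      ...this.s3Buckets.map((bucket) => ({
        label: bucket.name,
        value: bucket.name,
      })),
    ];
  }

  get selectedOption(): string {
    return this.projectStore.project?.remoteCacheStorage?.name ?? 'default';
  }
}

Because these are MobX computed getters, switching or clearing the project's remoteCacheStorage updates the selection and the default-bucket UI without any manual flag bookkeeping.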

View File

@ -1,6 +1,6 @@
import ProjectStore from '@/stores/ProjectStore';
import RemoteCachePageStore from '../RemoteCachePageStore';
import { S3Bucket } from '@/models';
import { S3Bucket, Project } from '@/models';
import { S3BucketInfoFragment } from '@/graphql/types';
import { copyToClipboard } from '@/utilities/copyToClipboard';
@ -9,15 +9,17 @@ jest.mock('@/stores/ProjectStore');
jest.mock('@/utilities/copyToClipboard');
describe('RemoteCachePageStore', () => {
const client = {
query: jest.fn(),
mutate: jest.fn(),
} as any;
const projectStore = {} as ProjectStore;
let projectStore: ProjectStore;
let client: any;
let mockProject: Project;
beforeEach(() => {
jest.clearAllMocks();
projectStore.project = {
client = {
query: jest.fn(),
mutate: jest.fn(),
} as any;
mockProject = {
id: 'project',
account: {
id: 'account-id',
@ -32,6 +34,9 @@ describe('RemoteCachePageStore', () => {
name: 'project',
slug: 'org/project',
};
projectStore = {
project: mockProject,
} as ProjectStore;
});
it('keeps apply changes button disabled when not all fields are filled', () => {
@ -70,7 +75,7 @@ describe('RemoteCachePageStore', () => {
).toBeFalsy();
});
it('returns new as selected option when remoteCacheStorage is null', () => {
it('isDefaultBucket when remoteCacheStorage is null', () => {
// Given
projectStore.project = {
id: 'project',
@ -95,19 +100,63 @@ describe('RemoteCachePageStore', () => {
);
// Then
expect(remoteCachePageStore.selectedOption).toEqual('new');
expect(remoteCachePageStore.isCreatingBucket).toBeTruthy();
expect(remoteCachePageStore.isDefaultBucket).toBe(true);
expect(remoteCachePageStore.selectedOption).toBe('default');
expect(remoteCachePageStore.isCreatingBucket).toBe(false);
});
it('creates a new bucket', async () => {
// Given
const remoteCachePageStore = new RemoteCachePageStore(
client,
projectStore,
);
client.mutate.mockReturnValueOnce({
data: {
createS3Bucket: {
accessKeyId: 'access-key-id',
accountId: 'account-id',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
__typename: 'S3Bucket',
},
},
});
const expectedS3Bucket: S3Bucket = {
accessKeyId: 'access-key-id',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
};
// When
remoteCachePageStore.projectStore.project = {
...mockProject,
remoteCacheStorage: expectedS3Bucket,
};
remoteCachePageStore.bucketCreated(expectedS3Bucket);
// Then
expect(remoteCachePageStore.s3Buckets).toEqual([
expectedS3Bucket,
]);
expect(remoteCachePageStore.selectedOption).toEqual('S3 bucket');
});
it('returns remote cache storage name if it is set in the project', () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'accessKeyId',
id: 'id',
name: 'bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
projectStore.project = {
...mockProject,
remoteCacheStorage: {
accessKeyId: 'accessKeyId',
id: 'id',
name: 'bucket',
secretAccessKey: 'secret',
region: 'region',
},
};
// When
@ -120,82 +169,6 @@ describe('RemoteCachePageStore', () => {
expect(remoteCachePageStore.selectedOption).toEqual('bucket');
});
it('changes isDefaultBucket to false when going from a default bucket to creating a new one', async () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'accessKeyId',
id: 'id',
name: 'bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: true,
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
projectStore,
);
client.query.mockResolvedValueOnce({
data: {
s3Buckets: [
{
accessKeyId: 'key-id-1',
accountId: 'account-id-1',
id: 'id',
name: 'S3 bucket one',
region: 'region',
isDefault: true,
__typename: 'S3Bucket',
},
] as S3BucketInfoFragment[],
},
});
await remoteCachePageStore.load();
// When
remoteCachePageStore.handleSelectOption('new');
// Then
expect(remoteCachePageStore.isDefaultBucket).toEqual(false);
});
it('sets isDefaultBucket to true', async () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'accessKeyId',
id: 'id',
name: 'bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: true,
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
projectStore,
);
client.query.mockResolvedValueOnce({
data: {
s3Buckets: [
{
accessKeyId: 'key-id-1',
accountId: 'account-id-1',
id: 'id',
name: 'S3 bucket one',
region: 'region',
isDefault: true,
__typename: 'S3Bucket',
},
] as S3BucketInfoFragment[],
},
});
// When
await remoteCachePageStore.load();
// Then
expect(remoteCachePageStore.isDefaultBucket).toEqual(true);
});
it('copy pastes project token of the remote cache', () => {
// Given
projectStore.project = {
@ -234,15 +207,47 @@ describe('RemoteCachePageStore', () => {
);
});
it('updates bucket after option is selected', async () => {
// Given
projectStore.project = {
...mockProject,
remoteCacheStorage: {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
},
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
projectStore,
);
expect(remoteCachePageStore.selectedOption).toBe('S3 bucket');
// When
remoteCachePageStore.handleSelectOption('default');
// Then
expect(remoteCachePageStore.selectedOption).toBe('default');
expect(client.mutate).toHaveBeenCalledWith({
variables: {
input: { projectId: 'project' },
},
});
});
it('loads remote cache page', async () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket one',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
projectStore.project = {
...mockProject,
remoteCacheStorage: {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket one',
secretAccessKey: 'secret',
region: 'region',
},
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
@ -296,8 +301,8 @@ describe('RemoteCachePageStore', () => {
]);
expect(remoteCachePageStore.bucketOptions).toEqual([
{
label: 'Create new bucket',
value: 'new',
label: 'Default bucket',
value: 'default',
},
{
label: 'S3 bucket one',
@ -312,13 +317,15 @@ describe('RemoteCachePageStore', () => {
it('resets fields when changing reloading and project has no remote cache storage', async () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket one',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
projectStore.project = {
...mockProject,
remoteCacheStorage: {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket one',
secretAccessKey: 'secret',
region: 'region',
},
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
@ -339,8 +346,10 @@ describe('RemoteCachePageStore', () => {
},
});
await remoteCachePageStore.load();
remoteCachePageStore.projectStore.project!.remoteCacheStorage =
null;
remoteCachePageStore.projectStore.project = {
...mockProject,
remoteCacheStorage: null,
};
client.query.mockResolvedValueOnce({
data: {
s3Buckets: [],
@ -357,68 +366,23 @@ describe('RemoteCachePageStore', () => {
expect(remoteCachePageStore.s3Buckets).toEqual([]);
expect(remoteCachePageStore.bucketOptions).toEqual([
{
label: 'Create new bucket',
value: 'new',
label: 'Default bucket',
value: 'default',
},
]);
});
it('creates a new bucket', async () => {
// Given
const remoteCachePageStore = new RemoteCachePageStore(
client,
projectStore,
);
remoteCachePageStore.bucketName = 'S3 bucket';
remoteCachePageStore.secretAccessKey = 'secret';
remoteCachePageStore.accessKeyId = 'access-key-id';
remoteCachePageStore.region = 'region';
client.mutate.mockReturnValueOnce({
data: {
createS3Bucket: {
accessKeyId: 'access-key-id',
accountId: 'account-id',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
__typename: 'S3Bucket',
},
},
});
const expectedS3Bucket: S3Bucket = {
accessKeyId: 'access-key-id',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
};
// When
await remoteCachePageStore.applyChangesButtonClicked(
'account-id',
);
// Then
expect(
remoteCachePageStore.projectStore.project?.remoteCacheStorage,
).toEqual(expectedS3Bucket);
expect(remoteCachePageStore.s3Buckets).toEqual([
expectedS3Bucket,
]);
});
it('updates the current bucket', async () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
projectStore.project = {
...mockProject,
remoteCacheStorage: {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
},
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
@ -430,7 +394,6 @@ describe('RemoteCachePageStore', () => {
name: 'new name',
secretAccessKey: 'new secret',
region: 'region',
isDefault: false,
};
client.mutate.mockReturnValueOnce({
data: {
@ -441,7 +404,6 @@ describe('RemoteCachePageStore', () => {
name: expectedS3Bucket.name,
secretAccessKey: expectedS3Bucket.secretAccessKey,
region: expectedS3Bucket.region,
isDefault: expectedS3Bucket.isDefault,
__typename: 'S3Bucket',
},
},
@ -482,13 +444,15 @@ describe('RemoteCachePageStore', () => {
it('clears remote cache', async () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
projectStore.project = {
...mockProject,
remoteCacheStorage: {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
},
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
@ -505,15 +469,42 @@ describe('RemoteCachePageStore', () => {
expect(client.mutate).toHaveBeenCalled();
});
it('clears remote cache with default bucket', async () => {
// Given
projectStore.project = {
...mockProject,
remoteCacheStorage: null,
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
projectStore,
);
client.mutate.mockReturnValueOnce({
data: {},
});
// When
await remoteCachePageStore.clearCache();
// Then
expect(client.mutate).toHaveBeenCalledWith({
variables: {
input: { projectSlug: 'org/project' },
},
});
});
it('sets a remote cache storage clear error', async () => {
// Given
projectStore.project!.remoteCacheStorage = {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
isDefault: false,
projectStore.project = {
...mockProject,
remoteCacheStorage: {
accessKeyId: 'key-id-1',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
},
};
const remoteCachePageStore = new RemoteCachePageStore(
client,
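
One thing the reworked tests above rely on is a fixture pattern: a fresh mockProject is built in beforeEach and each test derives its own variant by object spreading, so no test mutates state that another test observes. A minimal, self-contained sketch of that pattern (hypothetical types, unrelated to the real models):

interface SketchBucket {
  id: string;
  name: string;
}

interface SketchProject {
  id: string;
  remoteCacheStorage: SketchBucket | null;
}

describe('fixture pattern sketch', () => {
  let mockProject: SketchProject;

  beforeEach(() => {
    // A brand-new fixture for every test: nothing leaks between cases.
    mockProject = { id: 'project', remoteCacheStorage: null };
  });

  it('derives a variant by spreading instead of mutating', () => {
    const withBucket: SketchProject = {
      ...mockProject,
      remoteCacheStorage: { id: 'id-1', name: 'S3 bucket' },
    };

    expect(withBucket.remoteCacheStorage?.name).toBe('S3 bucket');
    // The shared fixture is untouched.
    expect(mockProject.remoteCacheStorage).toBeNull();
  });
});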

View File

@ -0,0 +1,99 @@
import { useApolloClient } from '@apollo/client';
import {
Button,
FormLayout,
Modal,
ModalProps,
Page,
Stack,
TextField,
} from '@shopify/polaris';
import { observer } from 'mobx-react-lite';
import React, { useContext, useState } from 'react';
import { CreateBucketStore } from './CreateBucketStore';
import { runInAction } from 'mobx';
import { S3Bucket } from '@/models';
import { HomeStoreContext } from '@/stores/HomeStore';
interface Props extends Pick<ModalProps, 'onClose' | 'open'> {
onCreateBucket: (bucket: S3Bucket) => void;
}
export const CreateBucketModal = observer(
({ onClose, open, onCreateBucket }: Props) => {
const client = useApolloClient();
const { projectStore } = useContext(HomeStoreContext);
const [createBucketStore] = useState(
() => new CreateBucketStore(client, projectStore),
);
return (
<Modal title="Create new bucket" onClose={onClose} open={open}>
<Page>
<FormLayout>
<TextField
type="text"
label="Bucket name"
value={createBucketStore.bucketName}
onChange={(bucketName) => {
runInAction(() => {
createBucketStore.bucketName = bucketName;
});
}}
autoComplete="off"
/>
<TextField
type="text"
label="Region"
value={createBucketStore.region}
onChange={(region) => {
runInAction(() => {
createBucketStore.region = region;
});
}}
autoComplete="off"
/>
<TextField
type="text"
label="Access key ID"
value={createBucketStore.accessKeyId}
onChange={(accessKeyId) => {
runInAction(() => {
createBucketStore.accessKeyId = accessKeyId;
});
}}
autoComplete="off"
/>
<Stack alignment="trailing" distribution="fill">
<TextField
type="password"
label="Secret access key"
value={createBucketStore.secretAccessKey}
onChange={(secretAccessKey) => {
runInAction(() => {
createBucketStore.secretAccessKey =
secretAccessKey;
});
}}
autoComplete="password"
/>
</Stack>
<Button
primary
loading={createBucketStore.saving}
disabled={createBucketStore.isCreateButtonDisabled}
onClick={async () => {
const bucket = await createBucketStore.createBucket();
if (bucket) {
onCreateBucket(bucket);
}
}}
>
Create bucket
</Button>
</FormLayout>
</Page>
</Modal>
);
},
);

View File

@ -0,0 +1,76 @@
import {
CreateS3BucketDocument,
CreateS3BucketMutation,
} from '@/graphql/types';
import { mapS3Bucket } from '@/models';
import ProjectStore from '@/stores/ProjectStore';
import { ApolloClient } from '@apollo/client';
import { makeAutoObservable, runInAction } from 'mobx';
export class CreateBucketStore {
bucketName: string = '';
accessKeyId = '';
secretAccessKey = '';
region = '';
saving = false;
client: ApolloClient<object>;
projectStore: ProjectStore;
constructor(
client: ApolloClient<object>,
projectStore: ProjectStore,
) {
this.client = client;
this.projectStore = projectStore;
makeAutoObservable(this);
}
get isCreateButtonDisabled() {
return (
this.bucketName.length === 0 ||
this.accessKeyId.length === 0 ||
this.secretAccessKey.length === 0 ||
this.region.length === 0
);
}
async createBucket() {
runInAction(() => {
this.saving = true;
});
if (this.projectStore.project == null) {
return;
}
const { data } = await this.client.mutate<CreateS3BucketMutation>(
{
mutation: CreateS3BucketDocument,
variables: {
input: {
name: this.bucketName,
accessKeyId: this.accessKeyId,
secretAccessKey: this.secretAccessKey,
region: this.region,
accountId: this.projectStore.project.account.id,
},
},
},
);
if (data == null) {
runInAction(() => {
this.saving = false;
});
return;
}
const s3Bucket = mapS3Bucket(data.createS3Bucket);
runInAction(() => {
this.saving = false;
if (this.projectStore.project != null) {
this.projectStore.project.remoteCacheStorage = s3Bucket;
}
});
return s3Bucket;
}
}
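
A pattern worth calling out in this store (and in RemoteCachePageStore above) is that every state write that happens after an await is wrapped in runInAction: once an async method resumes, MobX no longer treats the code as part of the original action, so a bare assignment would trigger a strict-mode warning. A stripped-down sketch of the same idea, independent of the real store:

import { makeAutoObservable, runInAction } from 'mobx';

export class SavingFlagSketch {
  saving = false;

  constructor() {
    makeAutoObservable(this);
  }

  async save(perform: () => Promise<void>): Promise<void> {
    // Still inside the MobX action: a plain assignment is fine here.
    this.saving = true;
    try {
      await perform();
    } finally {
      // After the await we are outside the original action, so the write is
      // wrapped in runInAction to satisfy strict mode.
      runInAction(() => {
        this.saving = false;
      });
    }
  }
}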

View File

@ -0,0 +1,74 @@
import ProjectStore from '@/stores/ProjectStore';
import { CreateBucketStore } from '../CreateBucketStore';
import { S3Bucket } from '@/models';
jest.mock('@apollo/client');
jest.mock('@/stores/ProjectStore');
describe('CreateBucketStore', () => {
const client = {
query: jest.fn(),
mutate: jest.fn(),
} as any;
const projectStore = {} as ProjectStore;
beforeEach(() => {
jest.clearAllMocks();
projectStore.project = {
id: 'project',
account: {
id: 'account-id',
name: 'acount-name',
owner: {
id: 'owner',
type: 'organization',
},
},
remoteCacheStorage: null,
token: '',
name: 'project',
slug: 'org/project',
};
});
it('creates a new bucket', async () => {
// Given
const createBucketStore = new CreateBucketStore(
client,
projectStore,
);
createBucketStore.bucketName = 'S3 bucket';
createBucketStore.secretAccessKey = 'secret';
createBucketStore.accessKeyId = 'access-key-id';
createBucketStore.region = 'region';
client.mutate.mockReturnValueOnce({
data: {
createS3Bucket: {
accessKeyId: 'access-key-id',
accountId: 'account-id',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
__typename: 'S3Bucket',
},
},
});
const expectedS3Bucket: S3Bucket = {
accessKeyId: 'access-key-id',
id: 'id-1',
name: 'S3 bucket',
secretAccessKey: 'secret',
region: 'region',
};
// When
const got = await createBucketStore.createBucket();
// Then
expect(
createBucketStore.projectStore.project?.remoteCacheStorage,
).toEqual(expectedS3Bucket);
expect(got).toEqual(expectedS3Bucket);
});
});

View File

@ -0,0 +1 @@
export { CreateBucketModal } from './CreateBucketModal';

View File

@ -0,0 +1,133 @@
import ProjectStore from '@/stores/ProjectStore';
import RemoteCachePageStore from '../RemoteCachePageStore';
import { useCallback } from 'react';
import { runInAction } from 'mobx';
import {
Button,
FooterHelp,
FormLayout,
Link,
Stack,
TextField,
Text,
} from '@shopify/polaris';
import React from 'react';
import { observer } from 'mobx-react-lite';
interface EditBucketProps {
remoteCachePageStore: RemoteCachePageStore;
projectStore: ProjectStore;
}
export const EditBucketForm = observer(
({ remoteCachePageStore, projectStore }: EditBucketProps) => {
const handleBucketNameChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.bucketName = newValue;
});
},
[remoteCachePageStore],
);
const handleRegionChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.region = newValue;
});
},
[remoteCachePageStore],
);
const handleAccessKeyIdChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.accessKeyId = newValue;
});
},
[remoteCachePageStore],
);
const handleSecretAccessKeyChange = useCallback(
(newValue) => {
runInAction(() => {
remoteCachePageStore.secretAccessKey = newValue;
});
},
[remoteCachePageStore],
);
const handleRemoveSecretAccessKey = useCallback(() => {
remoteCachePageStore.removeAccessKey();
}, [remoteCachePageStore]);
const handleApplyChangesClicked = useCallback(() => {
if (projectStore.project == undefined) {
return;
}
remoteCachePageStore.applyChangesButtonClicked(
projectStore.project.account.id,
);
}, [remoteCachePageStore, projectStore]);
return (
<FormLayout>
<TextField
type="text"
label="Bucket name"
value={remoteCachePageStore.bucketName}
onChange={handleBucketNameChange}
autoComplete="off"
/>
<TextField
type="text"
label="Region"
value={remoteCachePageStore.region}
onChange={handleRegionChange}
autoComplete="off"
/>
<TextField
type="text"
label="Access key ID"
value={remoteCachePageStore.accessKeyId}
onChange={handleAccessKeyIdChange}
autoComplete="off"
/>
<Stack alignment="trailing" distribution="fill">
<TextField
disabled={
remoteCachePageStore.isSecretAccessKeyTextFieldDisabled
}
type="password"
label="Secret access key"
value={remoteCachePageStore.secretAccessKey}
onChange={handleSecretAccessKeyChange}
autoComplete="password"
/>
{remoteCachePageStore.isCreatingBucket === false && (
<Button onClick={handleRemoveSecretAccessKey}>
Remove access key
</Button>
)}
</Stack>
<Button
primary
loading={remoteCachePageStore.isApplyChangesButtonLoading}
disabled={remoteCachePageStore.isApplyChangesButtonDisabled}
onClick={handleApplyChangesClicked}
>
Edit bucket
</Button>
<FooterHelp>
Learn more about getting{' '}
<Link
external={true}
url="https://docs.aws.amazon.com/powershell/latest/userguide/pstools-appendix-sign-up.html"
>
access key to your bucket
</Link>
</FooterHelp>
</FormLayout>
);
},
);

View File

@ -0,0 +1,2 @@
export { CreateBucketModal } from './CreateBucketModal';
export { EditBucketForm } from './EditBucketForm';

View File

@ -5,5 +5,4 @@ fragment S3BucketInfo on S3Bucket {
secretAccessKey
accountId
region
isDefault
}

View File

@ -49,7 +49,7 @@ export type CancelInviteInput = {
export type ChangeRemoteCacheStorageInput = {
/** A unique identifier for the client performing the mutation. */
clientMutationId?: InputMaybe<Scalars['String']>;
id: Scalars['ID'];
id?: InputMaybe<Scalars['ID']>;
projectId: Scalars['ID'];
};
@ -72,8 +72,7 @@ export type ClearRemoteCacheStorage = {
export type ClearRemoteCacheStorageInput = {
/** A unique identifier for the client performing the mutation. */
clientMutationId?: InputMaybe<Scalars['String']>;
id?: InputMaybe<Scalars['ID']>;
projectSlug?: InputMaybe<Scalars['String']>;
projectSlug: Scalars['String'];
};
export type CommandAverage = {
@ -177,7 +176,7 @@ export type Mutation = {
/** Cancel invite for a user to a given organization */
cancelInvite: Invitation;
/** Change remote cache storage */
changeRemoteCacheStorage: RemoteCacheStorage;
changeRemoteCacheStorage?: Maybe<RemoteCacheStorage>;
/** Change role of a user for a given organization */
changeUserRole: User;
/** Clears the remote cache storage */
@ -403,7 +402,6 @@ export type S3Bucket = {
accessKeyId: Scalars['String'];
accountId: Scalars['ID'];
id: Scalars['ID'];
isDefault: Scalars['Boolean'];
name: Scalars['String'];
region: Scalars['String'];
secretAccessKey?: Maybe<Scalars['String']>;
@ -476,7 +474,7 @@ export type ChangeRemoteCacheStorageMutationVariables = Exact<{
}>;
export type ChangeRemoteCacheStorageMutation = { __typename?: 'Mutation', changeRemoteCacheStorage: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string, isDefault: boolean } };
export type ChangeRemoteCacheStorageMutation = { __typename?: 'Mutation', changeRemoteCacheStorage?: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string } | null };
export type ChangeUserRoleMutationVariables = Exact<{
input: ChangeUserRoleInput;
@ -536,7 +534,7 @@ export type CreateS3BucketMutationVariables = Exact<{
}>;
export type CreateS3BucketMutation = { __typename?: 'Mutation', createS3Bucket: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string, isDefault: boolean } };
export type CreateS3BucketMutation = { __typename?: 'Mutation', createS3Bucket: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string } };
export type DeleteProjectMutationVariables = Exact<{
input: DeleteProjectInput;
@ -583,7 +581,7 @@ export type OrganizationQuery = { __typename?: 'Query', organization?: { __typen
export type PendingInvitationFragment = { __typename?: 'Invitation', inviteeEmail: string, id: string };
export type ProjectDetailFragment = { __typename?: 'Project', id: string, slug: string, name: string, token: string, account: { __typename?: 'Account', id: string, name: string, owner: { __typename?: 'Organization', id: string } | { __typename?: 'User', id: string } }, remoteCacheStorage?: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string, isDefault: boolean } | null };
export type ProjectDetailFragment = { __typename?: 'Project', id: string, slug: string, name: string, token: string, account: { __typename?: 'Account', id: string, name: string, owner: { __typename?: 'Organization', id: string } | { __typename?: 'User', id: string } }, remoteCacheStorage?: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string } | null };
export type ProjectQueryVariables = Exact<{
name: Scalars['String'];
@ -591,7 +589,7 @@ export type ProjectQueryVariables = Exact<{
}>;
export type ProjectQuery = { __typename?: 'Query', project?: { __typename?: 'Project', id: string, slug: string, name: string, token: string, account: { __typename?: 'Account', id: string, name: string, owner: { __typename?: 'Organization', id: string } | { __typename?: 'User', id: string } }, remoteCacheStorage?: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string, isDefault: boolean } | null } | null };
export type ProjectQuery = { __typename?: 'Query', project?: { __typename?: 'Project', id: string, slug: string, name: string, token: string, account: { __typename?: 'Account', id: string, name: string, owner: { __typename?: 'Organization', id: string } | { __typename?: 'User', id: string } }, remoteCacheStorage?: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string } | null } | null };
export type RemoveUserMutationVariables = Exact<{
input: RemoveUserInput;
@ -607,7 +605,7 @@ export type ResendInviteMutationVariables = Exact<{
export type ResendInviteMutation = { __typename?: 'Mutation', resendInvite: { __typename?: 'Invitation', inviteeEmail: string } };
export type S3BucketInfoFragment = { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string, isDefault: boolean };
export type S3BucketInfoFragment = { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string };
export type S3BucketsQueryVariables = Exact<{
accountName: Scalars['String'];
@ -615,7 +613,7 @@ export type S3BucketsQueryVariables = Exact<{
}>;
export type S3BucketsQuery = { __typename?: 'Query', s3Buckets: Array<{ __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string, isDefault: boolean }> };
export type S3BucketsQuery = { __typename?: 'Query', s3Buckets: Array<{ __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string }> };
export type UpdateLastVisitedProjectMutationVariables = Exact<{
input: UpdateLastVisitedProjectInput;
@ -629,7 +627,7 @@ export type UpdateS3BucketMutationVariables = Exact<{
}>;
export type UpdateS3BucketMutation = { __typename?: 'Mutation', updateS3Bucket: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string, isDefault: boolean } };
export type UpdateS3BucketMutation = { __typename?: 'Mutation', updateS3Bucket: { __typename?: 'S3Bucket', id: string, name: string, accessKeyId: string, secretAccessKey?: string | null, accountId: string, region: string } };
export type UserBasicInfoFragment = { __typename?: 'User', id: string, email: string, avatarUrl?: string | null, account: { __typename?: 'Account', name: string } };
@ -686,7 +684,6 @@ export const S3BucketInfoFragmentDoc = gql`
secretAccessKey
accountId
region
isDefault
}
`;
export const ProjectDetailFragmentDoc = gql`
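
The regenerated types reflect that ChangeRemoteCacheStorageInput.id is now optional and the mutation result nullable: omitting the id moves a project back to the Tuist-managed default bucket. As a rough client-side sketch (the document below is hand-written and the /graphql endpoint is an assumption; the app itself uses the generated ChangeRemoteCacheStorageDocument):

import { ApolloClient, InMemoryCache, gql } from '@apollo/client';

// Hand-written stand-in for the generated ChangeRemoteCacheStorageDocument.
// RemoteCacheStorage is a union, hence the inline fragment on S3Bucket.
const CHANGE_REMOTE_CACHE_STORAGE = gql`
  mutation ChangeRemoteCacheStorage($input: ChangeRemoteCacheStorageInput!) {
    changeRemoteCacheStorage(input: $input) {
      ... on S3Bucket {
        id
        name
      }
    }
  }
`;

const client = new ApolloClient({
  uri: '/graphql', // assumption: the API endpoint path
  cache: new InMemoryCache(),
});

// Passing only the projectId (no bucket id) moves the project back to the
// default bucket; the server then resolves the storage as null.
export async function switchToDefaultBucket(projectId: string) {
  const { data } = await client.mutate({
    mutation: CHANGE_REMOTE_CACHE_STORAGE,
    variables: { input: { projectId } },
  });
  return data?.changeRemoteCacheStorage ?? null;
}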

View File

@ -1,5 +1,6 @@
import { ProjectDetailFragment } from '@/graphql/types';
import { Account, mapS3Bucket, S3Bucket } from '.';
import { Account } from './Account';
import { mapS3Bucket, S3Bucket } from './S3Bucket';
export interface Project {
id: string;

View File

@ -6,7 +6,6 @@ export interface S3Bucket {
accessKeyId: string;
secretAccessKey: string;
region: string;
isDefault: boolean;
}
export const mapS3Bucket = (bucketFragment: S3BucketInfoFragment) => {
@ -16,6 +15,5 @@ export const mapS3Bucket = (bucketFragment: S3BucketInfoFragment) => {
accessKeyId: bucketFragment.accessKeyId,
secretAccessKey: bucketFragment.secretAccessKey,
region: bucketFragment.region,
isDefault: bucketFragment.isDefault,
} as S3Bucket;
};

View File

@ -5,3 +5,5 @@ export type { S3Bucket } from './S3Bucket';
export { mapS3Bucket } from './S3Bucket';
export type { OrganizationDetail } from './OrganizationDetail';
export { mapOrganizationDetail } from './OrganizationDetail';
export type { Project } from './Project';
export { mapProject } from './Project';

View File

@ -19,7 +19,7 @@ export default class ProjectStore {
}
async load(name: string, accountName: string) {
const { data } = await this.client.query<ProjectQuery>({
const { data, errors } = await this.client.query<ProjectQuery>({
query: ProjectDocument,
variables: {
name,

View File

@ -2,7 +2,7 @@
module Mutations
class ChangeRemoteCacheStorage < ::Mutations::BaseMutation
argument :id, ID, required: true
argument :id, ID, required: false
argument :project_id, ID, required: true
def resolve(attributes)

View File

@ -2,14 +2,13 @@
module Mutations
class ClearRemoteCacheStorage < ::Mutations::BaseMutation
argument :id, ID, required: false
argument :project_slug, String, required: false
argument :project_slug, String, required: true
type Types::ClearRemoteCacheStorageType
def resolve(attributes)
begin
bucket = S3BucketClearService.call(clearer: context[:current_user], **attributes)
bucket = CacheClearService.call(clearer: context[:current_user], **attributes)
{
bucket: bucket,
errors: [],
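
Clearing is now addressed by project slug rather than by bucket id, which is what allows the shared default bucket to be cleared for a single project. A hedged sketch of the corresponding client-side call (hand-written document with a deliberately minimal selection and an assumed /graphql endpoint; the web app uses its generated ClearRemoteCacheStorageDocument and existing Apollo client):

import { ApolloClient, InMemoryCache, gql } from '@apollo/client';

// Hand-written stand-in for the generated ClearRemoteCacheStorageDocument;
// the selection is intentionally minimal.
const CLEAR_REMOTE_CACHE_STORAGE = gql`
  mutation ClearRemoteCacheStorage($input: ClearRemoteCacheStorageInput!) {
    clearRemoteCacheStorage(input: $input) {
      __typename
    }
  }
`;

const client = new ApolloClient({
  uri: '/graphql', // assumption: the API endpoint path
  cache: new InMemoryCache(),
});

export async function clearProjectCache(projectSlug: string): Promise<void> {
  // The slug (e.g. 'org/project') is all that is required now; bucket ids are
  // no longer part of this input.
  await client.mutate({
    mutation: CLEAR_REMOTE_CACHE_STORAGE,
    variables: { input: { projectSlug } },
  });
}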

View File

@ -9,7 +9,7 @@ module Types
mutation: Mutations::CreateProject
field :clear_remote_cache_storage,
ClearRemoteCacheStorageType,
null: false,
null: true,
description: "Clears the remote cache storage",
mutation: Mutations::ClearRemoteCacheStorage
field :delete_project,
@ -64,7 +64,7 @@ module Types
mutation: Mutations::UpdateS3Bucket
field :change_remote_cache_storage,
RemoteCacheStorageType,
null: false,
null: true,
description: "Change remote cache storage",
mutation: Mutations::ChangeRemoteCacheStorage
end

View File

@ -12,5 +12,15 @@ module Types
def slug
"#{object.account.name}/#{object.name}"
end
def remote_cache_storage
# The default bucket is an internal fallback and is not exposed through the API.
if object.remote_cache_storage.is_a?(DefaultS3Bucket)
nil
else
object.remote_cache_storage
end
end
end
end

View File

@ -8,6 +8,5 @@ module Types
field :secret_access_key, String, null: true
field :account_id, ID, null: false
field :region, String, null: false
field :is_default, Boolean, null: false
end
end

View File

@ -14,4 +14,39 @@ class Project < ApplicationRecord
# Validations
validates :name, exclusion: Defaults.fetch(:blocklisted_slug_keywords)
def remote_cache_storage
remote_cache_storage_id = self["remote_cache_storage_id"]
if remote_cache_storage_id.nil?
DefaultS3Bucket.new
else
S3Bucket.find(remote_cache_storage_id)
end
end
end
class DefaultS3Bucket
def name
if Rails.env.production?
"tuist"
else
"tuist-debug"
end
end
def access_key_id
Rails.application.credentials.aws[:access_key_id]
end
def secret_access_key
Rails.application.credentials.aws[:secret_access_key]
end
def region
"eu-west-1"
end
def account_id
nil
end
end
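
DefaultS3Bucket is a server-side fallback only; the GraphQL ProjectType above resolves it to nil, so API consumers simply see a project without a remoteCacheStorage. A small, hypothetical client-side helper illustrating that contract (names are placeholders, not part of the commit):

interface RemoteCacheStorageLike {
  name: string;
}

interface ProjectLike {
  remoteCacheStorage: RemoteCacheStorageLike | null;
}

// A null remoteCacheStorage from the API now means "Tuist-managed default
// bucket", not "caching disabled".
export function describeRemoteCacheStorage(project: ProjectLike): string {
  return (
    project.remoteCacheStorage?.name ??
    'Default bucket (managed by Tuist Cloud)'
  );
}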

View File

@ -2,6 +2,6 @@
class S3Bucket < ApplicationRecord
# Associations
belongs_to :account, optional: false
belongs_to :account, optional: true
has_many :projects, as: :remote_cache_storage
end

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
class CacheClearService < ApplicationService
module Error
class Unauthorized < CloudError
def message
"You do not have a permission to clear this S3 bucket."
end
end
end
attr_reader :project_slug, :clearer
def initialize(project_slug:, clearer:)
super()
@project_slug = project_slug
@clearer = clearer
end
def call
project = ProjectFetchService.new.fetch_by_slug(
slug: project_slug,
user: clearer,
)
s3_bucket = project.remote_cache_storage
raise Error::Unauthorized.new unless ProjectPolicy.new(clearer, project).update?
s3_client = S3ClientService.call(s3_bucket: s3_bucket)
delete_objects(
s3_client: s3_client,
project: project,
)
if s3_bucket.is_a?(DefaultS3Bucket)
nil
else
s3_bucket
end
end
def delete_objects(s3_client:, project:, marker: nil)
objects_list = s3_client.list_objects(
bucket: project.remote_cache_storage.name,
prefix: "#{project_slug}/",
)
if objects_list.contents.empty?
return
end
s3_client.delete_objects(
bucket: project.remote_cache_storage.name,
delete: {
objects: objects_list.contents.map { |object| { key: object.key } },
},
marker: marker,
)
if !objects_list.next_marker.nil?
delete_objects(
s3_client: s3_client,
project: project,
marker: objects_list.next_marker,
)
end
end
end
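
CacheClearService deletes everything under the project's prefix, page by page, rather than wiping a whole bucket. For readers more at home in the JavaScript SDK, here is a rough equivalent sketch using @aws-sdk/client-s3 v3 — an assumption, since the service above uses the Ruby SDK and marker-based list_objects pagination rather than ListObjectsV2 continuation tokens:

import {
  S3Client,
  ListObjectsV2Command,
  DeleteObjectsCommand,
} from '@aws-sdk/client-s3';

// Deletes every cached artifact stored under `${projectSlug}/` in the bucket,
// one page of keys at a time.
export async function clearProjectPrefix(
  client: S3Client,
  bucket: string,
  projectSlug: string,
): Promise<void> {
  let continuationToken: string | undefined;

  do {
    const page = await client.send(
      new ListObjectsV2Command({
        Bucket: bucket,
        Prefix: `${projectSlug}/`,
        ContinuationToken: continuationToken,
      }),
    );

    const keys = (page.Contents ?? []).map((object) => ({ Key: object.Key }));
    if (keys.length === 0) {
      return;
    }

    await client.send(
      new DeleteObjectsCommand({
        Bucket: bucket,
        Delete: { Objects: keys },
      }),
    );

    continuationToken = page.IsTruncated
      ? page.NextContinuationToken
      : undefined;
  } while (continuationToken);
}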

View File

@ -6,27 +6,17 @@ class CacheService < ApplicationService
attr_reader :account_name, :project_name, :project_slug, :hash, :name, :user, :object_key, :project
module Error
class MissingRemoteCacheStorage < CloudError
attr_reader :project_slug
def initialize(project_slug)
@project_slug = project_slug
end
def message
remote_cache_storage_url = URI.join(root_url, "#{project_slug}/remote-cache")
"""
Project #{project_slug} has no remote cache. \
Define your remote cache at the following url: #{remote_cache_storage_url}.
"""
end
end
class S3BucketForbidden < CloudError
def message
"Ensure your secret access key is set correctly, following the instructions here: https://docs.aws.amazon.com/powershell/latest/userguide/pstools-appendix-sign-up.html."
end
end
class Unauthorized < CloudError
def message
"You do not have a permission to clear this S3 bucket."
end
end
end
def initialize(project_slug:, hash:, name:, user:, project:)
@ -36,7 +26,7 @@ Define your remote cache at the following url: #{remote_cache_storage_url}.
@project_name = split_project_slug.last
@project_slug = project_slug
@hash = hash
@object_key = "#{hash}/#{name}"
@object_key = "#{project_slug}/#{hash}/#{name}"
@name = name
@user = user
@project = project
@ -44,10 +34,7 @@ Define your remote cache at the following url: #{remote_cache_storage_url}.
def object_exists?
fetch_project_if_necessary
if project.remote_cache_storage.nil?
raise Error::MissingRemoteCacheStorage.new(project_slug)
end
s3_client = s3_client(s3_bucket: project.remote_cache_storage)
s3_client = S3ClientService.call(s3_bucket: project.remote_cache_storage)
begin
s3_client.head_object(
bucket: project.remote_cache_storage.name,
@ -63,7 +50,7 @@ Define your remote cache at the following url: #{remote_cache_storage_url}.
def fetch
fetch_project_if_necessary
s3_client = s3_client(s3_bucket: project.remote_cache_storage)
s3_client = S3ClientService.call(s3_bucket: project.remote_cache_storage)
signer = Aws::S3::Presigner.new(client: s3_client)
url = signer.presigned_url(
:get_object,
@ -75,7 +62,7 @@ Define your remote cache at the following url: #{remote_cache_storage_url}.
def upload
fetch_project_if_necessary
s3_client = s3_client(s3_bucket: project.remote_cache_storage)
s3_client = S3ClientService.call(s3_bucket: project.remote_cache_storage)
s3_client.put_object(
bucket: project.remote_cache_storage.name,
key: object_key,
@ -91,7 +78,7 @@ Define your remote cache at the following url: #{remote_cache_storage_url}.
def verify_upload
fetch_project_if_necessary
s3_client = s3_client(s3_bucket: project.remote_cache_storage)
s3_client = S3ClientService.call(s3_bucket: project.remote_cache_storage)
object = s3_client.get_object(
bucket: project.remote_cache_storage.name,
key: object_key,
@ -108,17 +95,4 @@ Define your remote cache at the following url: #{remote_cache_storage_url}.
)
end
end
private
def s3_client(s3_bucket:)
secret_access_key = DecipherService.call(
key: Base64.decode64(s3_bucket.secret_access_key),
iv: Base64.decode64(s3_bucket.iv),
)
Aws::S3::Client.new(
region: s3_bucket.region,
access_key_id: s3_bucket.access_key_id,
secret_access_key: secret_access_key,
)
end
end
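
The other half of the change in CacheService is the object key: artifacts are now namespaced by the project slug, which is what makes the per-project clear above possible even when many projects share the default bucket. Expressed as a tiny illustrative helper (hypothetical, not part of the commit):

// New layout: '<account>/<project>/<artifact-hash>/<name>' — previously the
// key was just '<artifact-hash>/<name>'.
export function cacheObjectKey(
  projectSlug: string,
  hash: string,
  name: string,
): string {
  return `${projectSlug}/${hash}/${name}`;
}

// cacheObjectKey('org/project', 'artifact-hash', 'MyFramework')
//   === 'org/project/artifact-hash/MyFramework'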

View File

@ -41,7 +41,7 @@ class ProjectChangeRemoteCacheStorageService < ApplicationService
end
end
def initialize(id:, project_id:, user:)
def initialize(id: nil, project_id:, user:)
super()
@id = id
@project_id = project_id
@ -49,13 +49,15 @@ class ProjectChangeRemoteCacheStorageService < ApplicationService
end
def call
begin
s3_bucket = S3Bucket.find(id)
rescue ActiveRecord::RecordNotFound
raise Error::S3BucketNotFound.new(id)
end
if !id.nil?
begin
s3_bucket = S3Bucket.find(id)
rescue ActiveRecord::RecordNotFound
raise Error::S3BucketNotFound.new(id)
end
raise Error::Unauthorized.new(s3_bucket.account.name) unless AccountPolicy.new(user, s3_bucket.account).update?
raise Error::Unauthorized.new(s3_bucket.account.name) unless AccountPolicy.new(user, s3_bucket.account).update?
end
begin
project = Project.find(project_id)
@ -64,6 +66,10 @@ class ProjectChangeRemoteCacheStorageService < ApplicationService
end
project.update(remote_cache_storage: s3_bucket)
s3_bucket
if id.nil?
nil
else
project.remote_cache_storage
end
end
end

View File

@ -44,32 +44,7 @@ class ProjectCreateService < ApplicationService
token: Devise.friendly_token.first(8),
)
end
create_s3_bucket(project, organization)
project
end
end
def create_s3_bucket(project, organization)
# A prefix is added as the bucket name must be unique across the whole AWS and not just across the tuist one.
s3_bucket_name = "#{SecureRandom.uuid[0...-13]}-#{project.account.name}-#{name}"
s3_bucket = S3BucketCreateService.call(
name: s3_bucket_name,
access_key_id: Rails.application.credentials.aws[:access_key_id],
secret_access_key: Rails.application.credentials.aws[:secret_access_key],
region: "eu-west-1",
account_id: organization.nil? ? account_id : organization.account.id,
default_project: project,
)
project.update(remote_cache_storage: s3_bucket)
s3_client.create_bucket(bucket: s3_bucket_name)
project
end
def s3_client
Aws::S3::Client.new(
region: "eu-west-1",
access_key_id: Rails.application.credentials.aws[:access_key_id],
secret_access_key: Rails.application.credentials.aws[:secret_access_key],
)
end
end

View File

@ -33,17 +33,7 @@ class ProjectDeleteService < ApplicationService
project = ProjectFetchService.new.fetch_by_id(project_id: id, user: deleter)
raise Error::Unauthorized.new unless ProjectPolicy.new(deleter, project).update?
ActiveRecord::Base.transaction do
default_s3_bucket = S3Bucket.find_by(
default_project_id: project.id,
)
if default_s3_bucket != nil
s3_client.delete_bucket(bucket: default_s3_bucket.name)
default_s3_bucket.destroy
end
project.destroy
end
project.destroy
end
def s3_client

View File

@ -1,61 +0,0 @@
# frozen_string_literal: true
class S3BucketClearService < ApplicationService
module Error
class Unauthorized < CloudError
def message
"You do not have a permission to clear this S3 bucket."
end
end
class S3BucketNotFound < CloudError
def message
"S3 bucket was not found. Make sure it exists."
end
end
end
attr_reader :id, :project_slug, :clearer
def initialize(id: nil, project_slug: nil, clearer:)
super()
@id = id
@project_slug = project_slug
@clearer = clearer
end
def call
begin
if id.nil?
project = ProjectFetchService.new.fetch_by_slug(
slug: project_slug,
user: clearer,
)
bucket = project.remote_cache_storage
else
bucket = S3Bucket.find(id)
end
rescue ActiveRecord::RecordNotFound
raise Error::S3BucketNotFound
end
raise Error::Unauthorized.new unless AccountPolicy.new(clearer, bucket.account).update?
s3_bucket = s3_bucket(bucket: bucket)
s3_bucket.clear!
bucket
end
def s3_bucket(bucket:)
secret_access_key = DecipherService.call(
key: Base64.decode64(bucket.secret_access_key),
iv: Base64.decode64(bucket.iv),
)
Aws::S3::Bucket.new(
name: bucket.name,
region: bucket.region,
access_key_id: bucket.access_key_id,
secret_access_key: secret_access_key,
)
end
end

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
class S3BucketCreateService < ApplicationService
attr_reader :name, :access_key_id, :secret_access_key, :region, :account_id, :default_project
attr_reader :name, :access_key_id, :secret_access_key, :region, :account_id
module Error
class DuplicatedName < CloudError
@ -17,14 +17,13 @@ class S3BucketCreateService < ApplicationService
end
end
def initialize(name:, access_key_id:, secret_access_key:, region:, account_id:, default_project: nil)
def initialize(name:, access_key_id:, secret_access_key:, region:, account_id:)
super()
@name = name
@access_key_id = access_key_id
@secret_access_key = secret_access_key
@region = region
@account_id = account_id
@default_project = default_project
end
def call
@ -38,15 +37,23 @@ class S3BucketCreateService < ApplicationService
iv = cipher.random_iv
encrypted_secret_access_key = cipher.update(secret_access_key) + cipher.final
account = Account.find(account_id)
account.s3_buckets.create!(
name: name,
access_key_id: access_key_id,
secret_access_key: Base64.encode64(encrypted_secret_access_key),
iv: Base64.encode64(iv),
region: region,
is_default: default_project.nil? == false,
default_project_id: !default_project.nil? ? default_project.id : nil,
)
if account_id.nil?
S3Bucket.create!(
name: name,
access_key_id: access_key_id,
secret_access_key: Base64.encode64(encrypted_secret_access_key),
iv: Base64.encode64(iv),
region: region,
)
else
account = Account.find(account_id)
account.s3_buckets.create!(
name: name,
access_key_id: access_key_id,
secret_access_key: Base64.encode64(encrypted_secret_access_key),
iv: Base64.encode64(iv),
region: region,
)
end
end
end

View File

@ -28,8 +28,6 @@ class S3BucketsFetchService < ApplicationService
account = AccountFetchService.call(name: account_name)
raise Error::Unauthorized.new(account_name) unless AccountPolicy.new(user, account).show?
account.s3_buckets.reject { |s3_bucket|
s3_bucket.is_default && !s3_bucket.name.end_with?("#{account_name}-#{project_name}")
}
account.s3_buckets
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
class S3ClientService < ApplicationService
attr_reader :s3_bucket
def initialize(s3_bucket:)
super()
@s3_bucket = s3_bucket
end
def call
if s3_bucket.is_a?(DefaultS3Bucket)
# The default bucket's credentials come straight from the Rails credentials.
secret_access_key = s3_bucket.secret_access_key
else
# User-provided buckets store the secret encrypted, so it has to be deciphered.
secret_access_key = DecipherService.call(
key: Base64.decode64(s3_bucket.secret_access_key),
iv: Base64.decode64(s3_bucket.iv),
)
end
Aws::S3::Client.new(
region: s3_bucket.region,
access_key_id: s3_bucket.access_key_id,
secret_access_key: secret_access_key,
)
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class RemoveDefaultProject < ActiveRecord::Migration[7.0]
def change
S3Bucket.where(is_default: true).pluck(:default_project_id)
.filter { |id| id != nil }
.map { |id| Project.where(id: id) }
.each { |project| project.update(remote_cache_storage: nil) }
S3Bucket.where(is_default: true).delete_all
remove_reference(:s3_buckets, :default_project, index: true, foreign_key: false)
remove_column(:s3_buckets, :is_default)
# s3_bucket = S3BucketCreateService.call(
# name: "tuist",
# access_key_id: Rails.application.credentials.aws[:access_key_id],
# secret_access_key: Rails.application.credentials.aws[:secret_access_key],
# region: "eu-west-1",
# account_id: nil,
# )
# project.update(remote_cache_storage: s3_bucket)
# TODO: We should add instructions on how to create a bucket manually for self-hosted solutions
# s3_client.create_bucket(bucket: )
end
end

View File

@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[7.0].define(version: 2023_01_15_112317) do
ActiveRecord::Schema[7.0].define(version: 2023_06_01_202210) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@ -95,10 +95,7 @@ ActiveRecord::Schema[7.0].define(version: 2023_01_15_112317) do
t.datetime "updated_at", null: false
t.bigint "account_id", null: false
t.string "region", null: false
t.boolean "is_default", default: false
t.bigint "default_project_id"
t.index ["account_id"], name: "index_s3_buckets_on_account_id"
t.index ["default_project_id"], name: "index_s3_buckets_on_default_project_id"
t.index ["name", "account_id"], name: "index_s3_buckets_on_name_and_account_id", unique: true
end

View File

@ -40,7 +40,7 @@ input ChangeRemoteCacheStorageInput {
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
id: ID!
id: ID
projectId: ID!
}
@ -70,8 +70,7 @@ input ClearRemoteCacheStorageInput {
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
id: ID
projectSlug: String
projectSlug: String!
}
type CommandAverage {
@ -231,7 +230,7 @@ type Mutation {
Parameters for ChangeRemoteCacheStorage
"""
input: ChangeRemoteCacheStorageInput!
): RemoteCacheStorage!
): RemoteCacheStorage
"""
Change role of a user for a given organization
@ -496,7 +495,6 @@ type S3Bucket {
accessKeyId: String!
accountId: ID!
id: ID!
isDefault: Boolean!
name: String!
region: String!
secretAccessKey: String

View File

@ -261,13 +261,9 @@
"name": "id",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
"kind": "SCALAR",
"name": "ID",
"ofType": null
},
"defaultValue": null,
"isDeprecated": false,
@ -436,25 +432,17 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": null,
"type": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
},
"defaultValue": null,
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "projectSlug",
"description": null,
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"defaultValue": null,
"isDeprecated": false,
@ -1444,13 +1432,9 @@
}
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "UNION",
"name": "RemoteCacheStorage",
"ofType": null
}
"kind": "UNION",
"name": "RemoteCacheStorage",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
@ -2846,24 +2830,6 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "isDefault",
"description": null,
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "name",
"description": null,

View File

@ -2,7 +2,22 @@
require "test_helper"
class S3BucketClearServiceTest < ActiveSupport::TestCase
class CacheClearServiceTest < ActiveSupport::TestCase
class ContentsMock
class CacheObject
attr_reader :key
def initialize(key:)
@key = key
end
end
def contents
[CacheObject.new(key: "key")]
end
def next_marker
nil
end
end
setup do
@user = User.create!(email: "test@cloud.tuist.io", password: Devise.friendly_token.first(16))
@s3_bucket = @user.account.s3_buckets.create!(
@ -15,23 +30,10 @@ class S3BucketClearServiceTest < ActiveSupport::TestCase
DecipherService.stubs(:call).returns("decoded secret")
end
test "cache is cleared" do
# Given
Aws::S3::Bucket.any_instance.stubs(:clear!)
# When
got = S3BucketClearService.call(
id: @s3_bucket.id,
clearer: @user,
)
# Then
assert_equal @s3_bucket, got
end
test "cache is cleared with project slug" do
# Given
Aws::S3::Bucket.any_instance.stubs(:clear!)
Aws::S3::Client.any_instance.stubs(:delete_objects)
Aws::S3::Client.any_instance.stubs(:list_objects).returns(ContentsMock.new)
project = Project.create!(
name: "tuist-project",
account_id: @user.account.id,
@ -40,7 +42,7 @@ class S3BucketClearServiceTest < ActiveSupport::TestCase
ProjectFetchService.any_instance.stubs(:fetch_by_slug).returns(project)
# When
got = S3BucketClearService.call(
got = CacheClearService.call(
project_slug: "project/slug",
clearer: @user,
)

View File

@ -40,20 +40,23 @@ class CacheServiceTest < ActiveSupport::TestCase
assert_equal true, got
end
test "object exists when remote storage is not defined" do
test "uses default bucket when remote storage is not defined" do
# Given
Aws::S3::Client.any_instance.stubs(:head_object).returns(true)
@project.update(remote_cache_storage: nil)
# When / Then
assert_raises(CacheService::Error::MissingRemoteCacheStorage) do
CacheService.new(
project_slug: "my-project/tuist",
hash: "artifact-hash",
name: "MyFramework",
user: @user,
project: nil,
)
.object_exists?
end
# When
got = CacheService.new(
project_slug: "my-project/tuist",
hash: "artifact-hash",
name: "MyFramework",
user: nil,
project: @project,
)
.object_exists?
# Then
assert_equal true, got
end
test "object exists with using passed project" do

View File

@ -3,7 +3,7 @@
require "test_helper"
class ProjectChangeRemoteCacheStorageServiceTest < ActiveSupport::TestCase
test "fetches a project with a given name account_name" do
test "changes to the new bucket" do
# Given
user = User.create!(email: "test@cloud.tuist.io", password: Devise.friendly_token.first(16))
account = user.account
@ -69,4 +69,25 @@ class ProjectChangeRemoteCacheStorageServiceTest < ActiveSupport::TestCase
ProjectChangeRemoteCacheStorageService.call(id: "non-existent-id", project_id: project.id, user: user)
end
end
test "sets the remote cache storage to nil if the id is nil" do
# Given
user = User.create!(email: "test@cloud.tuist.io", password: Devise.friendly_token.first(16))
account = user.account
Project.create!(name: "tuist-project", account_id: account.id, token: Devise.friendly_token.first(16))
project = Project.create!(name: "tuist-project-2", account_id: account.id, token: Devise.friendly_token.first(16))
s3_bucket = account.s3_buckets.create!(
name: "s3-bucket",
access_key_id: "access key id",
region: "region",
)
project.update(remote_cache_storage: s3_bucket)
# When
got = ProjectChangeRemoteCacheStorageService.call(project_id: project.id, user: user)
# Then
assert_nil got
assert_equal "tuist-debug", Project.find(project.id).remote_cache_storage.name
end
end

View File

@ -21,8 +21,7 @@ class ProjectCreateServiceTest < ActiveSupport::TestCase
# Then
assert_equal project_name, got.name
assert_equal account, got.account
assert_equal "this-is-a-uniq-#{account.name}-#{project_name}", got.remote_cache_storage.name
assert_equal true, got.remote_cache_storage.is_default
assert_equal "tuist-debug", got.remote_cache_storage.name
end
test "returns an error if a project with same slug already exists" do

View File

@ -8,21 +8,11 @@ class ProjectDeleteServicerviceTest < ActiveSupport::TestCase
Aws::S3::Client.stubs(:new).returns(client)
end
test "deletes a project and its default bucket with a given id" do
test "deletes a project" do
# Given
deleter = User.create!(email: "test@cloud.tuist.io", password: Devise.friendly_token.first(16))
account = deleter.account
project = Project.create!(name: "tuist-project", account_id: account.id, token: Devise.friendly_token.first(16))
s3_bucket = account.s3_buckets.create!(
name: "test-tuist-project",
access_key_id: "",
secret_access_key: "",
iv: "",
region: "",
is_default: true,
default_project_id: project.id,
)
project.update(remote_cache_storage: s3_bucket)
# When
got = ProjectDeleteService.call(id: project.id, deleter: deleter)
@ -32,7 +22,6 @@ class ProjectDeleteServicerviceTest < ActiveSupport::TestCase
assert_raises(ActiveRecord::RecordNotFound) do
Project.find(project.id)
end
assert_nil S3Bucket.find_by(id: s3_bucket.id)
end
test "fails to fetch a project if deleter does not have rights to update it" do

View File

@ -28,35 +28,6 @@ class S3BucketCreateServiceTest < ActiveSupport::TestCase
assert_equal region, got.region
end
test "creates a default S3 bucket" do
# Given
name = "bucket"
access_key_id = "access key id"
secret_access_key = "secret access key"
region = "region"
account = Account.create!(owner: Organization.create!, name: "tuist")
project = Project.create!(name: "tuist/tuist", account_id: account.id, token: Devise.friendly_token.first(8))
# When
got = S3BucketCreateService.call(
name: name,
access_key_id: access_key_id,
secret_access_key: secret_access_key,
account_id: account.id,
region: region,
default_project: project,
)
# Then
assert_equal name, got.name
assert_equal access_key_id, got.access_key_id
assert_not_equal secret_access_key, got.secret_access_key
assert_equal account.id, got.account_id
assert_equal region, got.region
assert_equal true, got.is_default
assert_equal project.id, got.default_project_id
end
test "creating an S3 bucket fails when another with the same name already exists" do
# Given
account = Account.create!(owner: Organization.create!, name: "tuist")

View File

@ -19,26 +19,12 @@ class S3BucketsFetchServiceTest < ActiveSupport::TestCase
secret_access_key: "secret",
region: "region",
)
s3_bucket_default_one = account.s3_buckets.create!(
access_key_id: "2",
is_default: true,
name: "random-id-#{account.name}-project_one",
secret_access_key: "secret",
region: "region",
)
account.s3_buckets.create!(
access_key_id: "2",
is_default: true,
name: "#{account.name}-project_two",
secret_access_key: "secret",
region: "region",
)
# When
got = S3BucketsFetchService.call(account_name: account.name, project_name: "project_one", user: user)
# Then
assert_equal [s3_bucket_one, s3_bucket_two, s3_bucket_default_one], got
assert_equal [s3_bucket_one, s3_bucket_two], got
end
test "fails to fetch S3 buckets if user does not have rights to access them" do