Skip to content

Commit

Permalink
Merge pull request #34 from faisalsayed10/v1.3.0
Browse files Browse the repository at this point in the history
  • Loading branch information
faisalsayed10 authored Mar 10, 2023
2 parents 5b1ce5d + 3c9c0ae commit 4cd03c6
Show file tree
Hide file tree
Showing 29 changed files with 899 additions and 190 deletions.
18 changes: 18 additions & 0 deletions apps/web/components/ProgressBar.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import Router from "next/router";
import NProgress from "nprogress";
import "nprogress/nprogress.css";

// Configure the global NProgress top-loading bar used for client-side
// route transitions: start from 30% width, ease over 500ms, no spinner.
NProgress.configure({
  minimum: 0.3,
  easing: "ease",
  speed: 500,
  showSpinner: false,
});

// Wire the bar to the Next.js router lifecycle. These listeners are
// registered once, as a module-load side effect of importing this file;
// both successful completion and navigation errors dismiss the bar.
Router.events.on("routeChangeStart", () => NProgress.start());
Router.events.on("routeChangeComplete", () => NProgress.done());
Router.events.on("routeChangeError", () => NProgress.done());

/**
 * Renders nothing — the component exists only so importing it (e.g. from
 * _app) triggers the side effects above. Named (was anonymous) so React
 * DevTools and stack traces show "ProgressBar" instead of "Anonymous";
 * the default export is unchanged, so all existing imports keep working.
 */
export default function ProgressBar() {
  return null;
}
3 changes: 1 addition & 2 deletions apps/web/components/files/UploadProgress.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import { Box, Flex, IconButton, Progress, Text } from "@chakra-ui/react";
import useBucket from "@hooks/useBucket";
import useKeys from "@hooks/useKeys";
import { Provider, UploadingFile } from "@util/types";
import { UploadingFile } from "@util/types";
import React from "react";
import toast from "react-hot-toast";
import { PlayerPause, PlayerPlay, X } from "tabler-icons-react";
Expand Down
7 changes: 2 additions & 5 deletions apps/web/components/ui/VideoModal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -31,14 +31,11 @@ const VideoModal: React.FC<Props> = ({ src }) => {
How do I get the credentials?
</Button>

<Modal isOpen={isOpen} onClose={onClose} isCentered>
<Modal isOpen={isOpen} onClose={onClose} isCentered size="2xl">
<ModalOverlay />
<ModalContent>
<ModalHeader py="2" px="4">
Here's How:
</ModalHeader>
<ModalCloseButton _focus={{ outline: "none" }} />
<ModalBody p="0">
<ModalBody p={0}>
<Player playsInline src={src} />
</ModalBody>
</ModalContent>
Expand Down
4 changes: 3 additions & 1 deletion apps/web/hooks/useBucket.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ export type ContextValue = {
setUploadingFiles: React.Dispatch<React.SetStateAction<UploadingFile[]>>;
addFolder: (name: string) => void;
removeFolder: (folder: DriveFolder) => Promise<void>;
addFile: (files: File[] | FileList) => Promise<void>;
addFile: (files: File[] | FileList) => Promise<any>;
removeFile: (file: DriveFile) => Promise<boolean>;
};

Expand All @@ -31,6 +31,8 @@ export default function useBucket(): ContextValue {
return useFirebase();
} else if ((Provider[keys.type] as Provider) === Provider.s3) {
return useS3();
} else if ((Provider[keys.type] as Provider) === Provider.backblaze) {
return useS3();
}

return null;
Expand Down
240 changes: 130 additions & 110 deletions apps/web/hooks/useS3.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,16 @@ import {
DeleteObjectsCommand,
GetObjectCommand,
ListObjectsV2Command,
S3Client,
S3Client
} from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { Drive } from "@prisma/client";
import { cryptoHexEncodedHash256, cryptoMd5Method, signRequest } from "@util/helpers/s3-helpers";
import { DriveFile, DriveFolder, UploadingFile } from "@util/types";
import Evaporate from "evaporate";
import { calculateVariablePartSize } from "@util/helpers/s3-helpers";
import { DriveFile, DriveFolder, Provider, UploadingFile } from "@util/types";
import { Upload } from "@util/upload";
import mime from "mime-types";
import { nanoid } from "nanoid";
import { createContext, useContext, useEffect, useState } from "react";
import { createContext, useContext, useEffect, useRef, useState } from "react";
import toast from "react-hot-toast";
import { ContextValue, ROOT_FOLDER } from "./useBucket";
import useUser from "./useUser";
Expand All @@ -31,6 +31,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
region: data.keys.region,
maxAttempts: 1,
credentials: { accessKeyId: data.keys.accessKey, secretAccessKey: data.keys.secretKey },
...(data.keys?.endpoint ? { endpoint: data.keys.endpoint } : {}),
})
);
const [loading, setLoading] = useState(false);
Expand All @@ -40,6 +41,24 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
const [folders, setFolders] = useState<DriveFolder[]>(null);
const [uploadingFiles, setUploadingFiles] = useState<UploadingFile[]>([]);
const [files, setFiles] = useState<DriveFile[]>(null);
const isMounted = useRef(false);

// Fallback for old buckets not already having the bucketUrl.
useEffect(() => {
if (isMounted.current || !data?.keys) return;
isMounted.current = true;
if (data.keys.bucketUrl) return;

if ((Provider[data.type] as Provider) === Provider.s3) {
data.keys.bucketUrl = `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`;
} else if ((Provider[data.type] as Provider) === Provider.backblaze) {
data.keys.bucketUrl = `https://${data.keys.Bucket}.s3.${data.keys.region}.backblazeb2.com`;
}

return () => {
isMounted.current = false;
};
}, [data]);

const addFolder = (name: string) => {
const path =
Expand All @@ -53,7 +72,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
parent: currentFolder.fullPath,
createdAt: new Date().toISOString(),
bucketName: data.keys.Bucket,
bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
bucketUrl: data.keys.bucketUrl,
};

setFolders((folders) => [...folders, newFolder]);
Expand All @@ -79,112 +98,113 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
};

const addFile = async (filesToUpload: File[] | FileList) => {
const evaporate = await Evaporate.create({
bucket: data.keys.Bucket,
awsRegion: data.keys.region,
aws_key: data.keys.accessKey,
computeContentMd5: true,
cryptoMd5Method,
cryptoHexEncodedHash256,
customAuthMethod: (_, __, stringToSign) => signRequest(stringToSign, data.keys.secretKey),
logging: false,
});
Array.from(filesToUpload).forEach(async (file) => {
if (/[#\$\[\]\*/]/.test(file.name))
return toast.error("File name cannot contain special characters (#$[]*/).");

Array.from(filesToUpload).forEach(async (toUpload) => {
const id = nanoid();
if (/[#\$\[\]\*/]/.test(toUpload.name)) {
toast.error("File name cannot contain special characters (#$[]*/).");
return;
}

if (files?.filter((f) => f.name === toUpload.name).length > 0) {
toast.error("File with same name already exists.");
return;
}
if (files?.filter((f) => f.name === file.name).length > 0)
return toast.error("File with same name already exists.");

const filePath =
const id = nanoid();
const Key =
currentFolder === ROOT_FOLDER
? toUpload.name
: `${decodeURIComponent(currentFolder.fullPath)}${toUpload.name}`;

evaporate.add({
name: filePath,
file: toUpload,
contentType: mime.lookup(toUpload.name) || "application/octet-stream",
uploadInitiated: () => {
setUploadingFiles((prev) =>
prev.concat([
{
id,
name: toUpload.name,
key: `${data.keys.Bucket}/${filePath}`,
task: evaporate,
state: "running",
progress: 0,
error: false,
},
])
);
},
progress: (_, stats) => {
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.map((uploadFile) => {
return uploadFile.id === id
? {
...uploadFile,
state: "running",
progress: Math.round((stats.totalUploaded / stats.fileSize) * 100),
}
: uploadFile;
})
);
},
paused: () => {
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.map((uploadFile) => {
return uploadFile.id === id ? { ...uploadFile, state: "paused" } : uploadFile;
})
);
},
resumed: () => {
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.map((uploadFile) => {
return uploadFile.id === id ? { ...uploadFile, state: "running" } : uploadFile;
})
);
? file.name
: `${decodeURIComponent(currentFolder.fullPath)}${file.name}`;

const upload = new Upload({
client: s3Client,
params: {
Key,
Body: file,
Bucket: data.keys.Bucket,
ContentType: mime.lookup(file.name) || "application/octet-stream",
},
error: (_) => {
setUploadingFiles((prevUploadingFiles) => {
return prevUploadingFiles.map((uploadFile) => {
if (uploadFile.id === id) return { ...uploadFile, error: true };
return uploadFile;
});
partSize: calculateVariablePartSize(file.size),
});

upload.on("initiated", () => {
setUploadingFiles((prev) =>
prev.concat([
{
id,
name: file.name,
key: Key,
task: upload,
state: "running",
progress: 0,
error: false,
},
])
);
});

upload.on("progress", (progress) => {
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.map((uploadFile) => {
return uploadFile.id === id
? {
...uploadFile,
state: "running",
progress: Number(
parseFloat(((progress.loaded / progress.total) * 100).toString()).toFixed(2)
),
}
: uploadFile;
})
);
});

upload.on("paused", () => {
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.map((uploadFile) => {
return uploadFile.id === id ? { ...uploadFile, state: "paused" } : uploadFile;
})
);
});

upload.on("resumed", () => {
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.map((uploadFile) => {
return uploadFile.id === id ? { ...uploadFile, state: "running" } : uploadFile;
})
);
});

upload.on("error", (err) => {
toast.error(err.message);
setUploadingFiles((prevUploadingFiles) => {
return prevUploadingFiles.map((uploadFile) => {
if (uploadFile.id === id) return { ...uploadFile, error: true };
return uploadFile;
});
},
complete: async (_xhr, file_key) => {
console.log("complete", decodeURIComponent(file_key));
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.filter((uploadFile) => uploadFile.id !== id)
);
const newFile: DriveFile = {
fullPath: filePath,
name: toUpload.name,
parent: currentFolder.fullPath,
size: toUpload.size.toString(),
createdAt: new Date().toISOString(),
contentType: mime.lookup(toUpload.name) || "application/octet-stream",
bucketName: data.keys.Bucket,
bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
url: await getSignedUrl(
s3Client,
new GetObjectCommand({ Bucket: data.keys.Bucket, Key: decodeURIComponent(file_key) }),
{ expiresIn: 3600 * 24 }
),
};
setFiles((files) => (files ? [...files, newFile] : [newFile]));
toast.success("File uploaded successfully.");
},
});
});

upload.on("completed", async () => {
setUploadingFiles((prevUploadingFiles) =>
prevUploadingFiles.filter((uploadFile) => uploadFile.id !== id)
);
const newFile: DriveFile = {
fullPath: Key,
name: file.name,
parent: currentFolder.fullPath,
size: file.size.toString(),
createdAt: new Date().toISOString(),
contentType: mime.lookup(file.name) || "application/octet-stream",
bucketName: data.keys.Bucket,
bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
url: await getSignedUrl(
s3Client,
new GetObjectCommand({ Bucket: data.keys.Bucket, Key: Key }),
{ expiresIn: 3600 * 24 }
),
};

setFiles((files) => (files ? [...files, newFile] : [newFile]));
toast.success("File uploaded successfully.");
});

await upload.start();
});
};

Expand All @@ -210,7 +230,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
name: fullPath.split("/").pop(),
bucketName: data.keys.Bucket,
parent: fullPath.split("/").shift() + "/",
bucketUrl: `https://${data.keys.Bucket}.s3.${data.keys.region}.amazonaws.com`,
bucketUrl: data.keys.bucketUrl,
});
}, [fullPath, user]);

Expand Down Expand Up @@ -240,7 +260,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
size: result.Size.toString(),
contentType: mime.lookup(result.Key) || "",
bucketName: results.Name,
bucketUrl: `https://${results.Name}.s3.${data.keys.region}.amazonaws.com`,
bucketUrl: data.keys.bucketUrl,
url: await getSignedUrl(
s3Client,
new GetObjectCommand({ Bucket: results.Name, Key: result.Key }),
Expand Down Expand Up @@ -269,7 +289,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
name: results.CommonPrefixes[i].Prefix.slice(0, -1).split("/").pop(),
bucketName: results.Name,
parent: currentFolder.fullPath,
bucketUrl: `https://${results.Name}.s3.${data.keys.region}.amazonaws.com`,
bucketUrl: data.keys.bucketUrl,
};
setFolders((folders) => [...folders, driveFolder]);
}
Expand All @@ -295,7 +315,7 @@ export const S3Provider: React.FC<Props> = ({ data, fullPath, children }) => {
size: result.Size.toString(),
contentType: mime.lookup(result.Key) || "",
bucketName: results.Name,
bucketUrl: `https://${results.Name}.s3.${data.keys.region}.amazonaws.com`,
bucketUrl: data.keys.bucketUrl,
url: await getSignedUrl(
s3Client,
new GetObjectCommand({ Bucket: results.Name, Key: result.Key }),
Expand Down
Loading

2 comments on commit 4cd03c6

@vercel
Copy link

@vercel vercel bot commented on 4cd03c6 Mar 10, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

firefiles – ./apps/web

firefiles-fayd.vercel.app
firefiles-git-main-fayd.vercel.app
usefirefiles.vercel.app

@vercel
Copy link

@vercel vercel bot commented on 4cd03c6 Mar 10, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.