Skip to content

Commit

Permalink
feat(s3): migrate files to S3; upload new files to both the database (bdd) and S3
Browse files Browse the repository at this point in the history
  • Loading branch information
iNeoO committed Sep 18, 2024
1 parent ec60728 commit f59663a
Show file tree
Hide file tree
Showing 9 changed files with 166 additions and 43 deletions.
6 changes: 4 additions & 2 deletions .talismanrc
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ fileignoreconfig:
- filename: docker-compose.prod.yaml
checksum: a9495759b787f06a1d8d5ff1bdb5fad2c9f3cf40e6fad67cad8dddf6fd3e6d15
- filename: docker-compose.yaml
checksum: c6f534640b06a94d8e7ad3e7e4138e39478bba7a85e71a9a015a5a9a51f8dff2
checksum: c882626f19ca66fb8ef048900b179923f959fea9fbff52b4913537bad55e795d
- filename: e2e/register-login-organisateur.spec.ts
checksum: 28122f4f8a3e21312046f7347bdffa570e24c8a38976417d011efc1e5004a0cf
- filename: package.json
Expand Down Expand Up @@ -130,7 +130,7 @@ fileignoreconfig:
- filename: packages/backend/src/services/DemandeSejour.js
checksum: 8a3761a96a2775987fea844c5e0d47c7c6f83ea2dfddfd878c30a90f7d9775cc
- filename: packages/backend/src/services/Document.js
checksum: 9da5142188aae29f88b045891fe04aa64071f1a7600ff8975ba360033175998a
checksum: cb9b7a55df4edbc587f87f30a2ae6dd01fe72c29d9996e4f2cb4920b93e2c041
- filename: packages/backend/src/services/User.js
checksum: 6f3bcdcbe1d0e813fb032939778cd11cf0acf3ba7f2e25ca4707f114f31cfae0
- filename: packages/backend/src/services/geo/Commune.js
Expand Down Expand Up @@ -245,6 +245,8 @@ fileignoreconfig:
checksum: 1d3be5d2c46636eb90b74382447f23ffefc943ed963736ffab7556336d2df667
- filename: packages/migrations/src/migrations/20240710125537_eig.js
checksum: c358c5f8cf7a408173e9ff8d273f05c8e1a193c3369ec2389bfd713ff1bab716
- filename: packages/migrations/src/migrations/20240918085236_migrate_files_to_minio.js
checksum: e064a2ee73c2ecb4da219f78c7458302383d264330cc7bf645239519f906f64f
- filename: packages/shared/src/components/Chat.vue
checksum: f2dbbf72bf098c7abd2c3aee230d220f5a5a106952883c0e116eb49f4f9d4db7
- filename: packages/shared/src/components/PasswordInput.vue
Expand Down
13 changes: 13 additions & 0 deletions create-bucket.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/bin/sh
# Ensure the MinIO bucket exists, creating it when missing.
# Requires: mc (MinIO client) on PATH and the env vars
# S3_BUCKET_ACCESS_KEY, S3_BUCKET_SECRET_KEY, S3_BUCKET_NAME.

# Fail fast: abort if the alias (credentials/endpoint) cannot be configured.
set -e

# Configure the alias for MinIO. Quote expansions so credentials or bucket
# names containing spaces/glob characters cannot be word-split.
mc alias set minio http://minio:9000 "${S3_BUCKET_ACCESS_KEY}" "${S3_BUCKET_SECRET_KEY}"

# Check if the bucket exists — only the exit status matters, so discard
# the listing output instead of spamming the container logs.
if mc ls "minio/${S3_BUCKET_NAME}" > /dev/null 2>&1; then
  echo "Bucket already exists"
else
  # Create the bucket if it doesn't exist (set -e aborts on failure).
  mc mb "minio/${S3_BUCKET_NAME}"
  echo "Bucket created successfully"
fi
27 changes: 26 additions & 1 deletion docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,8 @@ services:
condition: service_started
migrations:
condition: service_completed_successfully
create-bucket:
condition: service_completed_successfully
env_file:
- ./.env
environment:
Expand All @@ -83,7 +85,10 @@ services:
volumes:
- $PWD:/app:delegated
depends_on:
- postgres
postgres:
condition: service_started
create-bucket:
condition: service_completed_successfully
env_file:
- ./.env
user: 1000:1000
Expand Down Expand Up @@ -137,3 +142,23 @@ services:
MINIO_ROOT_PASSWORD: ${S3_BUCKET_SECRET_KEY}
MINIO_REGION: ${S3_BUCKET_REGION}
command: server /data --console-address ":9001"

# One-shot helper container: installs the MinIO client (mc) and ensures the
# S3 bucket exists. Other services gate on it via
# `condition: service_completed_successfully`.
create-bucket:
image: alpine:3.15
volumes:
- ./create-bucket.sh:/app/create-bucket.sh
# NOTE(review): this `command` appears superseded by the `entrypoint` below,
# which re-invokes the script itself — confirm it can be removed.
command: /bin/sh /app/create-bucket.sh
environment:
- S3_BUCKET_ACCESS_KEY=${S3_BUCKET_ACCESS_KEY}
- S3_BUCKET_SECRET_KEY=${S3_BUCKET_SECRET_KEY}
- S3_BUCKET_NAME=${S3_BUCKET_NAME}
depends_on:
- minio
# Download the standalone mc binary at start-up, then run the
# bucket-creation script.
entrypoint:
- /bin/sh
- -c
- |
apk add --no-cache curl && \
curl -O https://dl.min.io/client/mc/release/linux-amd64/mc && \
chmod +x mc && mv mc /usr/bin/mc && \
/bin/sh /app/create-bucket.sh
15 changes: 8 additions & 7 deletions packages/backend/src/controllers/documents/download.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
const stream = require("stream");
const DocumentService = require("../../services/Document");
const AppError = require("../../utils/error");
const logger = require("../../utils/logger");
Expand All @@ -9,16 +8,18 @@ module.exports = async (req, res, next) => {
try {
const { uuid } = req.params;
log.i("IN", { uuid });
const file = await DocumentService.download(uuid);
if (!file) {
const data = await DocumentService.download(uuid);
if (!data) {
log.w("DONE with error");
return next(new AppError("fichier introuvable", { statusCode: 404 }));
}
const readStream = new stream.PassThrough();
readStream.end(file.file);
res.set("Content-disposition", `attachment; filename=${file.filename}`);
const fileStream = data.Body;
res.set(
"Content-disposition",
`attachment; filename=${data.Metadata.originalname || uuid}`,
);
res.set("Content-Type", "text/plain");
readStream.pipe(res);
fileStream.pipe(res);
log.i("DONE");
} catch (error) {
log.w("DONE with error");
Expand Down
3 changes: 2 additions & 1 deletion packages/backend/src/controllers/documents/upload.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ module.exports = async (req, res, next) => {
}

try {
const uuid = await DocumentService.upload(category, file);
const uuid = await DocumentService.uploadLegacy(category, file);
await DocumentService.upload(category, file, uuid);
log.d("DONE", uuid);
return res.json({ uuid });
} catch (error) {
Expand Down
90 changes: 59 additions & 31 deletions packages/backend/src/services/Document.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,13 @@ const logger = require("../utils/logger");
const poolDoc = require("../utils/pgpoolDoc").getPool();
const AppError = require("../utils/error");

const { S3Client, ListObjectsV2Command } = require("@aws-sdk/client-s3");
const {
S3Client,
PutObjectCommand,
GetObjectCommand,
} = require("@aws-sdk/client-s3");

const S3_BUCKET_NAME = process.env.S3_BUCKET_NAME;
const S3_BUCKET_ROOT_DIR = process.env.S3_BUCKET_ROOT_DIR;

const log = logger(module.filename);

Expand Down Expand Up @@ -45,38 +48,81 @@ const query = {
],
};

// module.exports.download = async (uuid) => {
// log.i("IN");
// try {
// const { rows, rowCount } = await poolDoc.query(...query.getByUuid(uuid));
// if (rowCount > 0) {
// log.i("DONE", rows[0]);
// return rows[0];
// }
// log.i("DONE");
// return null;
// } catch (err) {
// log.w(err);
// throw new AppError("query.getByUuid failed", { cause: err });
// }
// };

/**
 * Fetch a stored document from S3 by its uuid.
 *
 * @param {string} uuid - document identifier; the object key is `${uuid}.pdf`.
 * @returns {Promise<object|null>} the S3 GetObject response (Body is a
 *   readable stream; Metadata carries originalname/mimetype/category written
 *   at upload time), or null when the object does not exist so the controller
 *   can answer 404.
 * @throws {AppError} on any other S3 failure.
 */
module.exports.download = async (uuid) => {
  log.i("IN");
  try {
    const data = await s3Client.send(
      new GetObjectCommand({
        // Module-level constant, for consistency with upload().
        Bucket: S3_BUCKET_NAME,
        Key: `${uuid}.pdf`,
      }),
    );
    log.i("DONE");
    return data;
  } catch (err) {
    if (err?.name === "NoSuchKey" || err?.Code === "NoSuchKey") {
      // Missing object: preserve the old "return null → 404" contract
      // instead of surfacing a 500 to the caller.
      log.i("DONE");
      return null;
    }
    log.w(err);
    // Label now names the failing operation (was "query.getByUuid failed").
    throw new AppError("download failed", { cause: err });
  }
};

module.exports.upload = async (category, file) => {
log.i("uploadFile - In");
/**
 * Legacy upload path: store the file bytes in the Postgres documents table.
 * Kept alongside the S3 upload during the migration so both stores stay in
 * sync and the insert remains the source of the document uuid.
 *
 * @param {string} category - document category recorded with the row.
 * @param {object} file - multer-style file ({ path, originalname, mimetype }).
 * @returns {Promise<string>} uuid generated by the insert.
 * @throws {AppError} when reading the temp file or the insert fails.
 */
module.exports.uploadLegacy = async (category, file) => {
  log.i("uploadLegacy - In");
  try {
    const { path, originalname: filename } = file;
    const data = await fs.readFile(path);
    log.d("uploadLegacy", category, filename);
    const {
      rows: [{ uuid }],
    } = await poolDoc.query(
      ...query.create(category, filename, file.mimetype, data),
    );
    log.d("uploadLegacy - Done");
    return uuid;
  } catch (err) {
    log.w(err);
    throw new AppError("uploadLegacy failed", { cause: err });
  }
};

/**
 * Upload a document to S3 under the key `${uuid}.pdf`.
 *
 * @param {string} category - stored as object metadata.
 * @param {object} file - multer-style file ({ path, originalname, mimetype }).
 * @param {string} [uuid] - object id; defaults to a fresh random UUID.
 *   Callers pass the uuid returned by uploadLegacy so both stores share ids.
 * @returns {Promise<string>} the uuid used to build the object key.
 * @throws {AppError} when reading the temp file or the S3 put fails.
 *
 * NOTE(review): relies on the global `crypto.randomUUID` (Node >= 19) or a
 * file-level require not visible here — confirm the runtime provides it.
 */
module.exports.upload = async (category, file, uuid = crypto.randomUUID()) => {
  log.i("upload - In");
  try {
    const { path, originalname: filename } = file;
    const data = await fs.readFile(path);
    log.d("upload", category, filename);
    await s3Client.send(
      new PutObjectCommand({
        Body: data,
        Bucket: S3_BUCKET_NAME,
        Key: `${uuid}.pdf`,
        // S3 metadata values must be strings; created_at uses the default
        // Date toString() format, matching the migration script.
        Metadata: {
          category,
          created_at: String(new Date()),
          mimetype: file.mimetype,
          originalname: filename,
        },
      }),
    );
    // Completion log added for symmetry with uploadLegacy.
    log.d("upload - Done");
    return uuid;
  } catch (err) {
    log.w(err);
    throw new AppError("upload failed", { cause: err });
  }
};

Expand All @@ -101,21 +147,3 @@ module.exports.getStatic = async (name) => {
log.i("getOrganisateurAvecUnRetrait - In");
return `${__dirname}/static/${name}`;
};

// One-off S3 connectivity smoke test: lists the bucket contents under the
// configured root prefix and logs the result. Errors are only logged —
// this must never break module loading.
const listFiles = async () => {
  try {
    const command = new ListObjectsV2Command({
      Bucket: S3_BUCKET_NAME,
      Prefix: S3_BUCKET_ROOT_DIR,
    });

    const data = await s3Client.send(command);
    console.log("Success:", data.Contents);
  } catch (err) {
    console.error("Error:", err);
  }
};

// Example code to test S3 is ok
// TODO: delete — this runs at module load, so every import of this service
// triggers an S3 call; remove once connectivity is confirmed.
listFiles();
3 changes: 2 additions & 1 deletion packages/migrations/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
"pg": "~8.12.0"
},
"devDependencies": {
"@aws-sdk/client-s3": "^3.649.0",
"@socialgouv/eslint-config-recommended": "^1.131.0",
"eslint": "^8.57.0",
"eslint-plugin-prettier": "^5.1.3",
Expand All @@ -26,4 +27,4 @@
"lint-staged": {
"src/*.{js,ts}": "npx eslint --cache --fix"
}
}
}
1 change: 1 addition & 0 deletions packages/migrations/src/knexfile.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ module.exports = {
user: process.env.PG_VAO_SUPERUSER,
},
migrations: {
directory: "./migrations",
tableName: "knex_migrations",
},
};
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");

// Target bucket for the migrated documents.
const S3_BUCKET_NAME = process.env.S3_BUCKET_NAME;

// S3 client pointed at the configured endpoint (MinIO in docker-compose).
// forcePathStyle serves buckets under the URL path rather than a
// subdomain, which MinIO requires.
const s3Client = new S3Client({
  credentials: {
    accessKeyId: process.env.S3_BUCKET_ACCESS_KEY,
    secretAccessKey: process.env.S3_BUCKET_SECRET_KEY,
  },
  endpoint: process.env.S3_BUCKET_ENDPOINT,
  forcePathStyle: true,
  region: process.env.S3_BUCKET_REGION,
});

/**
* @param { import("knex").Knex } knex
* @returns { Promise<void> }
*/
exports.up = (knex) => {
return knex
.withSchema("doc")
.select()
.from("documents")
.then(async (rows) => {
for (const row of rows) {
try {
await s3Client.send(
new PutObjectCommand({
Body: row.file,
Bucket: S3_BUCKET_NAME,
Key: `${row.uuid}.pdf`,
Metadata: {
category: String(row.category),
created_at: String(row.created_at),
mimetype: String(row.mime_type),
originalname: String(row.filename),
},
}),
);
} catch (err) {
console.error(`Failed to upload ${row.uuid}:`, err);
}
}
});
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
// Intentionally a no-op: objects copied to S3 by `up` are left in place,
// so this migration cannot be rolled back.
exports.down = () => {};

0 comments on commit f59663a

Please sign in to comment.