const fs = require("fs");
const path = require("path");
const matter = require("gray-matter");
const { default: axios } = require("axios");
require("dotenv").config();
const { execSync } = require("child_process");

const API_ENDPOINT_PRODUCTION =
  process.env.BLOG_POSTS_DEPLOY_TO_ALGOLIA_ENDPOINT_PRODUCTION;
const API_ENDPOINT_DEVELOP =
  process.env.BLOG_POSTS_DEPLOY_TO_ALGOLIA_ENDPOINT_DEVELOP;

function transformFileNameFormat(str) {
  // Split the string into parts
  const parts = str.split("-");
  // Join the first three parts with a slash and then the rest with a dash
  const transformedString = `${parts[0]}/${parts[1]}/${parts[2]}/${parts
    .slice(3)
    .join("-")}`;

  return transformedString;
}
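// Illustrative example (hypothetical file name): calling
// transformFileNameFormat("2024-05-01-my-first-post") returns "2024/05/01/my-first-post",
// i.e. the date prefix of a post's file name becomes the URL's path segments.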

function removeHtmlTags(str) {
  return str.replace(/<\/?[^>]+(>|$)/g, "");
}

// Function to extract metadata from a file using 'gray-matter'
const extractMetadata = (filePath) => {
  const fileContent = fs.readFileSync(filePath, "utf-8");
  const parsed = matter(fileContent);
  return {
    title: parsed.data.title,
    url: `/${transformFileNameFormat(path.basename(filePath, ".md"))}`,
    description: removeHtmlTags(parsed.data.summary),
    isBlog: true,
    id: parsed.data.id,
  }; // This will contain the frontmatter (metadata) such as title, description, etc.
};
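// For illustration (hypothetical post): a file "2024-05-01-my-first-post.md" whose frontmatter
// defines title, summary, and id would yield a record shaped like
// { title, url: "/2024/05/01/my-first-post", description, isBlog: true, id }.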

// Function to extract metadata from the previous version of a file (from the last commit)
const extractPreviousMetadata = (filePath) => {
  try {
    const previousContent = execSync(`git show HEAD^:${filePath}`, {
      encoding: "utf-8",
    });
    const parsed = matter(previousContent);
    return {
      title: parsed.data.title,
      url: `/${transformFileNameFormat(path.basename(filePath, ".md"))}`,
      description: removeHtmlTags(parsed.data.summary),
      isBlog: true,
      id: parsed.data.id,
    };
  } catch (error) {
    console.error(
      `Error extracting previous metadata for ${filePath}: ${error}`
    );
    return null;
  }
};
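// Note: `git show HEAD^:<path>` reads the file as it existed in the parent commit. This
// assumes the checkout in the workflow includes that commit (e.g. a fetch depth greater
// than 1); in a shallow single-commit clone the command fails and null is returned.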

async function sendBlogsToAlgolia() {
  // Get arguments passed from the GitHub Action (new, modified, deleted files)
  const newFiles = process.argv[2].split("|");
  const modifiedFiles = process.argv[3].split("|");
  const deletedFiles = process.argv[4].split("|");

  // Arrays to store the metadata from new, modified, and deleted files
  // These will be sent to the endpoints
  const newFilesData = [];
  const modifiedFilesData = [];
  const deletedFilesData = [];

  // Process new files
  newFiles.forEach((file) => {
    if (file && fs.existsSync(file)) {
      const metadata = extractMetadata(file);
      newFilesData.push({ file, ...metadata });
    }
  });

  // Process modified files (both before and after modification)
  modifiedFiles.forEach((file) => {
    if (file && fs.existsSync(file)) {
      const previousMetadata = extractPreviousMetadata(file);
      const currentMetadata = extractMetadata(file);
      modifiedFilesData.push({
        previous: previousMetadata,
        current: currentMetadata,
      });
    }
  });

  // Process deleted files
  // A deleted file no longer exists in the working tree at this point, so read
  // its metadata from the previous commit rather than from disk
  deletedFiles.forEach((file) => {
    if (file) {
      const metadata = extractPreviousMetadata(file);
      if (metadata) {
        deletedFilesData.push({ ...metadata });
      }
    }
  });

  console.log(
    "New files:",
    newFilesData,
    "Modified files:",
    modifiedFilesData,
    "Deleted files:",
    deletedFilesData
  );

  try {
    if (!API_ENDPOINT_PRODUCTION || !API_ENDPOINT_DEVELOP) {
      throw new Error(
        "BLOG_POSTS_DEPLOY_TO_ALGOLIA_ENDPOINT_PRODUCTION or BLOG_POSTS_DEPLOY_TO_ALGOLIA_ENDPOINT_DEVELOP environment variable not configured"
      );
    }

    try {
      await axios.post(API_ENDPOINT_DEVELOP, {
        newDocuments: newFilesData,
        updatedDocuments: modifiedFilesData,
        deletedDocuments: deletedFilesData,
      });
    } catch (error) {
      throw new Error(
        `Error sending posts to develop endpoint ${JSON.stringify(error)}`
      );
    }

    try {
      await axios.post(API_ENDPOINT_PRODUCTION, {
        newDocuments: newFilesData,
        updatedDocuments: modifiedFilesData,
        deletedDocuments: deletedFilesData,
      });
    } catch (error) {
      throw new Error(
        `Error sending posts to production endpoint ${JSON.stringify(error)}`
      );
    }

    console.log("Posts successfully sent to endpoints.");
  } catch (error) {
    console.error("Error generating or sending posts:", error);
  }
}

sendBlogsToAlgolia();
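
// Usage sketch (assumed invocation; the actual workflow step and script path may differ):
//   node scripts/deployBlogPostsToAlgolia.js "<new files>" "<modified files>" "<deleted files>"
// where each argument is a single "|"-separated list of markdown paths, e.g.
// "blog/2024-05-01-my-first-post.md|blog/2024-05-02-another-post.md" (hypothetical paths).
// Each endpoint then receives one POST with { newDocuments, updatedDocuments, deletedDocuments }.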