This commit is contained in:
砂糖
2026-01-24 16:54:44 +08:00
commit 70f337bb92
186 changed files with 23792 additions and 0 deletions

91
lib/email.ts Normal file
View File

@@ -0,0 +1,91 @@
// Known disposable / throwaway email providers; addresses on these domains are rejected.
const DISPOSABLE_EMAIL_DOMAINS = [
  'tempmail.com',
  'throwawaymail.com',
  'tempmail100.com'
];

// Machine-readable validation error codes (usable as i18n message keys).
export type EmailValidationError =
  | 'invalid_email_format'
  | 'email_part_too_long'
  | 'disposable_email_not_allowed'
  | 'invalid_characters';

// Format check:
// - lookahead caps the whole address at 6-254 chars
// - local part: 1-64 chars of [a-zA-Z0-9._%+-]
// - domain: 1-8 dot-separated labels (1-63 chars) plus a 2-63 letter TLD
const EMAIL_REGEX = /^(?=[a-zA-Z0-9@._%+-]{6,254}$)[a-zA-Z0-9._%+-]{1,64}@(?:[a-zA-Z0-9-]{1,63}\.){1,8}[a-zA-Z]{2,63}$/;

/**
 * Validates an email address for format, length, disposable-provider use,
 * and forbidden characters.
 *
 * @param email - raw address to check
 * @returns `{ isValid: true }` on success, otherwise `{ isValid: false, error }`
 *          where `error` is a stable `EmailValidationError` code.
 */
export function validateEmail(email: string): {
  isValid: boolean;
  error?: EmailValidationError;
} {
  // Overall shape (also enforces the length bounds via the lookahead).
  if (!EMAIL_REGEX.test(email)) {
    return {
      isValid: false,
      error: 'invalid_email_format'
    };
  }

  const [localPart, domain] = email.split('@');

  // Defense-in-depth length check. EMAIL_REGEX already caps the total at
  // 254 chars and the local part at 64, so this should never fire.
  if (domain.length > 255 || localPart.length > 64) {
    return {
      isValid: false,
      error: 'email_part_too_long'
    };
  }

  // Reject disposable-email providers (case-insensitive domain match).
  if (DISPOSABLE_EMAIL_DOMAINS.includes(domain.toLowerCase())) {
    return {
      isValid: false,
      error: 'disposable_email_not_allowed'
    };
  }

  // Defense-in-depth character check. BUG FIX: the original class included
  // '.', which wrongly rejected valid dotted local parts such as
  // "john.doe@example.com" even though EMAIL_REGEX permits dots.
  if (/[<>()[\]\\,;:\s@"]+/.test(localPart)) {
    return {
      isValid: false,
      error: 'invalid_characters'
    };
  }

  return { isValid: true };
}
/**
 * Normalizes an email address for duplicate / alias detection by applying
 * provider-specific sub-addressing rules:
 * - gmail.com: dots in the local part are ignored, '+suffix' stripped
 * - yahoo.com: '-suffix' stripped
 * - everyone else (incl. outlook/hotmail/live): '+suffix' stripped
 * The whole address is lowercased first. Empty input yields ''.
 */
export function normalizeEmail(email: string): string {
  if (!email) return '';

  const [localPart, domain] = email.toLowerCase().split('@');

  // Drop everything from the provider's sub-address separator onward.
  const stripAfter = (separator: string): string => localPart.split(separator)[0];

  if (domain === 'gmail.com') {
    // Gmail ignores dots entirely and anything after '+'.
    return `${stripAfter('+').replace(/\./g, '')}@${domain}`;
  }

  if (domain === 'yahoo.com') {
    // Yahoo uses '-' for disposable sub-addresses.
    return `${stripAfter('-')}@${domain}`;
  }

  // Microsoft domains and all other providers: strip the '+' suffix.
  return `${stripAfter('+')}@${domain}`;
}

23
lib/getBlogDetail.ts Normal file
View File

@@ -0,0 +1,23 @@
import { BlogPost } from '@/types/blog';
/**
 * Fetches a single article from the export API and maps it to a BlogPost.
 *
 * @param articleId - backend article identifier
 * @returns the mapped BlogPost
 * @throws Error when the HTTP request returns a non-2xx status
 */
export async function getPostDetail(articleId: string): Promise<BlogPost> {
  // NOTE(review): hard-coded backend address — consider moving to an env var.
  const url = 'http://49.232.154.205:18081/export/article/' + encodeURIComponent(articleId);
  const response = await fetch(url);
  // Fail loudly on HTTP errors instead of crashing later on `json.data`.
  if (!response.ok) {
    throw new Error(`Failed to fetch article ${articleId}: HTTP ${response.status}`);
  }
  const json = await response.json();
  const data = json.data;
  const post = {
    locale: data.langCode,
    title: data.title,
    description: data.summary,
    image: data.cover || '',
    slug: data.articleId,
    tags: '',
    date: data.publishedTime,
    pin: false,
    content: data.content,
    metadata: data,
  };
  return post;
}

66
lib/getBlogs.ts Normal file
View File

@@ -0,0 +1,66 @@
import { DEFAULT_LOCALE } from '@/i18n/routing';
import { BlogPost } from '@/types/blog';
import fs from 'fs';
import matter from 'gray-matter';
import path from 'path';
// Number of markdown files read concurrently per batch.
const POSTS_BATCH_SIZE = 10;

/**
 * Loads all blog posts for a locale from `blogs/<locale>`.
 * Files are read in batches of POSTS_BATCH_SIZE to bound concurrent I/O,
 * in reverse directory order. Unpublished posts are dropped; results are
 * ordered pinned-first, then by date descending.
 */
export async function getPosts(locale: string = DEFAULT_LOCALE): Promise<{ posts: BlogPost[] }> {
  const postsDirectory = path.join(process.cwd(), 'blogs', locale);

  // A missing locale directory simply means there are no posts.
  if (!fs.existsSync(postsDirectory)) {
    return { posts: [] };
  }

  const filenames = (await fs.promises.readdir(postsDirectory)).reverse();

  // Parse one markdown file (frontmatter + body) into a BlogPost.
  const readPost = async (filename: string): Promise<BlogPost> => {
    const fullPath = path.join(postsDirectory, filename);
    const fileContents = await fs.promises.readFile(fullPath, 'utf8');
    const { data, content } = matter(fileContents);
    return {
      locale, // use locale parameter
      title: data.title,
      description: data.description,
      image: data.image || '',
      slug: data.slug,
      tags: data.tags,
      date: data.date,
      visible: data.visible || 'published',
      pin: data.pin || false,
      content,
      metadata: data,
    };
  };

  const allPosts: BlogPost[] = [];
  // Read batch by batch so we never hold too many files open at once.
  for (let start = 0; start < filenames.length; start += POSTS_BATCH_SIZE) {
    const batch = filenames.slice(start, start + POSTS_BATCH_SIZE);
    allPosts.push(...(await Promise.all(batch.map(readPost))));
  }

  // Hide drafts, then order: pinned posts first, newest first within groups.
  const posts = allPosts
    .filter((post) => post.visible === 'published')
    .sort((a, b) => {
      if (a.pin !== b.pin) {
        return (b.pin ? 1 : 0) - (a.pin ? 1 : 0);
      }
      return new Date(b.date).getTime() - new Date(a.date).getTime();
    });

  return {
    posts,
  };
}

57
lib/getProducts.ts Normal file
View File

@@ -0,0 +1,57 @@
import { DEFAULT_LOCALE } from '@/i18n/routing';
import { Product } from '@/types/product';
import fs from 'fs';
import matter from 'gray-matter';
import path from 'path';
// Number of markdown files read concurrently per batch.
const POSTS_BATCH_SIZE = 10;

/**
 * Loads all products for a locale from `content/product/<locale>`,
 * reading files in batches of POSTS_BATCH_SIZE to bound concurrent I/O.
 *
 * @param locale - content locale directory to read (defaults to DEFAULT_LOCALE)
 * @returns all products found, in reverse directory order
 */
export async function getProducts(locale: string = DEFAULT_LOCALE): Promise<{ products: Product[] }> {
  const postsDirectory = path.join(process.cwd(), 'content', 'product', locale);

  // A missing locale directory simply means there are no products.
  if (!fs.existsSync(postsDirectory)) {
    return { products: [] };
  }

  let filenames = await fs.promises.readdir(postsDirectory);
  filenames = filenames.reverse();

  const allProducts: Product[] = [];
  // Read files batch by batch.
  for (let i = 0; i < filenames.length; i += POSTS_BATCH_SIZE) {
    const batchFilenames = filenames.slice(i, i + POSTS_BATCH_SIZE);
    const batchProducts: Product[] = await Promise.all(
      batchFilenames.map(async (filename) => {
        const fullPath = path.join(postsDirectory, filename);
        const fileContents = await fs.promises.readFile(fullPath, 'utf8');
        // (removed leftover `console.log(data)` debug output)
        const { data, content } = matter(fileContents);
        return {
          locale, // use locale parameter
          title: data.title,
          model: data.model,
          place: data.place,
          publishedTime: data.publishedTime,
          images: data.images || [],
          detail: data.detail,
          spec: data.spec || [],
          packaging: data.packaging || '',
          slug: data.slug || '',
          content,
          metadata: data,
        };
      })
    );
    allProducts.push(...batchProducts);
  }

  return {
    products: allProducts,
  };
}

27
lib/lines.ts Normal file
View File

@@ -0,0 +1,27 @@
import { Line } from '@/types/line';
import fs from 'fs';
import matter from 'gray-matter';
import path from 'path';
/**
 * Returns the frontmatter of every production-line page for the given locale,
 * read from `content/lines/<locale>/*.mdx`.
 *
 * @param locale - content locale directory to read
 * @returns one Line per .mdx file found
 */
export async function getLines(locale: string): Promise<Line[]> {
  const linesDirectory = path.join(process.cwd(), `content/lines/${locale}`);
  // BUG FIX: only process .mdx files. Previously any other entry in the
  // directory had '.mdx' appended back and the resulting non-existent
  // path threw on read. (Also removed a leftover console.log and the
  // inner `contentPath` that shadowed the outer one.)
  const files = fs.readdirSync(linesDirectory).filter((file) => file.endsWith('.mdx'));
  const lines: Line[] = [];
  for (const file of files) {
    const fileContent = fs.readFileSync(path.join(linesDirectory, file), 'utf8');
    const { data: frontmatter } = matter(fileContent);
    lines.push(frontmatter as Line);
  }
  return lines;
}
/**
 * Returns the frontmatter of a single production-line page, looked up by
 * its title/slug under `content/lines/<locale>/<title>.mdx`.
 */
export async function getLine(locale: string, title: string): Promise<Line> {
  const filePath = path.join(process.cwd(), `content/lines/${locale}/${title}.mdx`);
  const { data: frontmatter } = matter(fs.readFileSync(filePath, 'utf8'));
  return frontmatter as Line;
}

40
lib/logger.ts Normal file
View File

@@ -0,0 +1,40 @@
import * as fs from 'fs';
import * as winston from 'winston';
import 'winston-daily-rotate-file';
// Directory for rotated log files; override with the LOG_DIR env var.
const logDir: string = process.env.LOG_DIR || 'log';

// `recursive: true` is a no-op when the directory already exists, so the
// previous existsSync() pre-check (a TOCTOU race) is unnecessary.
fs.mkdirSync(logDir, { recursive: true });

// Daily-rotated file output: gzip-archived, 20 MB per file, 3 days retained,
// info level and above.
const fileTransport = new winston.transports.DailyRotateFile({
  filename: `${logDir}/%DATE%-results.log`,
  datePattern: 'YYYY-MM-DD',
  zippedArchive: true,
  maxSize: '20m',
  maxFiles: '3d',
  level: 'info',
});

// App-wide logger: timestamped JSON to the rotating file,
// colorized human-readable output (debug+) to the console.
const logger: winston.Logger = winston.createLogger({
  level: 'debug',
  format: winston.format.combine(
    winston.format.timestamp({
      format: 'YYYY-MM-DD HH:mm:ss'
    }),
    winston.format.json()
  ),
  transports: [
    fileTransport,
    new winston.transports.Console({
      level: 'debug',
      format: winston.format.combine(
        winston.format.colorize(),
        winston.format.simple()
      )
    })
  ]
});

export default logger;

100
lib/metadata.ts Normal file
View File

@@ -0,0 +1,100 @@
import { siteConfig } from '@/config/site'
import { DEFAULT_LOCALE, LOCALE_NAMES, Locale } from '@/i18n/routing'
import { Metadata } from 'next'
import { getTranslations } from 'next-intl/server'
type MetadataProps = {
  page?: string
  title?: string
  description?: string
  images?: string[]
  noIndex?: boolean
  locale: Locale
  path?: string
  canonicalUrl?: string
}

/**
 * Builds the Next.js Metadata for a page: localized title/description,
 * Open Graph and Twitter cards, canonical URL and hreflang alternates.
 *
 * @param page - logical page name; 'Home' gets the tagline title format
 * @param path - locale-relative path of the page (used for og:url)
 * @param canonicalUrl - path used for canonical/alternate links ('/' = root)
 */
export async function constructMetadata({
  page = 'Home',
  title,
  description,
  images = [],
  noIndex = false,
  locale,
  path,
  canonicalUrl,
}: MetadataProps): Promise<Metadata> {
  const t = await getTranslations({ locale, namespace: 'Home' })

  // Fall back to the localized site title/description.
  const pageTitle = title || t(`title`)
  const pageDescription = description || t(`description`)

  // Home: "<title> - <tagline>"; other pages: "<page title> | <site title>".
  const finalTitle = page === 'Home'
    ? `${pageTitle} - ${t('tagLine')}`
    : `${pageTitle} | ${t('title')}`

  // Absolute image URLs for social cards; default to the site OG image.
  const imageUrls = images.length > 0
    ? images.map(img => ({
        url: img.startsWith('http') ? img : `${siteConfig.url}/${img}`,
        alt: pageTitle,
      }))
    : [{
        url: `${siteConfig.url}/og.png`,
        alt: pageTitle,
      }]

  // The default locale lives at the site root; others are path-prefixed.
  const localePrefix = locale === DEFAULT_LOCALE ? '' : `/${locale}`

  // BUG FIX: the original `` `${prefix}${path}` || siteConfig.url ``
  // stringified an undefined `path` into the literal text "undefined"
  // (a truthy string), so the fallback never applied. Branch explicitly.
  const pageURL = path ? `${localePrefix}${path}` : siteConfig.url

  // hreflang alternate links for every supported locale.
  const alternateLanguages = Object.keys(LOCALE_NAMES).reduce((acc, lang) => {
    const langPrefix = lang === DEFAULT_LOCALE ? '' : `/${lang}`
    acc[lang] = canonicalUrl
      ? `${siteConfig.url}${langPrefix}${canonicalUrl === '/' ? '' : canonicalUrl}`
      : `${siteConfig.url}${langPrefix}`
    return acc
  }, {} as Record<string, string>)

  return {
    title: finalTitle,
    description: pageDescription,
    keywords: [],
    authors: siteConfig.authors,
    creator: siteConfig.creator,
    metadataBase: new URL(siteConfig.url),
    alternates: {
      canonical: canonicalUrl
        ? `${siteConfig.url}${localePrefix}${canonicalUrl === '/' ? '' : canonicalUrl}`
        : undefined,
      languages: alternateLanguages,
    },
    openGraph: {
      type: 'website',
      title: finalTitle,
      description: pageDescription,
      url: pageURL,
      siteName: t('title'),
      locale: locale,
      images: imageUrls,
    },
    twitter: {
      card: 'summary_large_image',
      title: finalTitle,
      description: pageDescription,
      // NOTE(review): twitter `site` is conventionally an @handle, not a
      // URL — confirm this value is intentional.
      site: `${siteConfig.url}${pageURL === '/' ? '' : pageURL}`,
      images: imageUrls,
      creator: siteConfig.creator,
    },
    robots: {
      index: !noIndex,
      follow: !noIndex,
      googleBot: {
        index: !noIndex,
        follow: !noIndex,
      },
    },
  }
}

19
lib/utils.ts Normal file
View File

@@ -0,0 +1,19 @@
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
// Combine conditional class values with clsx, then let tailwind-merge
// resolve conflicting Tailwind utility classes (last one wins).
export function cn(...inputs: ClassValue[]) {
  const combined = clsx(inputs);
  return twMerge(combined);
}
/**
 * Extracts the bare hostname from a URL-ish string.
 * Tolerates a missing protocol (assumes https) and strips a leading "www.".
 * Returns the input unchanged when it cannot be parsed as a URL.
 */
export const getDomain = (url: string) => {
  // Prepend a protocol so bare domains like "example.com" parse.
  const candidate = url.startsWith('http') ? url : `https://${url}`;
  try {
    const { hostname } = new URL(candidate);
    return hostname.replace(/^www\./, '');
  } catch {
    // Not a parseable URL — hand the original input back.
    return url;
  }
};

27
lib/workshop.ts Normal file
View File

@@ -0,0 +1,27 @@
import { WorkShop } from '@/types/workShop';
import fs from 'fs';
import matter from 'gray-matter';
import path from 'path';
/**
 * Returns the frontmatter of every workshop page for the given locale,
 * read from `content/workshop/<locale>/*.mdx`.
 *
 * @param locale - content locale directory to read
 * @returns one WorkShop per .mdx file found
 */
export async function getWorkShops(locale: string): Promise<WorkShop[]> {
  const workshopDirectory = path.join(process.cwd(), `content/workshop/${locale}`);
  // BUG FIX: only process .mdx files. Previously any other entry in the
  // directory had '.mdx' appended back and the resulting non-existent
  // path threw on read. (Also removed a leftover console.log and the
  // inner `contentPath` that shadowed the outer one.)
  const files = fs.readdirSync(workshopDirectory).filter((file) => file.endsWith('.mdx'));
  const workShops: WorkShop[] = [];
  for (const file of files) {
    const fileContent = fs.readFileSync(path.join(workshopDirectory, file), 'utf8');
    const { data: frontmatter } = matter(fileContent);
    workShops.push(frontmatter as WorkShop);
  }
  return workShops;
}
/**
 * Returns the frontmatter of a single workshop page, looked up by its
 * title/slug under `content/workshop/<locale>/<title>.mdx`.
 */
export async function getWorkShop(locale: string, title: string): Promise<WorkShop> {
  const filePath = path.join(process.cwd(), `content/workshop/${locale}/${title}.mdx`);
  const { data: frontmatter } = matter(fs.readFileSync(filePath, 'utf8'));
  return frontmatter as WorkShop;
}