Initial commit with translated description
This commit is contained in:
140
lib/auth.js
Normal file
140
lib/auth.js
Normal file
@@ -0,0 +1,140 @@
|
||||
const fs = require('fs');
const path = require('path');

// Candidate locations for a .env file, checked in priority order:
// the current working directory first, then two ancestor directories.
const possibleEnvPaths = [
  path.resolve(process.cwd(), '.env'),
  path.resolve(__dirname, '../../../.env'),
  path.resolve(__dirname, '../../../../.env')
];

// Load the first .env that parses successfully. Loading is best-effort:
// a candidate that exists but fails to load is skipped in favor of the
// next one, and credentials may still arrive via real environment variables.
let envLoaded = false;
for (const candidate of possibleEnvPaths) {
  if (!fs.existsSync(candidate)) continue;
  try {
    require('dotenv').config({ path: candidate });
    envLoaded = true;
    break;
  } catch (e) {
    // Best-effort: move on to the next candidate path.
  }
}
||||
// In-memory token cache shared by all calls in this process.
// `token` holds the cached tenant_access_token (null when absent);
// `expireTime` is its expiry as a Unix timestamp in seconds.
let tokenCache = {
  token: null,
  expireTime: 0
};
||||
/**
 * Resolve Feishu app credentials.
 * Environment variables (FEISHU_APP_ID / FEISHU_APP_SECRET) take priority;
 * a config.json next to this module's parent directory is the fallback.
 *
 * @returns {{app_id: (string|undefined), app_secret: (string|undefined)}}
 */
function loadConfig() {
  const configPath = path.join(__dirname, '../config.json');

  let fileConfig = {};
  if (fs.existsSync(configPath)) {
    try {
      const raw = fs.readFileSync(configPath, 'utf8');
      fileConfig = JSON.parse(raw);
    } catch (e) {
      console.error("Failed to parse config.json");
    }
  }

  const app_id = process.env.FEISHU_APP_ID || fileConfig.app_id;
  const app_secret = process.env.FEISHU_APP_SECRET || fileConfig.app_secret;
  return { app_id, app_secret };
}
||||
|
||||
// Unified Token Cache (Shared with feishu-card and feishu-sticker)
// NOTE(review): assumes a `memory/` directory three levels above this file —
// confirm against the deployed directory layout.
const TOKEN_CACHE_FILE = path.resolve(__dirname, '../../../memory/feishu_token.json');
||||
/**
 * Get a Feishu tenant_access_token, using a two-level cache
 * (in-memory `tokenCache` + shared JSON file at TOKEN_CACHE_FILE)
 * and up to 3 network attempts with exponential backoff.
 *
 * @param {boolean} [forceRefresh=false] - Drop both caches and fetch a new token.
 * @returns {Promise<string>} a valid tenant_access_token
 * @throws {Error} when credentials are missing or all attempts fail
 */
async function getTenantAccessToken(forceRefresh = false) {
  const now = Math.floor(Date.now() / 1000);

  // Warm the in-memory cache from the shared file cache first.
  if (!forceRefresh && !tokenCache.token && fs.existsSync(TOKEN_CACHE_FILE)) {
    try {
      const saved = JSON.parse(fs.readFileSync(TOKEN_CACHE_FILE, 'utf8'));
      // Handle both 'expire' (standard) and 'expireTime' (legacy) field names.
      const expiry = saved.expire || saved.expireTime;
      if (saved.token && expiry > now) {
        tokenCache.token = saved.token;
        tokenCache.expireTime = expiry; // Keep internal consistency
      }
    } catch (e) {
      // Corrupted cache file: fall through and fetch a fresh token.
    }
  }

  // Force refresh: drop both the memory cache and the file cache.
  if (forceRefresh) {
    tokenCache.token = null;
    tokenCache.expireTime = 0;
    try { if (fs.existsSync(TOKEN_CACHE_FILE)) fs.unlinkSync(TOKEN_CACHE_FILE); } catch(e) {}
  }

  if (tokenCache.token && tokenCache.expireTime > now) {
    return tokenCache.token;
  }

  const config = loadConfig();
  if (!config.app_id || !config.app_secret) {
    throw new Error("Missing app_id or app_secret. Please set FEISHU_APP_ID and FEISHU_APP_SECRET environment variables or create a config.json file.");
  }

  let lastError;
  for (let attempt = 1; attempt <= 3; attempt++) {
    try {
      const response = await fetch('https://open.feishu.cn/open-apis/auth/v3/tenant_access_token/internal', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          "app_id": config.app_id,
          "app_secret": config.app_secret
        }),
        // FIX: native fetch has no `timeout` option (the old `timeout: 5000`
        // was silently ignored); AbortSignal.timeout() is the supported way
        // to bound the request to 5 seconds.
        signal: AbortSignal.timeout(5000)
      });

      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }

      const data = await response.json();

      if (data.code !== 0) {
        throw new Error(`Failed to get tenant_access_token: ${data.msg}`);
      }

      tokenCache.token = data.tenant_access_token;
      tokenCache.expireTime = now + data.expire - 60; // Refresh 1 minute early

      // Persist to disk in the unified { token, expire } format
      // shared with the other Feishu skills.
      try {
        const cacheDir = path.dirname(TOKEN_CACHE_FILE);
        if (!fs.existsSync(cacheDir)) {
          fs.mkdirSync(cacheDir, { recursive: true });
        }
        fs.writeFileSync(TOKEN_CACHE_FILE, JSON.stringify({
          token: tokenCache.token,
          expire: tokenCache.expireTime
        }, null, 2));
      } catch (e) {
        // A read-only disk must not block returning a valid token.
        console.error("Failed to save token cache:", e.message);
      }

      return tokenCache.token;

    } catch (error) {
      lastError = error;
      if (attempt < 3) {
        // Exponential backoff: 1s, then 2s, before attempts 2 and 3.
        const delay = 1000 * Math.pow(2, attempt - 1);
        await new Promise(resolve => setTimeout(resolve, delay));
      }
    }
  }

  throw lastError || new Error("Failed to retrieve access token after retries");
}
||||
|
||||
// Public API of lib/auth.js: token acquisition only; caches stay module-private.
module.exports = {
  getTenantAccessToken
};
||||
74
lib/bitable.js
Normal file
74
lib/bitable.js
Normal file
@@ -0,0 +1,74 @@
|
||||
|
||||
/**
 * Fetch a Feishu Bitable and render its records as markdown tables.
 *
 * @param {string} token - Bitable app token.
 * @param {string} accessToken - tenant access token for the Authorization header.
 * @param {string} [priorityTableId="tblJgZHOmPybgX60"] - table to fetch exclusively
 *   when present; otherwise the first 3 tables are fetched. Parameterized
 *   (previously hard-coded) so other callers are not tied to one table.
 * @returns {Promise<{title: string, content: string}>}
 */
async function fetchBitableContent(token, accessToken, priorityTableId = "tblJgZHOmPybgX60") {
  // 1. List tables
  const tablesUrl = `https://open.feishu.cn/open-apis/bitable/v1/apps/${token}/tables`;
  const tablesRes = await fetch(tablesUrl, {
    headers: { 'Authorization': `Bearer ${accessToken}` }
  });
  const tablesData = await tablesRes.json();

  if (tablesData.code !== 0) {
    return { title: "Bitable", content: `Error fetching bitable tables: ${tablesData.msg}` };
  }

  const tables = tablesData.data.items;
  if (!tables || tables.length === 0) {
    return { title: "Bitable", content: "Empty Bitable." };
  }

  let fullContent = [];

  // 2. Fetch records. If the priority table is found, only fetch it.
  // Otherwise fetch the first 3 tables to bound payload size.
  const targetTable = tables.find(t => t.table_id === priorityTableId);
  const tablesToFetch = targetTable ? [targetTable] : tables.slice(0, 3);

  // Render one cell value as safe markdown-table text.
  const toCell = (val) => {
    // FIX: was `val || ""`, which also wiped legitimate 0 / false values.
    if (val === null || val === undefined) return "";
    const s = typeof val === 'object' ? JSON.stringify(val) : String(val);
    // FIX: unescaped '|' split cells and raw newlines broke rows.
    return s.replace(/\|/g, "\\|").replace(/\n/g, "<br>");
  };

  for (const table of tablesToFetch) {
    const tableId = table.table_id;
    const tableName = table.name;

    // List up to 20 records per table.
    const recordsUrl = `https://open.feishu.cn/open-apis/bitable/v1/apps/${token}/tables/${tableId}/records?page_size=20`;
    const recRes = await fetch(recordsUrl, {
      headers: { 'Authorization': `Bearer ${accessToken}` }
    });
    const recData = await recRes.json();

    fullContent.push(`## Table: ${tableName}`);

    if (recData.code === 0 && recData.data && recData.data.items) {
      const records = recData.data.items;
      // The union of all field names across records forms the header row,
      // since individual records may omit empty fields.
      const allFields = new Set();
      records.forEach(r => Object.keys(r.fields).forEach(k => allFields.add(k)));
      const headers = Array.from(allFields);

      let md = "| " + headers.join(" | ") + " |\n";
      md += "| " + headers.map(() => "---").join(" | ") + " |\n";

      for (const rec of records) {
        md += "| " + headers.map(h => toCell(rec.fields[h])).join(" | ") + " |\n";
      }

      fullContent.push(md);
    } else {
      fullContent.push(`(Could not fetch records: ${recData.msg})`);
    }
  }

  return {
    title: "Feishu Bitable",
    content: fullContent.join("\n\n")
  };
}
||||
|
||||
// Public API of lib/bitable.js.
module.exports = {
  fetchBitableContent
};
||||
192
lib/docx.js
Normal file
192
lib/docx.js
Normal file
@@ -0,0 +1,192 @@
|
||||
|
||||
/**
 * Fetch a Feishu "docx" document and convert its blocks to markdown.
 *
 * @param {string} documentId - docx document id.
 * @param {string} accessToken - tenant access token for the Authorization header.
 * @returns {Promise<{title: string, content: string}>} document title and markdown body.
 * @throws {Error} when the blocks API returns a non-zero code.
 */
async function fetchDocxContent(documentId, accessToken) {
  // 1. Get document info for title
  const infoUrl = `https://open.feishu.cn/open-apis/docx/v1/documents/${documentId}`;
  const infoRes = await fetch(infoUrl, {
    headers: { 'Authorization': `Bearer ${accessToken}` }
  });
  const infoData = await infoRes.json();
  // Title lookup is best-effort: a failed info call still lets us fetch blocks.
  let title = "Untitled Docx";
  if (infoData.code === 0 && infoData.data && infoData.data.document) {
    title = infoData.data.document.title;
  }

  // 2. Fetch all blocks
  // List blocks API: GET https://open.feishu.cn/open-apis/docx/v1/documents/{document_id}/blocks
  // Use pagination if necessary, fetching all for now (basic implementation)
  let blocks = [];
  let pageToken = '';
  let hasMore = true;

  while (hasMore) {
    const url = `https://open.feishu.cn/open-apis/docx/v1/documents/${documentId}/blocks?page_size=500${pageToken ? `&page_token=${pageToken}` : ''}`;
    const response = await fetch(url, {
      headers: { 'Authorization': `Bearer ${accessToken}` }
    });
    const data = await response.json();

    if (data.code !== 0) {
      throw new Error(`Failed to fetch docx blocks: ${data.msg}`);
    }

    if (data.data && data.data.items) {
      blocks = blocks.concat(data.data.items);
    }

    // NOTE(review): assumes data.data is always present when code === 0 —
    // a missing payload here would throw; confirm against the API contract.
    hasMore = data.data.has_more;
    pageToken = data.data.page_token;
  }

  const markdown = convertBlocksToMarkdown(blocks);
  return { title, content: markdown };
}
||||
|
||||
/**
 * Convert an array of Feishu docx blocks into a markdown string.
 * Heading levels beyond 6 are capped at '######' because markdown defines
 * only six heading levels ('#######' renders as literal text).
 *
 * @param {Array<object>} blocks - docx block objects (block_type + typed payload).
 * @returns {string} markdown, with blocks separated by blank lines.
 */
function convertBlocksToMarkdown(blocks) {
  if (!blocks || blocks.length === 0) return "";

  let md = [];

  for (const block of blocks) {
    const type = block.block_type;

    switch (type) {
      case 1: // page (container block, no direct text)
        break;
      case 2: // text (paragraph)
        md.push(parseText(block.text));
        break;
      case 3: // heading1
        md.push(`# ${parseText(block.heading1)}`);
        break;
      case 4: // heading2
        md.push(`## ${parseText(block.heading2)}`);
        break;
      case 5: // heading3
        md.push(`### ${parseText(block.heading3)}`);
        break;
      case 6: // heading4
        md.push(`#### ${parseText(block.heading4)}`);
        break;
      case 7: // heading5
        md.push(`##### ${parseText(block.heading5)}`);
        break;
      case 8: // heading6
        md.push(`###### ${parseText(block.heading6)}`);
        break;
      case 9: // heading7 - FIX: was '#######' (invalid markdown); cap at 6
        md.push(`###### ${parseText(block.heading7)}`);
        break;
      case 10: // heading8 - capped at 6 (see above)
        md.push(`###### ${parseText(block.heading8)}`);
        break;
      case 11: // heading9 - capped at 6 (see above)
        md.push(`###### ${parseText(block.heading9)}`);
        break;
      case 12: // bullet list item
        md.push(`- ${parseText(block.bullet)}`);
        break;
      case 13: // ordered list item (renderers renumber "1." automatically)
        md.push(`1. ${parseText(block.ordered)}`);
        break;
      case 14: // code block
        // FIX: removed `(block.code?.style?.language === 1 ? '' : '')`,
        // a dead expression that always evaluated to ''.
        md.push('```\n' + parseText(block.code) + '\n```');
        break;
      case 15: // quote
        md.push(`> ${parseText(block.quote)}`);
        break;
      case 27: // image (binary content is not fetched here)
        md.push(``);
        break;
      default:
        // Unknown block: log a truncated dump and leave a visible marker
        // so the omission is noticeable in the output.
        console.error(`Skipped block type: ${type}`, JSON.stringify(block).substring(0, 200));
        md.push(`[UNSUPPORTED BLOCK TYPE: ${type}]`);
        break;
    }
  }

  return md.join('\n\n');
}
||||
|
||||
/**
 * Append markdown content to the end of a Feishu docx document.
 *
 * @param {string} documentId - target document id (also used as the root block id).
 * @param {string} content - markdown text; parsed by convertMarkdownToBlocks.
 * @param {string} accessToken - tenant access token for the Authorization header.
 * @returns {Promise<{success: boolean, appended_blocks: Array<object>}>}
 * @throws {Error} when the API responds with a non-zero code.
 */
async function appendDocxContent(documentId, content, accessToken) {
  // 1. Convert markdown content to Feishu blocks
  const blocks = convertMarkdownToBlocks(content);

  // 2. Append to the end of the document (root block)
  // POST https://open.feishu.cn/open-apis/docx/v1/documents/{document_id}/blocks/{block_id}/children
  // Use documentId as block_id to append to root
  const url = `https://open.feishu.cn/open-apis/docx/v1/documents/${documentId}/blocks/${documentId}/children`;

  const response = await fetch(url, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${accessToken}`,
      'Content-Type': 'application/json; charset=utf-8'
    },
    body: JSON.stringify({
      children: blocks,
      index: -1 // Append to end
    })
  });

  const data = await response.json();
  if (data.code !== 0) {
    throw new Error(`Failed to append to docx: ${data.msg}`);
  }

  return { success: true, appended_blocks: data.data.children };
}
||||
|
||||
/**
 * Minimal markdown-to-Feishu-block parser.
 * Supports '#'/'##'/'###' headings and '- ' bullets; every other non-blank
 * line becomes a plain text paragraph (kept untrimmed). Blank lines are dropped.
 *
 * @param {string} markdown - markdown source text.
 * @returns {Array<object>} docx block objects ready for the children API.
 */
function convertMarkdownToBlocks(markdown) {
  // prefix -> [block_type, payload key]; checked in order.
  const prefixRules = [
    ['# ', 3, 'heading1'],
    ['## ', 4, 'heading2'],
    ['### ', 5, 'heading3'],
    ['- ', 12, 'bullet'],
  ];

  const textElements = (content) => ({ elements: [{ text_run: { content } }] });
  const blocks = [];

  for (const rawLine of markdown.split('\n')) {
    const trimmed = rawLine.trim();
    if (!trimmed) continue;

    const rule = prefixRules.find(([prefix]) => trimmed.startsWith(prefix));
    if (rule) {
      const [prefix, blockType, key] = rule;
      blocks.push({ block_type: blockType, [key]: textElements(trimmed.slice(prefix.length)) });
    } else {
      // Plain paragraph keeps the original (untrimmed) line content.
      blocks.push({ block_type: 2, text: textElements(rawLine) });
    }
  }

  return blocks;
}
||||
|
||||
/**
 * Flatten a docx rich-text payload into a markdown string.
 * Text-run decorations are applied innermost-to-outermost:
 * bold, italic, strikethrough, inline code, then link.
 *
 * @param {object} blockData - payload object with an `elements` array.
 * @returns {string} markdown text ("" for empty or unknown payloads).
 */
function parseText(blockData) {
  const elements = blockData?.elements;
  if (!elements) return "";

  // Render one text run with its markdown decorations.
  const renderTextRun = (run) => {
    let rendered = run.content;
    const style = run.text_element_style;
    if (style) {
      if (style.bold) rendered = `**${rendered}**`;
      if (style.italic) rendered = `*${rendered}*`;
      if (style.strikethrough) rendered = `~~${rendered}~~`;
      if (style.inline_code) rendered = `\`${rendered}\``;
      if (style.link) rendered = `[${rendered}](${style.link.url})`;
    }
    return rendered;
  };

  const parts = [];
  for (const el of elements) {
    if (el.text_run) {
      parts.push(renderTextRun(el.text_run));
    } else if (el.mention_doc) {
      parts.push(`[Doc: ${el.mention_doc.token}]`);
    } else {
      parts.push("");
    }
  }
  return parts.join("");
}
||||
|
||||
// Public API of lib/docx.js; markdown/block converters stay module-private.
module.exports = {
  fetchDocxContent,
  appendDocxContent
};
||||
130
lib/sheet.js
Normal file
130
lib/sheet.js
Normal file
@@ -0,0 +1,130 @@
|
||||
|
||||
/**
 * Fetch a Feishu spreadsheet and render up to 3 visible sheets as markdown tables.
 * Each sheet is capped at 100 rows x 20 columns to bound payload size.
 *
 * @param {string} token - spreadsheet token.
 * @param {string} accessToken - tenant access token for the Authorization header.
 * @returns {Promise<{title: string, content: string}>}
 */
async function fetchSheetContent(token, accessToken) {
  // 1. Get metainfo to find sheetIds
  const metaUrl = `https://open.feishu.cn/open-apis/sheets/v3/spreadsheets/${token}/sheets/query`;
  const metaRes = await fetch(metaUrl, {
    headers: { 'Authorization': `Bearer ${accessToken}` }
  });
  const metaData = await metaRes.json();

  if (metaData.code !== 0) {
    // Fallback or error
    return { title: "Sheet", content: `Error fetching sheet meta: ${metaData.msg}` };
  }

  const sheets = metaData.data.sheets;
  if (!sheets || sheets.length === 0) {
    return { title: "Sheet", content: "Empty spreadsheet." };
  }

  let fullContent = [];

  // Sort sheets by index just in case
  sheets.sort((a, b) => a.index - b.index);

  // 2. Fetch content for up to 3 sheets to balance context vs info
  // Skip hidden sheets
  const visibleSheets = sheets.filter(s => !s.hidden).slice(0, 3);

  for (const sheet of visibleSheets) {
    const sheetId = sheet.sheet_id;
    const title = sheet.title;

    // Determine Range based on grid properties
    // Default safe limits: Max 20 columns (T), Max 100 rows
    // This prevents massive JSON payloads
    let maxRows = 100;
    let maxCols = 20;

    if (sheet.grid_properties) {
      maxRows = Math.min(sheet.grid_properties.row_count, 100);
      maxCols = Math.min(sheet.grid_properties.column_count, 20);
    }

    // Avoid fetching empty grids (though unlikely for valid sheets)
    if (maxRows === 0 || maxCols === 0) {
      fullContent.push(`## Sheet: ${title} (Empty)`);
      continue;
    }

    const lastColName = indexToColName(maxCols); // 1-based index to A, B, ... T
    const range = `${sheetId}!A1:${lastColName}${maxRows}`;

    // NOTE(review): metadata comes from the v3 endpoint while values use v2 —
    // presumably intentional; confirm the v2 range format stays supported.
    const valUrl = `https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/${token}/values/${range}`;

    const valRes = await fetch(valUrl, {
      headers: { 'Authorization': `Bearer ${accessToken}` }
    });
    const valData = await valRes.json();

    fullContent.push(`## Sheet: ${title}`);

    if (valData.code === 0 && valData.data && valData.data.valueRange) {
      const rows = valData.data.valueRange.values;
      fullContent.push(markdownTable(rows));

      if (sheet.grid_properties && sheet.grid_properties.row_count > maxRows) {
        fullContent.push(`*(Truncated: showing first ${maxRows} of ${sheet.grid_properties.row_count} rows)*`);
      }
    } else {
      fullContent.push(`(Could not fetch values: ${valData.msg})`);
    }
  }

  return {
    title: "Feishu Sheet",
    content: fullContent.join("\n\n")
  };
}
||||
|
||||
/**
 * Convert a 1-based column index to a spreadsheet column name
 * (1 -> 'A', 26 -> 'Z', 27 -> 'AA'). Non-positive input yields 'A'.
 *
 * @param {number} num - 1-based column index.
 * @returns {string} column name in A1 notation.
 */
function indexToColName(num) {
  const letters = [];
  let remaining = num;
  while (remaining > 0) {
    // Shift to 0-based before taking the base-26 digit.
    remaining -= 1;
    letters.unshift(String.fromCharCode(65 + (remaining % 26)));
    remaining = Math.floor(remaining / 26);
  }
  return letters.length > 0 ? letters.join('') : 'A';
}
||||
|
||||
/**
 * Render a 2-D array of cell values as a markdown table.
 * The first row becomes the header; shorter rows are padded so every row
 * has the same column count.
 *
 * @param {Array<Array<*>>} rows - raw cell values from the sheet API.
 * @returns {string} markdown table; "" when there are no rows,
 *   "(Empty Table)" when every row is empty.
 */
function markdownTable(rows) {
  if (!rows || rows.length === 0) return "";

  // Normalize row length
  const maxLength = Math.max(...rows.map(r => r ? r.length : 0));

  if (maxLength === 0) return "(Empty Table)";

  // Stringify every cell and escape characters that would break table layout.
  const cleanRows = rows.map(row => {
    if (!Array.isArray(row)) return Array(maxLength).fill("");
    return row.map(cell => {
      if (cell === null || cell === undefined) return "";
      if (typeof cell === 'object') return JSON.stringify(cell); // Handle rich text segments roughly
      return String(cell)
        .replace(/\|/g, "\\|")    // FIX: unescaped '|' split cells and corrupted the table
        .replace(/\n/g, "<br>");  // Keep single line
    });
  });

  const header = cleanRows[0];
  const body = cleanRows.slice(1);

  // Pad a row with empty cells up to the table width.
  const pad = (row) => {
    const padded = [...row];
    while (padded.length < maxLength) padded.push("");
    return padded;
  };

  const paddedHeader = pad(header);
  let md = "| " + paddedHeader.join(" | ") + " |\n";
  md += "| " + paddedHeader.map(() => "---").join(" | ") + " |\n";

  for (const row of body) {
    md += "| " + pad(row).join(" | ") + " |\n";
  }

  return md;
}
||||
|
||||
// Public API of lib/sheet.js; table helpers stay module-private.
module.exports = {
  fetchSheetContent
};
||||
34
lib/wiki.js
Normal file
34
lib/wiki.js
Normal file
@@ -0,0 +1,34 @@
|
||||
const { getTenantAccessToken } = require('./auth');
|
||||
|
||||
/**
 * Resolve a Feishu wiki node token to its underlying document object.
 *
 * @param {string} token - wiki node token.
 * @param {string} accessToken - tenant access token for the Authorization header.
 * @returns {Promise<{obj_token: string, obj_type: string, title: string}|null>}
 *   the underlying object info, or null when the API succeeds without a node payload.
 * @throws {Error} when the API responds with a non-zero code.
 */
async function resolveWiki(token, accessToken) {
  // API: GET https://open.feishu.cn/open-apis/wiki/v2/spaces/get_node?token={token}
  // FIX: encode the token so reserved characters cannot corrupt the query string.
  const url = `https://open.feishu.cn/open-apis/wiki/v2/spaces/get_node?token=${encodeURIComponent(token)}`;
  const response = await fetch(url, {
    headers: {
      'Authorization': `Bearer ${accessToken}`
    }
  });

  const data = await response.json();

  if (data.code === 0 && data.data && data.data.node) {
    return {
      obj_token: data.data.node.obj_token,
      obj_type: data.data.node.obj_type, // 'docx', 'doc', 'sheet', 'bitable'
      title: data.data.node.title
    };
  }

  // Non-zero code: surface the API error to the caller.
  if (data.code !== 0) {
    throw new Error(`Wiki resolution failed: ${data.msg} (Code: ${data.code})`);
  }

  // code === 0 but no node payload.
  return null;
}
||||
|
||||
// Public API of lib/wiki.js.
module.exports = {
  resolveWiki
};
|
||||
Reference in New Issue
Block a user