Compare commits: 488eee3aba ... f9afbd017c (13 commits)

Author | SHA1 | Date
---|---|---
adueck | f9afbd017c |
adueck | a0704e7808 |
adueck | 2efa3086f2 |
adueck | 1c11bf9cf2 |
adueck | 9be5321d48 |
adueck | a3c5d1b588 |
adueck | c13c1d0168 |
adueck | 0feaec0b62 |
adueck | 04a0505d7f |
adueck | e5bbcdf567 |
adueck | 720868ebf0 |
adueck | f128ee52f0 |
adueck | 17041c1719 |
@@ -1,34 +0,0 @@
-name: Deploy Functions
-
-on:
-  push:
-    branches:
-      - master
-    paths:
-      - "functions/**"
-      - ".github/workflows/deploy-functions.yml"
-
-  workflow_dispatch:
-
-jobs:
-  deploy-functions:
-    runs-on: ubuntu-latest
-    env:
-      LINGDOCS_NPM_TOKEN: ${{ secrets.LINGDOCS_NPM_TOKEN }}
-      FIREBASE_TOKEN: ${{ secrets.FIREBASE_TOKEN }}
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version: 20
-          cache: "npm"
-      - run: npm install -g firebase-tools
-      - run: |
-          cp .npmrc functions
-          cd website
-          npm install
-          cd ..
-          cd functions
-          npm install
-      - name: deploy functions and hosting routes
-        run: firebase deploy -f --token ${FIREBASE_TOKEN}
@@ -0,0 +1,19 @@
+name: Deploy Hono
+
+on:
+  push:
+    branches:
+      - master
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    name: Deploy
+    steps:
+      - uses: actions/checkout@v4
+      - name: Deploy
+        uses: cloudflare/wrangler-action@v3
+        with:
+          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+          workingDirectory: "new-functions"
@@ -1,43 +0,0 @@
-name: Functions CI
-
-on:
-  push:
-    branches:
-      - master
-    paths:
-      - "functions/**"
-      - ".github/workflows/functions-ci.yml"
-
-  workflow_dispatch:
-
-jobs:
-  build-and-serve-functions:
-    runs-on: ubuntu-latest
-    env:
-      LINGDOCS_NPM_TOKEN: ${{ secrets.LINGDOCS_NPM_TOKEN }}
-      FIREBASE_TOKEN: ${{ secrets.FIREBASE_TOKEN }}
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version: 20
-          cache: "npm"
-      - run: npm install -g firebase-tools
-      - name: build functions
-        run: |
-          cp .npmrc functions
-          cd website
-          npm install
-          cd ..
-          cd functions
-          npm install
-          npm run build
-      - name: start up emulator once
-        run: |
-          cd functions
-          firebase functions:config:get --token ${FIREBASE_TOKEN} > .runtimeconfig.json
-          echo '#!/bin/bash' > empty.sh
-          chmod +x empty.sh
-          firebase emulators:exec ./empty.sh --only functions --token ${FIREBASE_TOKEN}
-          rm .runtimeconfig.json
-          rm empty.sh
File diff suppressed because it is too large
@@ -20,9 +20,9 @@
     "@types/google-spreadsheet": "^3.0.2",
     "@types/react": "^18.0.21",
     "cors": "^2.8.5",
-    "firebase-admin": "^9.2.0",
-    "firebase-functions": "^3.24.1",
-    "google-spreadsheet": "^3.1.15",
+    "firebase-admin": "^13.0.1",
+    "firebase-functions": "^6.1.1",
+    "googleapis": "^144.0.0",
     "nano": "^9.0.3",
     "node-fetch": "^2.6.1",
     "react": "^17.0.1",
@@ -1,19 +1,21 @@
-import * as functions from "firebase-functions";
+import * as functions from "firebase-functions/v2";
 import * as FT from "../../website/src/types/functions-types";
 import { receiveSubmissions } from "./submissions";
 import lingdocsAuth from "./middleware/lingdocs-auth";
 import publish from "./publish";

-export const publishDictionary = functions
-  .runWith({
+const couchdbUrl = functions.params.defineString("ABC");
+console.log({ couchdb: couchdbUrl.value() });
+
+export const publishDictionary = functions.https.onRequest(
+  {
     timeoutSeconds: 525,
-    memory: "2GB",
-  })
-  .https.onRequest(
+    memory: "2GiB",
+  },
   lingdocsAuth(
     async (
       req,
-      res: functions.Response<FT.PublishDictionaryResponse | FT.FunctionError>
+      res // : functions.Response<FT.PublishDictionaryResponse | FT.FunctionError>
     ) => {
       if (req.user.level !== "editor") {
         res.status(403).send({ ok: false, error: "403 forbidden" });
@@ -28,18 +30,17 @@ export const publishDictionary = functions
       }
     }
   )
 );

-export const submissions = functions
-  .runWith({
+export const submissions = functions.https.onRequest(
+  {
     timeoutSeconds: 60,
-    memory: "1GB",
-  })
-  .https.onRequest(
+    memory: "1GiB",
+  },
   lingdocsAuth(
     async (
       req,
-      res: functions.Response<FT.SubmissionsResponse | FT.FunctionError>
+      res // : functions.Response<FT.SubmissionsResponse | FT.FunctionError>
     ) => {
       if (!Array.isArray(req.body)) {
         res.status(400).send({
@@ -59,4 +60,4 @@ export const submissions = functions
       }
     }
   )
 );
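Note on the change above: both HTTPS functions move from the firebase-functions v1 builder style (`functions.runWith({ ... }).https.onRequest(...)`) to the v2 call style, where the runtime options are passed as the first argument to `onRequest` and memory is given in GiB. A minimal sketch of the two styles side by side (the `ping` handlers here are illustrative, not part of the repo):

```ts
// v1 style: options are chained on with runWith() before https.onRequest()
import * as functionsV1 from "firebase-functions";

export const pingV1 = functionsV1
  .runWith({ timeoutSeconds: 60, memory: "1GB" })
  .https.onRequest((req, res) => {
    res.send("ok");
  });

// v2 style: the options object is the first argument of onRequest()
import { onRequest } from "firebase-functions/v2/https";

export const pingV2 = onRequest(
  { timeoutSeconds: 60, memory: "1GiB" },
  (req, res) => {
    res.send("ok");
  }
);
```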
@@ -1,43 +1,63 @@
 import cors from "cors";
 import fetch from "node-fetch";
-import type { https, Response } from "firebase-functions";
-import * as FT from "../../../website/src/types/functions-types";
-import type { LingdocsUser } from "../../../website/src/types/account-types";
+// unfortunately have to comment out all this typing because the new version
+// of firebase-functions doesn't include it?
+// import type { https, Response } from "firebase-functions";
+// import * as FT from "../../../website/src/types/functions-types";
+// import type { LingdocsUser } from "../../../website/src/types/account-types";

 const useCors = cors({ credentials: true, origin: /\.lingdocs\.com$/ });

-interface ReqWUser extends https.Request {
-  user: LingdocsUser;
-}
+// interface ReqWUser extends https.Request {
+//   user: LingdocsUser;
+// }

 /**
  * creates a handler to pass to a firebase https.onRequest function
  *
  */
-export default function makeHandler(toRun: (req: ReqWUser, res: Response<FT.FunctionResponse>) => any | Promise<any>) {
-  return function(reqPlain: https.Request, resPlain: Response<any>) {
+export default function makeHandler(
+  toRun: (
+    req: any, //ReqWUser,
+    res: any /*Response<FT.FunctionResponse> */
+  ) => any | Promise<any>
+) {
+  return function (
+    reqPlain: any /* https.Request */,
+    resPlain: any /* Response<any> */
+  ) {
     useCors(reqPlain, resPlain, async () => {
       const { req, res } = await authorize(reqPlain, resPlain);
       if (!req) {
         res.status(401).send({ ok: false, error: "unauthorized" });
         return;
-      };
+      }
       toRun(req, res);
       return;
     });
-  }
+  };
 }

-async function authorize(req: https.Request, res: Response<any>): Promise<{ req: ReqWUser | null, res: Response<FT.FunctionResponse> }> {
-  const { headers: { cookie }} = req;
+async function authorize(
+  req: any /* https.Request*/,
+  res: any /*Response<any>*/
+): Promise<{
+  req: any; // ReqWUser | null;
+  res: any /*Response<FT.FunctionResponse>*/;
+}> {
+  const {
+    headers: { cookie },
+  } = req;
   if (!cookie) {
     return { req: null, res };
   }
-  const r = await fetch("https://account.lingdocs.com/api/user", { headers: { cookie }});
+  const r = await fetch("https://account.lingdocs.com/api/user", {
+    headers: { cookie },
+  });
   const { ok, user } = await r.json();
   if (ok === true && user) {
     req.user = user;
-    return { req: req as ReqWUser, res };
+    return { req: req /* as ReqWUser*/, res };
   }
   return { req: null, res };
 }
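The `any` fallbacks above exist because the old `https.Request` / `Response` imports from the top-level `firebase-functions` package were dropped in the upgrade. If the typing is wanted back, one option (an assumption, not something this commit does) is to pull the request type from the v2 https module and the response type from express:

```ts
// Sketch only: restoring the commented-out typing under firebase-functions v2.
// Assumes express types are available in the project.
import type { Request } from "firebase-functions/v2/https";
import type { Response } from "express";
import type { LingdocsUser } from "../../../website/src/types/account-types";
import type * as FT from "../../../website/src/types/functions-types";

interface ReqWUser extends Request {
  user: LingdocsUser;
}

type Handler = (
  req: ReqWUser,
  res: Response<FT.FunctionResponse>
) => any | Promise<any>;
```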
@@ -1,27 +1,30 @@
 import Nano from "nano";
-import { GoogleSpreadsheet } from "google-spreadsheet";
-import {
-  dictionaryEntryTextFields,
-  dictionaryEntryBooleanFields,
-  dictionaryEntryNumberFields,
-  standardizeEntry,
-} from "@lingdocs/inflect";
 import * as FT from "../../website/src/types/functions-types";
-import * as functions from "firebase-functions";
+// import * as functions from "firebase-functions/v2";
+// @ts-ignore
+import { defineString } from "firebase-functions/params";

-const fieldsForEdit = [
-  ...dictionaryEntryTextFields,
-  ...dictionaryEntryNumberFields,
-  ...dictionaryEntryBooleanFields,
-].filter(field => !(["ts", "i"].includes(field)));
+// Define some parameters
+// // import {
+// //   addDictionaryEntries,
+// //   deleteEntry,
+// //   updateDictionaryEntries,
+// // } from "./tools/spreadsheet-tools";
+
+const couchdbUrl = defineString("ABC");
+console.log({ couchdb: couchdbUrl });

-const nano = Nano(functions.config().couchdb.couchdb_url);
+const nano = Nano("");
 const reviewTasksDb = nano.db.use("review-tasks");

-export async function receiveSubmissions(e: FT.SubmissionsRequest, editor: boolean): Promise<FT.SubmissionsResponse> {
+export async function receiveSubmissions(
+  e: FT.SubmissionsRequest,
+  editor: boolean
+): Promise<FT.SubmissionsResponse> {
   const { edits, reviewTasks } = sortSubmissions(e);

+  // TODO: guard against race conditions update!!

   // TODO: BETTER PROMISE MULTI-TASKING
   // 1. Add review tasks to the couchdb
   // 2. Edit dictionary entries
@@ -35,73 +38,13 @@ export async function receiveSubmissions(e: FT.SubmissionsRequest, editor: boole
     await reviewTasksDb.bulk({ docs });
   }

-  if (editor && edits.length) {
-    const doc = new GoogleSpreadsheet(
-      functions.config().sheet.id,
-    );
-    await doc.useServiceAccountAuth({
-      client_email: functions.config().serviceacct.email,
-      private_key: functions.config().serviceacct.key,
-    });
-    await doc.loadInfo();
-    const dictionarySheet = doc.sheetsByIndex[0];
-
-    const {
-      newEntries,
-      entryEdits,
-      entryDeletions,
-    } = sortEdits(edits);
-
-    if (entryEdits.length || entryDeletions.length) {
-      const dictRows = await dictionarySheet.getRows();
-      entryEdits.forEach(async ({entry}) => {
-        const i = dictRows.findIndex((r: any) => parseInt(r.ts) === entry.ts);
-        if (i === -1) {
-          console.error("Tried editing an entry with a ts that doesn't exist");
-        } else {
-          fieldsForEdit.forEach((field) => {
-            const toWrite = entry[field];
-            const existing = dictRows[i][field];
-            if (toWrite) {
-              // something to write
-              dictRows[i][field] = toWrite;
-            } else if (existing && !toWrite) {
-              // something to erase
-              dictRows[i][field] = "";
-            }
-          });
-        }
-        try {
-          await dictRows[i].save();
-        } catch (error) {
-          console.error("error saving edit to entry " + entry.ts);
-          console.error(error);
-        }
-      });
-      entryDeletions.forEach(async ({ ts }) => {
-        const i = dictRows.findIndex((r: any) => parseInt(r.ts) === ts);
-        if (i === -1) {
-          console.error("Tried deleting an entry with ats that doesn't exist")
-        }
-        try {
-          await dictRows[i].delete();
-        } catch (error) {
-          console.error("error deleting error " + ts);
-          console.error(error);
-        }
-      });
-    }
-
-    if (newEntries.length) {
-      newEntries.forEach((n) => {
-        const entry = { ...standardizeEntry(n.entry) };
-        // @ts-ignore
-        delete entry.i; // i not used in dictionary spreadsheet; added while building it
-        // @ts-ignore
-        dictionarySheet.addRow(entry).catch(console.error);
-      });
-    }
+  if (edits.length && editor) {
+    // const { newEntries, entryEdits, entryDeletions } = sortEdits(edits);
+    // await updateDictionaryEntries(entryEdits);
+    // for (const ed of entryDeletions) {
+    //   await deleteEntry(ed);
+    // }
+    // await addDictionaryEntries(newEntries);
   }

   return {
@@ -112,11 +55,13 @@ export async function receiveSubmissions(e: FT.SubmissionsRequest, editor: boole
 }

 type SortedSubmissions = {
-  edits: FT.Edit[],
-  reviewTasks: FT.ReviewTask[],
+  edits: FT.Edit[];
+  reviewTasks: FT.ReviewTask[];
 };

-export function sortSubmissions(submissions: FT.Submission[]): SortedSubmissions {
+export function sortSubmissions(
+  submissions: FT.Submission[]
+): SortedSubmissions {
   const base: SortedSubmissions = {
     edits: [],
     reviewTasks: [],
@@ -124,35 +69,48 @@ export function sortSubmissions(submissions: FT.Submission[]): SortedSubmissions
   return submissions.reduce((acc, s): SortedSubmissions => {
     return {
       ...acc,
-      ...(s.type === "edit suggestion" || s.type === "issue" || s.type === "entry suggestion") ? {
+      ...(s.type === "edit suggestion" ||
+      s.type === "issue" ||
+      s.type === "entry suggestion"
+        ? {
         reviewTasks: [...acc.reviewTasks, s],
-      } : {
+          }
+        : {
         edits: [...acc.edits, s],
-      },
+          }),
     };
   }, base);
 }

 type SortedEdits = {
-  entryEdits: FT.EntryEdit[],
-  newEntries: FT.NewEntry[],
-  entryDeletions: FT.EntryDeletion[],
-}
+  entryEdits: FT.EntryEdit[];
+  newEntries: FT.NewEntry[];
+  entryDeletions: FT.EntryDeletion[];
+};

 export function sortEdits(edits: FT.Edit[]): SortedEdits {
   const base: SortedEdits = {
     entryEdits: [],
     newEntries: [],
     entryDeletions: [],
-  }
-  return edits.reduce((acc, edit): SortedEdits => ({
+  };
+  return edits.reduce(
+    (acc, edit): SortedEdits => ({
       ...acc,
-      ...edit.type === "entry edit" ? {
+      ...(edit.type === "entry edit"
+        ? {
         entryEdits: [...acc.entryEdits, edit],
-      } : edit.type === "new entry" ? {
+          }
+        : edit.type === "new entry"
+        ? {
         newEntries: [...acc.newEntries, edit],
-      } : edit.type === "entry deletion" ? {
+          }
+        : edit.type === "entry deletion"
+        ? {
         entryDeletions: [...acc.entryDeletions, edit],
-      } : {},
-  }), base);
+          }
+        : {}),
+    }),
+    base
+  );
 }
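The placeholder `defineString("ABC")` and the emptied-out `Nano("")` above stand in for the old `functions.config()` lookups. For reference, a hedged sketch of how a params-based CouchDB connection would typically be wired up once a real parameter name is chosen (the name `COUCHDB_URL` is an assumption, not from this commit):

```ts
// Sketch: parameterized config with firebase-functions params.
import Nano from "nano";
import { defineString } from "firebase-functions/params";

// Declared at module scope so the CLI can bind the value at deploy time.
const couchdbUrl = defineString("COUCHDB_URL");

// .value() resolves only at runtime, so read it lazily rather than at module load.
export function getReviewTasksDb() {
  const nano = Nano(couchdbUrl.value());
  return nano.db.use("review-tasks");
}
```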
@@ -0,0 +1,211 @@
+import { google } from "googleapis";
+import { Types as T } from "@lingdocs/inflect";
+import * as FT from "../../../website/src/types/functions-types";
+import { standardizeEntry } from "@lingdocs/inflect";
+import {
+  dictionaryEntryBooleanFields,
+  dictionaryEntryNumberFields,
+  dictionaryEntryTextFields,
+} from "@lingdocs/inflect";
+import * as functions from "firebase-functions";
+
+const spreadsheetId = functions.config().sheet.id;
+const sheetId = 51288491;
+const validFields = [
+  ...dictionaryEntryTextFields,
+  ...dictionaryEntryBooleanFields,
+  ...dictionaryEntryNumberFields,
+];
+
+const SCOPES = [
+  "https://www.googleapis.com/auth/spreadsheets",
+  "https://www.googleapis.com/auth/drive.file",
+];
+
+const auth = new google.auth.GoogleAuth({
+  credentials: {
+    private_key: functions.config().serviceacct.key,
+    client_email: functions.config().serviceacct.email,
+  },
+  scopes: SCOPES,
+});
+
+const { spreadsheets } = google.sheets({
+  version: "v4",
+  auth,
+});
+
+async function getTsIndex(): Promise<number[]> {
+  const values = await getRange("A2:A");
+  return values.map((r) => parseInt(r[0]));
+}
+
+async function getFirstEmptyRow(): Promise<number> {
+  const values = await getRange("A2:A");
+  return values.length + 2;
+}
+
+export async function updateDictionaryEntries(edits: FT.EntryEdit[]) {
+  if (edits.length === 0) {
+    return;
+  }
+  const entries = edits.map((e) => e.entry);
+  const tsIndex = await getTsIndex();
+  const { keyRow, lastCol } = await getKeyInfo();
+  function entryToRowArray(e: T.DictionaryEntry): any[] {
+    return keyRow.slice(1).map((k) => e[k] || "");
+  }
+  const data = entries.flatMap((entry) => {
+    const rowNum = getRowNumFromTs(tsIndex, entry.ts);
+    if (rowNum === undefined) {
+      console.error(`couldn't find ${entry.ts} ${JSON.stringify(entry)}`);
+      return [];
+    }
+    const values = [entryToRowArray(entry)];
+    return [
+      {
+        range: `B${rowNum}:${lastCol}${rowNum}`,
+        values,
+      },
+    ];
+  });
+  await spreadsheets.values.batchUpdate({
+    spreadsheetId,
+    requestBody: {
+      data,
+      valueInputOption: "RAW",
+    },
+  });
+}
+
+export async function addDictionaryEntries(additions: FT.NewEntry[]) {
+  if (additions.length === 0) {
+    return;
+  }
+  const entries = additions.map((x) => standardizeEntry(x.entry));
+  const endRow = await getFirstEmptyRow();
+  const { keyRow, lastCol } = await getKeyInfo();
+  const ts = Date.now();
+  function entryToRowArray(e: T.DictionaryEntry): any[] {
+    return keyRow.slice(1).map((k) => e[k] || "");
+  }
+  const values = entries.map((entry, i) => [ts + i, ...entryToRowArray(entry)]);
+  await spreadsheets.values.batchUpdate({
+    spreadsheetId,
+    requestBody: {
+      data: [
+        {
+          range: `A${endRow}:${lastCol}${endRow + (values.length - 1)}`,
+          values,
+        },
+      ],
+      valueInputOption: "RAW",
+    },
+  });
+}
+
+export async function updateDictionaryFields(
+  edits: { ts: number; col: keyof T.DictionaryEntry; val: any }[]
+) {
+  const tsIndex = await getTsIndex();
+  const { colMap } = await getKeyInfo();
+  const data = edits.flatMap((edit) => {
+    const rowNum = getRowNumFromTs(tsIndex, edit.ts);
+    if (rowNum === undefined) {
+      console.error(`couldn't find ${edit.ts} ${JSON.stringify(edit)}`);
+      return [];
+    }
+    const col = colMap[edit.col];
+    return [
+      {
+        range: `${col}${rowNum}:${col}${rowNum}`,
+        values: [[edit.val]],
+      },
+    ];
+  });
+  await spreadsheets.values.batchUpdate({
+    spreadsheetId,
+    requestBody: {
+      data,
+      valueInputOption: "RAW",
+    },
+  });
+}
+
+export async function deleteEntry(ed: FT.EntryDeletion) {
+  const tsIndex = await getTsIndex();
+  const row = getRowNumFromTs(tsIndex, ed.ts);
+  if (!row) {
+    console.error(`${ed.ts} not found to do delete`);
+    return;
+  }
+  const requests = [
+    {
+      deleteDimension: {
+        range: {
+          sheetId,
+          dimension: "ROWS",
+          startIndex: row - 1,
+          endIndex: row,
+        },
+      },
+    },
+  ];
+  await spreadsheets.batchUpdate({
+    spreadsheetId,
+    requestBody: {
+      requests,
+      includeSpreadsheetInResponse: false,
+      responseRanges: [],
+    },
+  });
+}
+
+function getRowNumFromTs(tsIndex: number[], ts: number): number | undefined {
+  const res = tsIndex.findIndex((x) => x === ts);
+  if (res === -1) {
+    return undefined;
+  }
+  return res + 2;
+}
+
+async function getKeyInfo(): Promise<{
+  colMap: Record<keyof T.DictionaryEntry, string>;
+  keyRow: (keyof T.DictionaryEntry)[];
+  lastCol: string;
+}> {
+  const headVals = await getRange("A1:1");
+  const headRow: string[] = headVals[0];
+  const colMap: any = {};
+  headRow.forEach((c, i) => {
+    if (validFields.every((v) => c !== v)) {
+      throw new Error(`Invalid spreadsheet field ${c}`);
+    }
+    colMap[c] = getColumnLetters(i);
+  });
+  return {
+    colMap: colMap as Record<keyof T.DictionaryEntry, string>,
+    keyRow: headRow as (keyof T.DictionaryEntry)[],
+    lastCol: getColumnLetters(headRow.length - 1),
+  };
+}
+
+async function getRange(range: string): Promise<any[][]> {
+  const { data } = await spreadsheets.values.get({
+    spreadsheetId,
+    range,
+  });
+  if (!data.values) {
+    throw new Error("data not found");
+  }
+  return data.values;
+}
+
+function getColumnLetters(num: number) {
+  let letters = "";
+  while (num >= 0) {
+    letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"[num % 26] + letters;
+    num = Math.floor(num / 26) - 1;
+  }
+  return letters;
+}
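A quick standalone check of the A1-notation arithmetic this new module relies on (reproduced here for illustration only): `getColumnLetters` maps a zero-based column index to spreadsheet letters, and `getRowNumFromTs` adds 2 because row 1 is the header row and the ts index is read from `A2:A`.

```ts
// Standalone copy of the column-letter helper, just to show the mapping.
function colLetters(num: number): string {
  let letters = "";
  while (num >= 0) {
    letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"[num % 26] + letters;
    num = Math.floor(num / 26) - 1;
  }
  return letters;
}

console.log(colLetters(0), colLetters(25), colLetters(26), colLetters(29));
// -> "A" "Z" "AA" "AD"
// So with a 30-column header row, lastCol is "AD", and an edit to the entry
// found at spreadsheet row 12 is written to the range "B12:AD12"
// (column A holds the ts key and is skipped by entryToRowArray).
```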
@@ -0,0 +1,33 @@
+# prod
+dist/
+
+# dev
+.yarn/
+!.yarn/releases
+.vscode/*
+!.vscode/launch.json
+!.vscode/*.code-snippets
+.idea/workspace.xml
+.idea/usage.statistics.xml
+.idea/shelf
+
+# deps
+node_modules/
+.wrangler
+
+# env
+.env
+.env.production
+.dev.vars
+
+# logs
+logs/
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+# misc
+.DS_Store
@@ -0,0 +1,8 @@
+```
+npm install
+npm run dev
+```
+
+```
+npm run deploy
+```
File diff suppressed because it is too large
@@ -0,0 +1,14 @@
+{
+  "name": "new-functions",
+  "scripts": {
+    "dev": "wrangler dev",
+    "deploy": "wrangler deploy --minify"
+  },
+  "dependencies": {
+    "hono": "^4.6.12"
+  },
+  "devDependencies": {
+    "@cloudflare/workers-types": "^4.20241112.0",
+    "wrangler": "^3.88.0"
+  }
+}
@@ -0,0 +1,17 @@
+import { Hono } from "hono";
+import { cors } from "hono/cors";
+import { authMiddleware } from "./middleware/lingdocs-auth";
+
+const app = new Hono();
+app.use(cors());
+
+app.get("/", (c) => {
+  // c.env.LINGDOCS_COUCHDB
+  return c.text("Hi from hono updated");
+});
+
+app.get("/wa", authMiddleware, async (c) => {
+  return c.json({ name: c.var.user?.name, admin: c.var.user?.admin });
+});
+
+export default app;
@@ -0,0 +1,20 @@
+import { createMiddleware } from "hono/factory";
+import type { LingdocsUser } from "../../../website/src/types/account-types";
+
+export const authMiddleware = createMiddleware<{
+  Variables: {
+    user: LingdocsUser | undefined;
+  };
+}>(async (c, next) => {
+  const cookie = c.req.header("Cookie") || "";
+  const r = await fetch("https://account.lingdocs.com/api/user", {
+    headers: { cookie },
+  });
+  const res = (await r.json()) as { ok: boolean; user: LingdocsUser };
+  if (res.ok) {
+    c.set("user", res.user);
+  } else {
+    c.set("user", undefined);
+  }
+  await next();
+});
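For context, this middleware fetches the LingDocs account user from the forwarded cookie and exposes it on `c.var.user`; the `/wa` route in the new index.ts above shows the intended use. A rough sketch of exercising it in isolation with Hono's `app.request()` helper (the `/me` route and cookie value are made up for illustration):

```ts
import { Hono } from "hono";
import { authMiddleware } from "./middleware/lingdocs-auth";

const app = new Hono();

app.get("/me", authMiddleware, (c) => {
  const user = c.var.user;
  if (!user) {
    return c.json({ ok: false, error: "unauthorized" }, 401);
  }
  return c.json({ ok: true, name: user.name });
});

// e.g. in a test or local script:
// const res = await app.request("/me", { headers: { Cookie: "lingdocs=..." } });
```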
@@ -0,0 +1,17 @@
+{
+  "compilerOptions": {
+    "target": "ESNext",
+    "module": "ESNext",
+    "moduleResolution": "Bundler",
+    "strict": true,
+    "skipLibCheck": true,
+    "lib": [
+      "ESNext"
+    ],
+    "types": [
+      "@cloudflare/workers-types/2023-07-01"
+    ],
+    "jsx": "react-jsx",
+    "jsxImportSource": "hono/jsx"
+  },
+}
@@ -0,0 +1,28 @@
+name = "new-functions"
+main = "src/index.ts"
+compatibility_date = "2024-11-26"
+
+# compatibility_flags = [ "nodejs_compat" ]
+
+# [vars]
+# MY_VAR = "my-variable"
+
+# [[kv_namespaces]]
+# binding = "MY_KV_NAMESPACE"
+# id = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+
+# [[r2_buckets]]
+# binding = "MY_BUCKET"
+# bucket_name = "my-bucket"
+
+# [[d1_databases]]
+# binding = "DB"
+# database_name = "my-database"
+# database_id = ""
+
+# [ai]
+# binding = "AI"
+
+# [observability]
+# enabled = true
+# head_sampling_rate = 1
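The route handler in the new index.ts has a commented-out `c.env.LINGDOCS_COUCHDB` reference, but no corresponding binding is defined in this wrangler.toml yet (the `[vars]` block is still commented out). A hedged sketch of how such a binding would be consumed once it is supplied, for example via a `[vars]` entry, a local `.dev.vars` file, or `wrangler secret put LINGDOCS_COUCHDB` (the `Bindings` type below is an assumption, not defined anywhere in the repo):

```ts
import { Hono } from "hono";

// Assumed shape of the Worker bindings; the Workers runtime populates c.env
// from wrangler.toml [vars], .dev.vars (local dev), or secrets at deploy time.
type Bindings = {
  LINGDOCS_COUCHDB: string;
};

const app = new Hono<{ Bindings: Bindings }>();

app.get("/db-url", (c) => {
  return c.text(c.env.LINGDOCS_COUCHDB);
});

export default app;
```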