WIP but dead end

parent b2cd68f82c
commit 9042f4530f

@@ -1,7 +1,9 @@
 import { FileMeta } from "$sb/types.ts";
+import { IndexEvent } from "$sb/app_event.ts";
 import { EventHook } from "../../plugos/hooks/event.ts";
 
 import type { SpacePrimitives } from "./space_primitives.ts";
+import { fileMetaToPageMeta } from "../../web/space.ts";
 
 /**
  * Events exposed:
@@ -135,8 +137,9 @@ export class EventedSpacePrimitives implements SpacePrimitives {
         await this.dispatchEvent("page:saved", pageName, newMeta);
         await this.dispatchEvent("page:index_text", {
           name: pageName,
+          meta: fileMetaToPageMeta(newMeta),
           text,
-        });
+        } as IndexEvent);
       }
       return newMeta;
     } finally {

@@ -1,6 +1,6 @@
 import type { ParseTree } from "$sb/lib/tree.ts";
 import { TextChange } from "$sb/lib/change.ts";
-import { Query } from "$sb/types.ts";
+import { PageMeta, Query } from "$sb/types.ts";
 
 export type AppEvent =
   | "page:click"
@@ -32,11 +32,13 @@ export type ClickEvent = {
 
 export type IndexEvent = {
   name: string;
+  meta: PageMeta;
   text: string;
 };
 
 export type IndexTreeEvent = {
   name: string;
+  meta: PageMeta;
   tree: ParseTree;
 };
 
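
Note: with meta added, both index event payloads now carry the page's metadata alongside the text or parse tree. A minimal sketch of building the enriched IndexEvent (the field names come from the type above; buildIndexEvent is a hypothetical helper, and nothing about PageMeta's internals is assumed):

import { IndexEvent } from "$sb/app_event.ts";

// Hypothetical helper for illustration only; callers in this commit build the object inline.
function buildIndexEvent(
  name: string,
  text: string,
  meta: IndexEvent["meta"],
): IndexEvent {
  return { name, meta, text };
}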

@@ -1,5 +1,5 @@
-import { assertEquals } from "../../test_deps.ts";
-import { PromiseQueue, sleep } from "./async.ts";
+import { assert, assertEquals } from "../../test_deps.ts";
+import { batchRequests, PromiseQueue, sleep } from "./async.ts";
 
 Deno.test("PromiseQueue test", async () => {
   const q = new PromiseQueue();
@@ -24,3 +24,19 @@ Deno.test("PromiseQueue test", async () => {
   });
   assertEquals(wasRun, true);
 });
+
+Deno.test("Batch test", async () => {
+  // Generate an array with numbers up to 100
+  const elements = Array.from(Array(100).keys());
+  const multiplied = await batchRequests(elements, async (batch) => {
+    await sleep(2);
+    // Batches should be 9 or smaller (last batch will be smaller)
+    assert(batch.length <= 9);
+    return batch.map((e) => e * 2);
+  }, 9);
+  assertEquals(multiplied, elements.map((e) => e * 2));
+  const multiplied2 = await batchRequests(elements, async (batch) => {
+    return batch.map((e) => e * 2);
+  }, 10000);
+  assertEquals(multiplied2, elements.map((e) => e * 2));
+});

@@ -67,3 +67,25 @@ export class PromiseQueue {
     this.run(); // Continue processing the next promise in the queue
   }
 }
+
+export async function batchRequests<I, O>(
+  values: I[],
+  fn: (batch: I[]) => Promise<O[]>,
+  batchSize: number,
+): Promise<O[]> {
+  const results: O[] = [];
+  // Split values into batches of batchSize
+  const batches: I[][] = [];
+  for (let i = 0; i < values.length; i += batchSize) {
+    batches.push(values.slice(i, i + batchSize));
+  }
+  // Run fn on them in parallel
+  const batchResults = await Promise.all(batches.map(fn));
+  // Flatten the results
+  for (const batchResult of batchResults) {
+    if (Array.isArray(batchResult)) { // If fn returns an array, collect them
+      results.push(...batchResult);
+    }
+  }
+  return results;
+}
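
Note: a short usage sketch of the new batchRequests helper (standalone Deno example; the per-batch handler below is a placeholder, not a SilverBullet API). Splitting 250 values into chunks of 50 yields five handler calls, all issued in parallel via Promise.all:

import { batchRequests } from "$sb/lib/async.ts";

// Placeholder data and handler; a real caller would pass keys or entries and issue one RPC per batch.
const ids = Array.from(Array(250).keys()).map((i) => `id-${i}`);
const results = await batchRequests(
  ids,
  async (batch) => batch.map((id) => `looked-up-${id}`),
  50,
);
console.log(results.length); // 250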

@@ -43,7 +43,10 @@ export async function anchorComplete(completeEvent: CompleteEvent) {
     // "bare" anchor, match any page for completion purposes
     filter = undefined;
   }
-  const allAnchors = await queryObjects<AnchorObject>("anchor", { filter });
+  const allAnchors = await queryObjects<AnchorObject>("anchor", {
+    filter,
+    cacheSecs: 5,
+  });
   return {
     from: completeEvent.pos - match[1].length,
     options: allAnchors.map((a) => ({
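
Note: the cacheSecs: 5 option added to this and the following queryObjects calls suggests query results are cached briefly on the client; the caching mechanism itself is not part of this diff. A minimal, hypothetical TTL-cache sketch illustrating the idea (class and key scheme are assumptions, not SilverBullet's implementation):

// Illustration only: a tiny TTL cache keyed by a serialized query.
class TtlCache<V> {
  private entries = new Map<string, { value: V; expires: number }>();

  get(key: string): V | undefined {
    const hit = this.entries.get(key);
    if (!hit || Date.now() > hit.expires) {
      this.entries.delete(key);
      return undefined;
    }
    return hit.value;
  }

  set(key: string, value: V, ttlSecs: number) {
    this.entries.set(key, { value, expires: Date.now() + ttlSecs * 1000 });
  }
}

// Usage: cache anchor query results for 5 seconds.
const cache = new TtlCache<string[]>();
cache.set(JSON.stringify({ tag: "anchor" }), ["intro", "setup"], 5);
console.log(cache.get(JSON.stringify({ tag: "anchor" }))); // ["intro", "setup"]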

@@ -42,23 +42,20 @@ export function determineType(v: any): string {
 export async function objectAttributeCompleter(
   attributeCompleteEvent: AttributeCompleteEvent,
 ): Promise<AttributeCompletion[]> {
-  const prefixFilter: QueryExpression = ["call", "startsWith", [[
-    "attr",
-    "name",
-  ], ["string", attributeCompleteEvent.prefix]]];
   const attributeFilter: QueryExpression | undefined =
     attributeCompleteEvent.source === ""
-      ? prefixFilter
-      : ["and", prefixFilter, ["=", ["attr", "tagName"], [
+      ? undefined
+      : ["=", ["attr", "tagName"], [
         "string",
         attributeCompleteEvent.source,
-      ]]];
+      ]];
   const allAttributes = await queryObjects<AttributeObject>("attribute", {
     filter: attributeFilter,
     distinct: true,
-    select: [{ name: "name" }, { name: "attributeType" }, { name: "tag" }, {
+    select: [{ name: "name" }, { name: "attributeType" }, { name: "tagName" }, {
       name: "readOnly",
     }],
+    cacheSecs: 5,
   });
   return allAttributes.map((value) => {
     return {

@@ -1,6 +1,6 @@
 import { editor, events, markdown, mq, space, system } from "$sb/syscalls.ts";
 import { sleep } from "$sb/lib/async.ts";
-import { IndexEvent } from "$sb/app_event.ts";
+import { IndexEvent, IndexTreeEvent } from "$sb/app_event.ts";
 import { MQMessage } from "$sb/types.ts";
 import { isTemplate } from "$sb/lib/cheap_yaml.ts";
 
@@ -44,30 +44,34 @@ export async function processIndexQueue(messages: MQMessage[]) {
       await events.dispatchEvent("page:indexTemplate", {
         name,
         tree: parsed,
-      });
+      } as IndexTreeEvent);
     } else {
       await events.dispatchEvent("page:index", {
         name,
         tree: parsed,
-      });
+      } as IndexTreeEvent);
     }
   }
 }
 
-export async function parseIndexTextRepublish({ name, text }: IndexEvent) {
+export async function parseIndexTextRepublish(
+  { name, text, meta }: IndexEvent,
+) {
   const parsed = await markdown.parseMarkdown(text);
 
   if (isTemplate(text)) {
     console.log("Indexing", name, "as template");
     await events.dispatchEvent("page:indexTemplate", {
       name,
+      meta,
       tree: parsed,
-    });
+    } as IndexTreeEvent);
   } else {
     console.log("Indexing", name, "as page");
    await events.dispatchEvent("page:index", {
       name,
+      meta,
       tree: parsed,
-    });
+    } as IndexTreeEvent);
   }
 }

@@ -73,6 +73,7 @@ export async function tagComplete(completeEvent: CompleteEvent) {
     filter: ["=", ["attr", "parent"], ["string", parent]],
     select: [{ name: "name" }],
     distinct: true,
+    cacheSecs: 5,
   });
 
   if (parent === "page") {

@@ -9,7 +9,9 @@ export async function completeTaskState(completeEvent: CompleteEvent) {
   if (!taskMatch) {
     return null;
   }
-  const allStates = await queryObjects<TaskStateObject>("taskstate", {});
+  const allStates = await queryObjects<TaskStateObject>("taskstate", {
+    cacheSecs: 5,
+  });
   const states = [...new Set(allStates.map((s) => s.state))];
 
   return {

@@ -56,6 +56,7 @@ export async function templateSlashComplete(
       "boolean",
       false,
     ]]],
+    cacheSecs: 5,
   });
   return allTemplates.map((template) => ({
     label: template.trigger!,

@@ -3,6 +3,9 @@ import { KV, KvKey, KvQuery } from "$sb/types.ts";
 import { DataStore } from "../plugos/lib/datastore.ts";
 import { rpcCall } from "./syscalls/datastore.proxy.ts";
 import { LimitedMap } from "../common/limited_map.ts";
+import { batchRequests } from "$sb/lib/async.ts";
+
+const batchSize = 1000;
 
 export class RemoteDataStore implements DataStore {
   private cache = new LimitedMap<any>(20);
@@ -29,6 +32,7 @@ export class RemoteDataStore implements DataStore {
     return results[0];
   }
 
+  // TODO: Batch these up
   batchGet<T = any>(keys: KvKey[]): Promise<(T | null)[]> {
     return this.proxy("datastore.batchGet", keys);
   }
@@ -37,16 +41,26 @@ export class RemoteDataStore implements DataStore {
     return this.batchSet([{ key, value }]);
   }
 
-  batchSet<T = any>(entries: KV<T>[]): Promise<void> {
-    return this.proxy("datastore.batchSet", entries);
+  // TODO: Batch these up
+  async batchSet<T = any>(entries: KV<T>[]): Promise<void> {
+    await batchRequests(
+      entries,
+      (entries) => this.proxy("datastore.batchSet", entries),
+      batchSize,
+    );
   }
 
   delete(key: KvKey): Promise<void> {
     return this.batchDelete([key]);
   }
 
-  batchDelete(keys: KvKey[]): Promise<void> {
-    return this.proxy("datastore.batchDelete", keys);
+  // TODO: batch these up
+  async batchDelete(keys: KvKey[]): Promise<void> {
+    await batchRequests(
+      keys,
+      (keys) => this.proxy("datastore.batchDelete", keys),
+      batchSize,
+    );
   }
 
   /**
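
Note: chunking batchSet and batchDelete through batchRequests with batchSize = 1000 means no single proxied call carries more than 1000 entries, with the chunks sent concurrently rather than as one atomic request. The "TODO: Batch these up" left above batchGet is not addressed in this commit; a hedged sketch of the same treatment for the read path (standalone function, with proxyBatchGet as a hypothetical stand-in for this.proxy("datastore.batchGet", keys)):

import { batchRequests } from "$sb/lib/async.ts";
import { KvKey } from "$sb/types.ts";

// Illustration only; mirrors the batchSet/batchDelete changes above but is not part of the commit.
async function batchedGet<T>(
  proxyBatchGet: (keys: KvKey[]) => Promise<(T | null)[]>,
  keys: KvKey[],
  batchSize = 1000,
): Promise<(T | null)[]> {
  return batchRequests(keys, (chunk) => proxyBatchGet(chunk), batchSize);
}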