mirror of
https://github.com/johndoe6345789/low-code-react-app-b.git
synced 2026-04-24 13:44:54 +00:00
stuff
This commit is contained in:
3633
package-lock.json
generated
3633
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright GitHub, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
9
packages/spark-tools/dist/agentPlugin.d.ts
vendored
9
packages/spark-tools/dist/agentPlugin.d.ts
vendored
@@ -1,9 +0,0 @@
|
||||
import type { PluginOption } from 'vite';
|
||||
interface Opts {
|
||||
serverURL?: string;
|
||||
disabled?: boolean;
|
||||
maxRetries?: number;
|
||||
retryDelay?: number;
|
||||
}
|
||||
export default function sparkAgent(opts?: Opts): PluginOption;
|
||||
export {};
|
||||
123
packages/spark-tools/dist/agentPlugin.js
vendored
123
packages/spark-tools/dist/agentPlugin.js
vendored
@@ -1,123 +0,0 @@
|
||||
function sparkAgent(opts = {}) {
|
||||
const serverURL = opts.serverURL || 'http://localhost:9000';
|
||||
const disabled = opts.disabled || false;
|
||||
const maxRetries = opts.maxRetries || 5;
|
||||
const retryDelay = opts.retryDelay || 1000; // ms
|
||||
async function sendEvent(event, retries = 0) {
|
||||
if (disabled)
|
||||
return true;
|
||||
try {
|
||||
const res = await fetch(`${serverURL}/notify`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(event),
|
||||
});
|
||||
if (!res.ok) {
|
||||
console.warn('Failed to send event to Spark Agent:', res.status, res.statusText);
|
||||
if (retries < maxRetries) {
|
||||
console.log(`Retrying event delivery (attempt ${retries + 1}/${maxRetries})...`);
|
||||
await new Promise((resolve) => setTimeout(resolve, retryDelay));
|
||||
return sendEvent(event, retries + 1);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
catch (err) {
|
||||
console.warn('Failed to send event to Spark Agent:', err);
|
||||
if (retries < maxRetries) {
|
||||
console.log(`Retrying event delivery (attempt ${retries + 1}/${maxRetries})...`);
|
||||
await new Promise((resolve) => setTimeout(resolve, retryDelay));
|
||||
return sendEvent(event, retries + 1);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
function sendStartedEvent(file) {
|
||||
return sendEvent({ type: 'build:started', timestamp: new Date().getTime(), details: { file } });
|
||||
}
|
||||
function sendSuccessEvent(file) {
|
||||
return sendEvent({ type: 'build:success', timestamp: new Date().getTime(), details: { file } });
|
||||
}
|
||||
function sendErrorEvent(error) {
|
||||
const event = {
|
||||
type: 'build:failed',
|
||||
timestamp: new Date().getTime(),
|
||||
details: {
|
||||
error: {
|
||||
message: error || 'Unknown error',
|
||||
},
|
||||
},
|
||||
};
|
||||
return sendEvent(event);
|
||||
}
|
||||
if (disabled) {
|
||||
return { name: 'spark-agent:disabled', apply: 'build' };
|
||||
}
|
||||
return {
|
||||
name: 'spark-agent',
|
||||
apply: 'serve',
|
||||
configureServer(server) {
|
||||
server.watcher.on('change', (file) => {
|
||||
sendStartedEvent(file);
|
||||
});
|
||||
const wss = server.ws.send;
|
||||
server.ws.send = function (payload) {
|
||||
if (payload.type === 'update') {
|
||||
const file = payload.updates[0]?.path;
|
||||
sendSuccessEvent(file);
|
||||
}
|
||||
else if (payload.type === 'full-reload') {
|
||||
// Certain error corrections may trigger a full-reload, so we need to send success.
|
||||
// Vite may trigger a full-reload while we still have errors, but if so
|
||||
// we expect this event will be followed with an error notification.
|
||||
const file = payload.triggeredBy;
|
||||
sendSuccessEvent(file);
|
||||
}
|
||||
else if (payload.type === 'error') {
|
||||
let errorMessage;
|
||||
if (payload.err) {
|
||||
try {
|
||||
const parsedError = JSON.parse(JSON.stringify(payload.err));
|
||||
errorMessage = [
|
||||
parsedError.message,
|
||||
parsedError.frame,
|
||||
`at ${parsedError.id}:${parsedError.loc?.line}:${parsedError.loc?.column}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
catch {
|
||||
errorMessage = JSON.stringify(payload.err);
|
||||
}
|
||||
}
|
||||
else {
|
||||
errorMessage =
|
||||
payload.error?.stack ||
|
||||
payload.error?.message ||
|
||||
(typeof payload.error === 'string' ? payload.error : JSON.stringify(payload.error)) ||
|
||||
'Unknown error';
|
||||
}
|
||||
sendErrorEvent(errorMessage);
|
||||
}
|
||||
return wss.call(this, payload);
|
||||
};
|
||||
},
|
||||
buildStart() {
|
||||
sendStartedEvent();
|
||||
},
|
||||
buildEnd(err) {
|
||||
if (err) {
|
||||
sendErrorEvent(err.message);
|
||||
}
|
||||
else {
|
||||
sendSuccessEvent();
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { sparkAgent as default };
|
||||
//# sourceMappingURL=agentPlugin.js.map
|
||||
1
packages/spark-tools/dist/agentPlugin.js.map
vendored
1
packages/spark-tools/dist/agentPlugin.js.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"agentPlugin.js","sources":["../src/agentPlugin.ts"],"sourcesContent":[null],"names":[],"mappings":"AAUc,SAAU,UAAU,CAAC,OAAa,EAAE,EAAA;AAChD,IAAA,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,IAAI,uBAAuB;AAC3D,IAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,KAAK;AACvC,IAAA,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,CAAC;IACvC,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,IAAI,CAAA;AAE1C,IAAA,eAAe,SAAS,CAAC,KAAiB,EAAE,OAAO,GAAG,CAAC,EAAA;AACrD,QAAA,IAAI,QAAQ;AAAE,YAAA,OAAO,IAAI;AAEzB,QAAA,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,CAAA,EAAG,SAAS,SAAS,EAAE;AAC7C,gBAAA,MAAM,EAAE,MAAM;AACd,gBAAA,OAAO,EAAE;AACP,oBAAA,cAAc,EAAE,kBAAkB;AACnC,iBAAA;AACD,gBAAA,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;AAC5B,aAAA,CAAC;AAEF,YAAA,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE;AACX,gBAAA,OAAO,CAAC,IAAI,CAAC,sCAAsC,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,UAAU,CAAC;AAEhF,gBAAA,IAAI,OAAO,GAAG,UAAU,EAAE;oBACxB,OAAO,CAAC,GAAG,CAAC,CAAA,iCAAA,EAAoC,OAAO,GAAG,CAAC,CAAA,CAAA,EAAI,UAAU,CAAA,IAAA,CAAM,CAAC;AAChF,oBAAA,MAAM,IAAI,OAAO,CAAC,CAAC,OAAO,KAAK,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;oBAC/D,OAAO,SAAS,CAAC,KAAK,EAAE,OAAO,GAAG,CAAC,CAAC;gBACtC;AAEA,gBAAA,OAAO,KAAK;YACd;AAEA,YAAA,OAAO,IAAI;QACb;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,OAAO,CAAC,IAAI,CAAC,sCAAsC,EAAE,GAAG,CAAC;AAEzD,YAAA,IAAI,OAAO,GAAG,UAAU,EAAE;gBACxB,OAAO,CAAC,GAAG,CAAC,CAAA,iCAAA,EAAoC,OAAO,GAAG,CAAC,CAAA,CAAA,EAAI,UAAU,CAAA,IAAA,CAAM,CAAC;AAChF,gBAAA,MAAM,IAAI,OAAO,CAAC,CAAC,OAAO,KAAK,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;gBAC/D,OAAO,SAAS,CAAC,KAAK,EAAE,OAAO,GAAG,CAAC,CAAC;YACtC;AAEA,YAAA,OAAO,KAAK;QACd;IACF;IAEA,SAAS,gBAAgB,CAAC,IAAa,EAAA;QACrC,OAAO,SAAS,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC;IACjG;IAEA,SAAS,gBAAgB,CAAC,IAAa,EAAA;QACrC,OAAO,SAAS,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC;IACjG;IAEA,SAAS,cAAc,CAAC,KAAa,EAAA;AACnC,QAAA,MAAM,KAAK,GAAG;AACZ,YAAA,IAAI,EAAE,cAAuB;AAC7B,YAAA,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE;AAC/B,YAAA,OAAO,EAAE;
AACP,gBAAA,KAAK,EAAE;oBACL,OAAO,EAAE,KAAK,IAAI,eAAe;AAClC,iBAAA;AACF,aAAA;SACF;AACD,QAAA,OAAO,SAAS,CAAC,KAAK,CAAC;IACzB;IAEA,IAAI,QAAQ,EAAE;QACZ,OAAO,EAAE,IAAI,EAAE,sBAAsB,EAAE,KAAK,EAAE,OAAO,EAAE;IACzD;IAEA,OAAO;AACL,QAAA,IAAI,EAAE,aAAa;AACnB,QAAA,KAAK,EAAE,OAAO;AACd,QAAA,eAAe,CAAC,MAAM,EAAA;YACpB,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,CAAC,IAAI,KAAI;gBACnC,gBAAgB,CAAC,IAAI,CAAC;AACxB,YAAA,CAAC,CAAC;AAEF,YAAA,MAAM,GAAG,GAAG,MAAM,CAAC,EAAE,CAAC,IAAI;AAC1B,YAAA,MAAM,CAAC,EAAE,CAAC,IAAI,GAAG,UAAU,OAAY,EAAA;AACrC,gBAAA,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE;oBAC7B,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,IAAI;oBACrC,gBAAgB,CAAC,IAAI,CAAC;gBACxB;AACK,qBAAA,IAAI,OAAO,CAAC,IAAI,KAAK,aAAa,EAAE;;;;AAIvC,oBAAA,MAAM,IAAI,GAAG,OAAO,CAAC,WAAW;oBAChC,gBAAgB,CAAC,IAAI,CAAC;gBACxB;AAAO,qBAAA,IAAI,OAAO,CAAC,IAAI,KAAK,OAAO,EAAE;AACnC,oBAAA,IAAI,YAAoB;AAExB,oBAAA,IAAI,OAAO,CAAC,GAAG,EAAE;AACf,wBAAA,IAAI;AACF,4BAAA,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AAC3D,4BAAA,YAAY,GAAG;AACb,gCAAA,WAAW,CAAC,OAAO;AACnB,gCAAA,WAAW,CAAC,KAAK;AACjB,gCAAA,CAAA,GAAA,EAAM,WAAW,CAAC,EAAE,CAAA,CAAA,EAAI,WAAW,CAAC,GAAG,EAAE,IAAI,IAAI,WAAW,CAAC,GAAG,EAAE,MAAM,CAAA,CAAE;AAC3E;iCACE,MAAM,CAAC,OAAO;iCACd,IAAI,CAAC,IAAI,CAAC;wBACf;AAAE,wBAAA,MAAM;4BACN,YAAY,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC;wBAC5C;oBACF;yBAAO;wBACL,YAAY;4BACV,OAAO,CAAC,KAAK,EAAE,KAAK;gCACpB,OAAO,CAAC,KAAK,EAAE,OAAO;iCACrB,OAAO,OAAO,CAAC,KAAK,KAAK,QAAQ,GAAG,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACnF,gCAAA,eAAe;oBACnB;oBAEA,cAAc,CAAC,YAAY,CAAC;gBAC9B;gBAEA,OAAO,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC;AAChC,YAAA,CAAC;QACH,CAAC;QACD,UAAU,GAAA;AACR,YAAA,gBAAgB,EAAE;QACpB,CAAC;AACD,QAAA,QAAQ,CAAC,GAAG,EAAA;YACV,IAAI,GAAG,EAAE;AACP,gBAAA,cAAc,CAAC,GAAG,CAAC,OAAO,CAAC;YAC7B;iBAAO;AACL,gBAAA,gBAAgB,EAAE;YACpB;QACF,CAAC;KACF;AACH;;;;"}
|
||||
329
packages/spark-tools/dist/db.js
vendored
329
packages/spark-tools/dist/db.js
vendored
@@ -1,329 +0,0 @@
|
||||
import { K as KVClient } from './kv-DBiZoNWq.js';
|
||||
|
||||
// These values should NEVER change. The values are precisely for
|
||||
// generating ULIDs.
|
||||
const ENCODING = "0123456789ABCDEFGHJKMNPQRSTVWXYZ"; // Crockford's Base32
|
||||
const ENCODING_LEN = 32; // from ENCODING.length;
|
||||
const RANDOM_LEN = 16;
|
||||
const TIME_LEN = 10;
|
||||
const TIME_MAX = 281474976710655; // from Math.pow(2, 48) - 1;
|
||||
|
||||
var ULIDErrorCode;
|
||||
(function (ULIDErrorCode) {
|
||||
ULIDErrorCode["Base32IncorrectEncoding"] = "B32_ENC_INVALID";
|
||||
ULIDErrorCode["DecodeTimeInvalidCharacter"] = "DEC_TIME_CHAR";
|
||||
ULIDErrorCode["DecodeTimeValueMalformed"] = "DEC_TIME_MALFORMED";
|
||||
ULIDErrorCode["EncodeTimeNegative"] = "ENC_TIME_NEG";
|
||||
ULIDErrorCode["EncodeTimeSizeExceeded"] = "ENC_TIME_SIZE_EXCEED";
|
||||
ULIDErrorCode["EncodeTimeValueMalformed"] = "ENC_TIME_MALFORMED";
|
||||
ULIDErrorCode["PRNGDetectFailure"] = "PRNG_DETECT";
|
||||
ULIDErrorCode["ULIDInvalid"] = "ULID_INVALID";
|
||||
ULIDErrorCode["Unexpected"] = "UNEXPECTED";
|
||||
ULIDErrorCode["UUIDInvalid"] = "UUID_INVALID";
|
||||
})(ULIDErrorCode || (ULIDErrorCode = {}));
|
||||
class ULIDError extends Error {
|
||||
constructor(errorCode, message) {
|
||||
super(`${message} (${errorCode})`);
|
||||
this.name = "ULIDError";
|
||||
this.code = errorCode;
|
||||
}
|
||||
}
|
||||
|
||||
function randomChar(prng) {
|
||||
// Currently PRNGs generate fractions from 0 to _less than_ 1, so no "%" is necessary.
|
||||
// However, just in case a future PRNG can generate 1,
|
||||
// we are applying "% ENCODING LEN" to wrap back to the first character
|
||||
const randomPosition = Math.floor(prng() * ENCODING_LEN) % ENCODING_LEN;
|
||||
return ENCODING.charAt(randomPosition);
|
||||
}
|
||||
/**
|
||||
* Detect the best PRNG (pseudo-random number generator)
|
||||
* @param root The root to check from (global/window)
|
||||
* @returns The PRNG function
|
||||
*/
|
||||
function detectPRNG(root) {
|
||||
const rootLookup = detectRoot();
|
||||
const globalCrypto = (rootLookup && (rootLookup.crypto || rootLookup.msCrypto)) ||
|
||||
(null);
|
||||
if (typeof globalCrypto?.getRandomValues === "function") {
|
||||
return () => {
|
||||
const buffer = new Uint8Array(1);
|
||||
globalCrypto.getRandomValues(buffer);
|
||||
return buffer[0] / 256;
|
||||
};
|
||||
}
|
||||
else if (typeof globalCrypto?.randomBytes === "function") {
|
||||
return () => globalCrypto.randomBytes(1).readUInt8() / 256;
|
||||
}
|
||||
else ;
|
||||
throw new ULIDError(ULIDErrorCode.PRNGDetectFailure, "Failed to find a reliable PRNG");
|
||||
}
|
||||
function detectRoot() {
|
||||
if (inWebWorker())
|
||||
return self;
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
function encodeRandom(len, prng) {
|
||||
let str = "";
|
||||
for (; len > 0; len--) {
|
||||
str = randomChar(prng) + str;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
/**
|
||||
* Encode the time portion of a ULID
|
||||
* @param now The current timestamp
|
||||
* @param len Length to generate
|
||||
* @returns The encoded time
|
||||
*/
|
||||
function encodeTime(now, len = TIME_LEN) {
|
||||
if (isNaN(now)) {
|
||||
throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be a number: ${now}`);
|
||||
}
|
||||
else if (now > TIME_MAX) {
|
||||
throw new ULIDError(ULIDErrorCode.EncodeTimeSizeExceeded, `Cannot encode a time larger than ${TIME_MAX}: ${now}`);
|
||||
}
|
||||
else if (now < 0) {
|
||||
throw new ULIDError(ULIDErrorCode.EncodeTimeNegative, `Time must be positive: ${now}`);
|
||||
}
|
||||
else if (Number.isInteger(now) === false) {
|
||||
throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be an integer: ${now}`);
|
||||
}
|
||||
let mod, str = "";
|
||||
for (let currentLen = len; currentLen > 0; currentLen--) {
|
||||
mod = now % ENCODING_LEN;
|
||||
str = ENCODING.charAt(mod) + str;
|
||||
now = (now - mod) / ENCODING_LEN;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
function inWebWorker() {
|
||||
// @ts-ignore
|
||||
return typeof WorkerGlobalScope !== "undefined" && self instanceof WorkerGlobalScope;
|
||||
}
|
||||
/**
|
||||
* Generate a ULID
|
||||
* @param seedTime Optional time seed
|
||||
* @param prng Optional PRNG function
|
||||
* @returns A ULID string
|
||||
* @example
|
||||
* ulid(); // "01HNZXD07M5CEN5XA66EMZSRZW"
|
||||
*/
|
||||
function ulid(seedTime, prng) {
|
||||
const currentPRNG = detectPRNG();
|
||||
const seed = Date.now() ;
|
||||
return encodeTime(seed, TIME_LEN) + encodeRandom(RANDOM_LEN, currentPRNG);
|
||||
}
|
||||
|
||||
const BASE_DB_SERVICE_URL = '/_spark/db';
|
||||
/**
|
||||
* DBClient provides methods to interact with Spark's document database.
|
||||
*/
|
||||
class DBClient {
|
||||
kv;
|
||||
constructor(kvClient) {
|
||||
this.kv = kvClient || new KVClient();
|
||||
}
|
||||
/**
|
||||
* Generate a unique document ID using ULID
|
||||
* @returns A unique document ID
|
||||
*/
|
||||
generateDocId() {
|
||||
return ulid();
|
||||
}
|
||||
/**
|
||||
* Get all documents in a collection using the DB API
|
||||
* @param collectionName The name of the collection
|
||||
* @returns Array of all documents in the collection with id field
|
||||
*/
|
||||
async getAll(collectionName) {
|
||||
try {
|
||||
const response = await fetch(`${BASE_DB_SERVICE_URL}/collections/${collectionName}`, {
|
||||
method: 'GET',
|
||||
});
|
||||
if (!response.ok) {
|
||||
const errorMessage = `Failed to fetch DB collection: ${response.statusText}`;
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
let json;
|
||||
try {
|
||||
json = await response.json();
|
||||
}
|
||||
catch (error) {
|
||||
const errorMessage = 'Failed to parse DB collection response';
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
if (!Array.isArray(json)) {
|
||||
const errorMessage = 'DB collection response is not an array';
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
const entries = json;
|
||||
return entries
|
||||
.map((entry) => {
|
||||
return { _id: entry.key, ...JSON.parse(entry.value) };
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`Error getting collection ${collectionName}:`, error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Insert a document into a collection with schema validation
|
||||
* @param collectionName The name of the collection
|
||||
* @param schema The Zod schema for validation
|
||||
* @param data The document data to insert
|
||||
* @returns The inserted document with generated id
|
||||
*/
|
||||
async insert(collectionName, schema, data) {
|
||||
const id = this.generateDocId();
|
||||
const { _id: _, ...dataWithoutId } = data;
|
||||
const validatedData = schema.parse(dataWithoutId);
|
||||
await this.kv.setKey(id, validatedData, collectionName);
|
||||
return { ...validatedData, _id: id };
|
||||
}
|
||||
/**
|
||||
* Get a document by ID from a collection
|
||||
* @param collectionName The name of the collection
|
||||
* @param id The document ID
|
||||
* @returns The document with id field or null if not found
|
||||
*/
|
||||
async get(collectionName, id) {
|
||||
try {
|
||||
const doc = await this.kv.getKey(id, collectionName);
|
||||
if (!doc)
|
||||
return null;
|
||||
return { ...doc, _id: id };
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`Error getting document ${id} from ${collectionName}:`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Update a document with partial data and schema validation
|
||||
* @param collectionName The name of the collection
|
||||
* @param id The document ID
|
||||
* @param schema The Zod schema for validation
|
||||
* @param data Partial data to update
|
||||
* @returns The updated document or null if not found
|
||||
*/
|
||||
async update(collectionName, id, schema, data) {
|
||||
try {
|
||||
const existing = await this.kv.getKey(id, collectionName);
|
||||
if (!existing)
|
||||
return null;
|
||||
const { _id: _, ...dataWithoutId } = data;
|
||||
const updated = { ...existing, ...dataWithoutId };
|
||||
const validatedData = schema.parse(updated);
|
||||
await this.kv.setKey(id, validatedData, collectionName);
|
||||
return { ...validatedData, _id: id };
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`Error updating document ${id} in ${collectionName}:`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Delete a document from a collection
|
||||
* @param collectionName The name of the collection
|
||||
* @param id The document ID to delete
|
||||
* @returns true if document was deleted, false if not found
|
||||
*/
|
||||
async delete(collectionName, id) {
|
||||
try {
|
||||
await this.kv.deleteKey(id, collectionName);
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`Error deleting document ${id} from ${collectionName}:`, error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Query documents with filtering
|
||||
* @param collectionName The name of the collection
|
||||
* @param filterFn Function to filter documents
|
||||
* @returns Array of filtered documents
|
||||
*/
|
||||
async query(collectionName, filterFn) {
|
||||
const collection = await this.getAll(collectionName);
|
||||
return collection.filter(filterFn);
|
||||
}
|
||||
}
|
||||
|
||||
const db = new DBClient();
|
||||
/**
|
||||
* Create a collection instance with schema validation and clean API
|
||||
* @param schema The Zod schema for the collection
|
||||
* @param collectionName The name of the collection
|
||||
* @returns Collection instance with CRUD operations
|
||||
*/
|
||||
function collection(schema, collectionName) {
|
||||
return {
|
||||
async insert(data) {
|
||||
return db.insert(collectionName, schema, data);
|
||||
},
|
||||
async get(id) {
|
||||
return db.get(collectionName, id);
|
||||
},
|
||||
async update(id, data) {
|
||||
return db.update(collectionName, id, schema, data);
|
||||
},
|
||||
async delete(id) {
|
||||
return db.delete(collectionName, id);
|
||||
},
|
||||
async getAll() {
|
||||
return db.getAll(collectionName);
|
||||
},
|
||||
async query(options) {
|
||||
// Get all documents first
|
||||
let results = await db.getAll(collectionName);
|
||||
// Apply where condition
|
||||
if (options?.where) {
|
||||
const condition = options.where;
|
||||
results = results.filter((doc) => {
|
||||
const fieldValue = doc[condition.field];
|
||||
switch (condition.operator) {
|
||||
case '==': return fieldValue === condition.value;
|
||||
case '!=': return fieldValue !== condition.value;
|
||||
case '>': return fieldValue > condition.value;
|
||||
case '<': return fieldValue < condition.value;
|
||||
case '>=': return fieldValue >= condition.value;
|
||||
case '<=': return fieldValue <= condition.value;
|
||||
default: return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
// Apply sorting
|
||||
if (options?.sortBy) {
|
||||
results.sort((a, b) => {
|
||||
const aVal = a[options.sortBy.field];
|
||||
const bVal = b[options.sortBy.field];
|
||||
if (aVal < bVal)
|
||||
return options.sortBy.direction === 'asc' ? -1 : 1;
|
||||
if (aVal > bVal)
|
||||
return options.sortBy.direction === 'asc' ? 1 : -1;
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
// Apply limit
|
||||
if (options?.limit !== undefined) {
|
||||
results = results.slice(0, options.limit);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export { DBClient as DB, collection };
|
||||
//# sourceMappingURL=db.js.map
|
||||
1
packages/spark-tools/dist/db.js.map
vendored
1
packages/spark-tools/dist/db.js.map
vendored
File diff suppressed because one or more lines are too long
27
packages/spark-tools/dist/designer-styles.css
vendored
27
packages/spark-tools/dist/designer-styles.css
vendored
@@ -1,27 +0,0 @@
|
||||
.debugger-overlay {
|
||||
z-index: 1000000;
|
||||
}
|
||||
|
||||
.debugger-overlay[data-element-name]::before {
|
||||
content: attr(data-element-name);
|
||||
display: var(--display-tag, none);
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
position: absolute;
|
||||
top: -1px;
|
||||
left: -1px;
|
||||
transform: translateY(-100%);
|
||||
background-color: var(--fg-color);
|
||||
color: white;
|
||||
border-radius: 4px 4px 0 0;
|
||||
border: 1px solid var(--fg-color);
|
||||
border-bottom: none;
|
||||
font-size: 11px;
|
||||
font-weight: 400;
|
||||
line-height: 1.5;
|
||||
padding: 0px 6px 2px;
|
||||
}
|
||||
|
||||
[contenteditable='true']:focus {
|
||||
outline: none;
|
||||
}
|
||||
1
packages/spark-tools/dist/designerHost.d.ts
vendored
1
packages/spark-tools/dist/designerHost.d.ts
vendored
@@ -1 +0,0 @@
|
||||
export {};
|
||||
601
packages/spark-tools/dist/designerHost.js
vendored
601
packages/spark-tools/dist/designerHost.js
vendored
@@ -1,601 +0,0 @@
|
||||
/**
|
||||
* typed function to send messages to the parent window
|
||||
*/
|
||||
function sendMessageToBridge(message) {
|
||||
window.parent.postMessage(message, '*');
|
||||
}
|
||||
let currentSelectedElement = null;
|
||||
let currentHighlightedElement = null;
|
||||
let mutationObserver = null;
|
||||
// Keyboard overlay state
|
||||
let keyboardOverlays = [];
|
||||
const extractProps = (props) => {
|
||||
return Object.entries(props || {}).reduce((acc, [key, value]) => {
|
||||
if (['string', 'number', 'boolean'].includes(typeof value) &&
|
||||
!['data-loc', 'data-component', 'children'].includes(key)) {
|
||||
acc[key] = value;
|
||||
}
|
||||
return acc;
|
||||
}, {});
|
||||
};
|
||||
/**
|
||||
* Core element selection logic shared between mouse and keyboard selection
|
||||
* @param makeEditable - Whether to make text elements editable immediately (false during Tab navigation)
|
||||
*/
|
||||
function selectElement(element, makeEditable = true) {
|
||||
// Get React fiber info
|
||||
const reactPropsKey = Object.keys(element).find((key) => key.startsWith('__reactProps'));
|
||||
const reactFiberKey = Object.keys(element).find((key) => key.startsWith('__reactFiber'));
|
||||
const fiberProps = reactPropsKey ? element[reactPropsKey] : undefined;
|
||||
const fiberNode = reactFiberKey ? element[reactFiberKey] : undefined;
|
||||
if (!fiberNode) {
|
||||
return;
|
||||
}
|
||||
const elementDynamic = element.getAttribute('data-dynamic');
|
||||
const isTextElement = typeof fiberProps.children === 'string';
|
||||
const editable = !elementDynamic && isTextElement;
|
||||
currentSelectedElement = element;
|
||||
// Send selection message
|
||||
const payload = createElementPayload(element);
|
||||
sendMessageToBridge({
|
||||
type: 'spark:designer:host:element:selected',
|
||||
element: payload,
|
||||
});
|
||||
// Show selected overlay
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
showOverlay(element);
|
||||
// Disconnect previous observer if it exists
|
||||
if (mutationObserver) {
|
||||
mutationObserver.disconnect();
|
||||
mutationObserver = null;
|
||||
}
|
||||
// Set up mutation observer for the selected element
|
||||
mutationObserver = new MutationObserver((mutationsList) => {
|
||||
for (const mutation of mutationsList) {
|
||||
if (mutation.type === 'attributes') {
|
||||
sendMessageToBridge({
|
||||
type: 'spark:designer:bridge:element:updated',
|
||||
element: createElementPayload(element),
|
||||
});
|
||||
updateOverlayPositions();
|
||||
}
|
||||
}
|
||||
});
|
||||
mutationObserver.observe(element, {
|
||||
attributes: true,
|
||||
attributeFilter: ['data-loc', 'data-loc-end', 'data-component-loc', 'data-component-loc-end', 'class'],
|
||||
});
|
||||
// Make editable if applicable AND makeEditable is true
|
||||
// During Tab navigation (makeEditable=false), we don't steal focus with contentEditable
|
||||
// User can press Enter to explicitly make it editable
|
||||
if (editable && makeEditable) {
|
||||
element.contentEditable = 'true';
|
||||
element.focus();
|
||||
element.addEventListener('blur', () => {
|
||||
element.contentEditable = 'false';
|
||||
sendMessageToBridge({
|
||||
type: 'spark:designer:bridge:element:updated',
|
||||
element: createElementPayload(element),
|
||||
});
|
||||
}, { once: true });
|
||||
}
|
||||
}
|
||||
function createElementPayload(element) {
|
||||
const reactPropsKey = Object.keys(element).find((key) => key.startsWith('__reactProps'));
|
||||
const reactFiberKey = Object.keys(element).find((key) => key.startsWith('__reactFiber'));
|
||||
const fiberProps = reactPropsKey ? element[reactPropsKey] : undefined;
|
||||
const fiberNode = reactFiberKey ? element[reactFiberKey] : undefined;
|
||||
const elementDataLoc = element.getAttribute('data-loc')?.split(':');
|
||||
const elementDataLocEnd = element.getAttribute('data-loc-end')?.split(':');
|
||||
const componentLoc = element.getAttribute('data-component-loc')?.split(':');
|
||||
const componentLocEnd = element.getAttribute('data-component-loc-end')?.split(':');
|
||||
const elementDynamic = element.getAttribute('data-dynamic');
|
||||
const isTextElement = typeof fiberProps.children === 'string';
|
||||
const editable = !elementDynamic && isTextElement;
|
||||
const rect = element.getBoundingClientRect();
|
||||
return {
|
||||
tag: fiberNode.type?.name || fiberNode.type,
|
||||
component: {
|
||||
location: componentLoc && componentLocEnd
|
||||
? {
|
||||
start: {
|
||||
filePath: componentLoc?.[0],
|
||||
line: parseInt(componentLoc?.[1], 10),
|
||||
column: parseInt(componentLoc?.[2], 10),
|
||||
},
|
||||
end: {
|
||||
filePath: componentLocEnd?.[0],
|
||||
line: parseInt(componentLocEnd?.[1], 10),
|
||||
column: parseInt(componentLocEnd?.[2], 10),
|
||||
},
|
||||
}
|
||||
: null,
|
||||
},
|
||||
props: extractProps(fiberProps),
|
||||
location: elementDataLoc && elementDataLocEnd
|
||||
? {
|
||||
start: {
|
||||
filePath: elementDataLoc[0],
|
||||
line: parseInt(elementDataLoc[1], 10),
|
||||
column: parseInt(elementDataLoc[2], 10),
|
||||
},
|
||||
end: {
|
||||
filePath: elementDataLocEnd[0],
|
||||
line: parseInt(elementDataLocEnd[1], 10),
|
||||
column: parseInt(elementDataLocEnd[2], 10),
|
||||
},
|
||||
}
|
||||
: null,
|
||||
instanceCount: document.querySelectorAll(`[data-loc="${elementDataLoc}"]`).length,
|
||||
position: {
|
||||
top: rect.top,
|
||||
left: rect.left,
|
||||
width: rect.width,
|
||||
height: rect.height,
|
||||
},
|
||||
editable,
|
||||
text: isTextElement ? element.innerText : null,
|
||||
class: element.getAttribute('class'),
|
||||
};
|
||||
}
|
||||
function handleClick(event) {
|
||||
const element = event.target;
|
||||
if (!(element instanceof HTMLElement)) {
|
||||
return;
|
||||
}
|
||||
// Skip our keyboard overlay buttons - let their own handlers deal with selection
|
||||
// IMPORTANT: Check this BEFORE preventDefault/stopPropagation so button handler can fire
|
||||
if (element.classList.contains('spark-keyboard-overlay')) {
|
||||
return;
|
||||
}
|
||||
// Only prevent default and stop propagation for actual element clicks
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
if (element === currentSelectedElement && element.contentEditable === 'true') {
|
||||
return;
|
||||
}
|
||||
else {
|
||||
if (currentSelectedElement?.contentEditable === 'true') {
|
||||
currentSelectedElement.contentEditable = 'false';
|
||||
currentSelectedElement.blur();
|
||||
}
|
||||
}
|
||||
if (event.target === document.documentElement || element === currentSelectedElement) {
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
if (element === currentSelectedElement) {
|
||||
currentHighlightedElement = currentSelectedElement;
|
||||
showOverlay(currentHighlightedElement);
|
||||
}
|
||||
currentSelectedElement = null;
|
||||
sendMessageToBridge({
|
||||
type: 'spark:designer:bridge:element:deselected',
|
||||
element: null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
// Check if element has React fiber before selecting
|
||||
const reactFiberKey = Object.keys(element).find((key) => key.startsWith('__reactFiber'));
|
||||
if (!reactFiberKey || !element[reactFiberKey]) {
|
||||
return;
|
||||
}
|
||||
// Use shared selection logic
|
||||
selectElement(element);
|
||||
}
|
||||
function showOverlay(element) {
|
||||
const elementDataLoc = element.getAttribute('data-loc');
|
||||
const componentDataLoc = element.getAttribute('data-component-loc');
|
||||
const computedStyles = window.getComputedStyle(element);
|
||||
const elements = componentDataLoc
|
||||
? document.querySelectorAll(`[data-component-loc="${componentDataLoc}"]`)
|
||||
: document.querySelectorAll(`[data-loc="${elementDataLoc}"]`);
|
||||
elements.forEach((el) => {
|
||||
const rect = el.getBoundingClientRect();
|
||||
const overlay = document.createElement('div');
|
||||
overlay.style.setProperty('--fg-color', '#4493f8');
|
||||
overlay.className = 'debugger-overlay';
|
||||
overlay.style.position = 'fixed';
|
||||
overlay.style.pointerEvents = 'none';
|
||||
overlay.style.border = '1px solid var(--fg-color)';
|
||||
overlay.style.left = rect.left + 'px';
|
||||
overlay.style.top = rect.top + 'px';
|
||||
overlay.style.width = rect.width + 'px';
|
||||
overlay.style.height = rect.height + 'px';
|
||||
overlay.style.color = 'var(--fg-color)';
|
||||
overlay.style.borderRadius = parseInt(computedStyles.borderRadius) + 'px';
|
||||
overlay.style.borderTopLeftRadius = '0px';
|
||||
overlay.setAttribute('data-element-name', element.tagName.toLowerCase());
|
||||
overlay.setAttribute('data-overlay-loc', elementDataLoc);
|
||||
if (el === currentHighlightedElement || el === currentSelectedElement) {
|
||||
overlay.style.setProperty('--display-tag', 'flex');
|
||||
}
|
||||
if (componentDataLoc) {
|
||||
// overlay.setAttribute('data-element-name', componentName)
|
||||
overlay.style.setProperty('--fg-color', '#AB7DF8');
|
||||
}
|
||||
document.body.appendChild(overlay);
|
||||
});
|
||||
}
|
||||
function updateOverlayPositions() {
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
if (currentSelectedElement && currentSelectedElement !== currentHighlightedElement) {
|
||||
showOverlay(currentSelectedElement);
|
||||
}
|
||||
if (currentHighlightedElement) {
|
||||
showOverlay(currentHighlightedElement);
|
||||
}
|
||||
if (currentSelectedElement) {
|
||||
sendMessageToBridge({
|
||||
type: 'spark:designer:bridge:element:updated',
|
||||
element: createElementPayload(currentSelectedElement),
|
||||
});
|
||||
}
|
||||
}
|
||||
function handleMouseOver(event) {
|
||||
const element = event.target;
|
||||
if (!(element instanceof HTMLElement))
|
||||
return;
|
||||
if (element === currentSelectedElement) {
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
showOverlay(currentSelectedElement);
|
||||
return;
|
||||
}
|
||||
if (element !== currentHighlightedElement) {
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
}
|
||||
currentHighlightedElement = element;
|
||||
// if the element is not the same as the current selected element, show the overlay
|
||||
if (currentSelectedElement && currentSelectedElement !== currentHighlightedElement) {
|
||||
showOverlay(currentSelectedElement);
|
||||
}
|
||||
// we want to show the current overlay to be later in the DOM tree
|
||||
showOverlay(currentHighlightedElement);
|
||||
}
|
||||
function handleMouseOut(event) {
|
||||
if (!event.relatedTarget) {
|
||||
currentHighlightedElement = null;
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
if (currentSelectedElement) {
|
||||
showOverlay(currentSelectedElement);
|
||||
}
|
||||
}
|
||||
}
|
||||
function throttle(func, limit) {
|
||||
let inThrottle;
|
||||
return function (...args) {
|
||||
const context = this;
|
||||
if (!inThrottle) {
|
||||
func.apply(context, args);
|
||||
inThrottle = true;
|
||||
setTimeout(() => (inThrottle = false), limit);
|
||||
}
|
||||
};
|
||||
}
|
||||
const updateOverlayPositionsThrottled = throttle(updateOverlayPositions, 10); // ~60fps
|
||||
/**
|
||||
* Creates keyboard-accessible overlay buttons for selectable elements
|
||||
* These overlays enable Tab-based keyboard navigation without modifying user's elements
|
||||
*/
|
||||
function createKeyboardOverlays() {
|
||||
// Remove any existing overlays first
|
||||
removeKeyboardOverlays();
|
||||
// Find all selectable elements with data-loc attribute
|
||||
const elements = document.querySelectorAll('[data-loc]');
|
||||
const selectableElements = [];
|
||||
elements.forEach((element) => {
|
||||
// Skip root elements
|
||||
if (element.tagName === 'HTML' || element.tagName === 'BODY') {
|
||||
return;
|
||||
}
|
||||
// Skip our own overlay buttons
|
||||
if (element.classList.contains('spark-keyboard-overlay')) {
|
||||
return;
|
||||
}
|
||||
// Skip hidden or zero-size elements
|
||||
const rect = element.getBoundingClientRect();
|
||||
// In test environments (jsdom), getBoundingClientRect may return 0
|
||||
// So we also check computed styles
|
||||
const computedStyle = window.getComputedStyle(element);
|
||||
const hasSize = rect.width > 0 || rect.height > 0 ||
|
||||
(computedStyle.width !== '0px' && computedStyle.height !== '0px');
|
||||
if (!hasSize) {
|
||||
return;
|
||||
}
|
||||
// Only include elements with React fiber (valid components)
|
||||
const reactFiberKey = Object.keys(element).find((key) => key.startsWith('__reactFiber'));
|
||||
if (reactFiberKey) {
|
||||
const fiber = element[reactFiberKey];
|
||||
if (fiber && fiber.stateNode === element) {
|
||||
element._cachedComponentName = fiber?.type?.name || fiber?.type || element.tagName.toLowerCase();
|
||||
selectableElements.push(element);
|
||||
}
|
||||
}
|
||||
});
|
||||
// Create overlay button for each selectable element
|
||||
selectableElements.forEach((element, index) => {
|
||||
const rect = element.getBoundingClientRect();
|
||||
// Skip elements with zero dimensions (they break Tab navigation)
|
||||
// Even though they passed the initial size filter, getBoundingClientRect can return 0x0
|
||||
if (rect.width === 0 || rect.height === 0) {
|
||||
return;
|
||||
}
|
||||
// Create focusable button overlay
|
||||
const button = document.createElement('button');
|
||||
button.className = 'spark-keyboard-overlay';
|
||||
button.setAttribute('type', 'button');
|
||||
button.setAttribute('tabindex', '0');
|
||||
// Use cached component name from earlier lookup
|
||||
const componentName = element._cachedComponentName || element.tagName.toLowerCase();
|
||||
button.setAttribute('aria-label', `Select ${componentName} element, ${index + 1} of ${selectableElements.length}`);
|
||||
button.setAttribute('data-target-loc', element.getAttribute('data-loc') || '');
|
||||
// Position button over the element
|
||||
// Use rect if available, otherwise use element's offset/computed style
|
||||
const left = rect.left || element.offsetLeft || 0;
|
||||
const top = rect.top || element.offsetTop || 0;
|
||||
const width = rect.width || element.offsetWidth || parseFloat(window.getComputedStyle(element).width) || 0;
|
||||
const height = rect.height || element.offsetHeight || parseFloat(window.getComputedStyle(element).height) || 0;
|
||||
button.style.position = 'fixed';
|
||||
button.style.left = left + 'px';
|
||||
button.style.top = top + 'px';
|
||||
button.style.width = width + 'px';
|
||||
button.style.height = height + 'px';
|
||||
// Make invisible but focusable
|
||||
button.style.opacity = '0';
|
||||
button.style.border = 'none';
|
||||
button.style.background = 'transparent';
|
||||
button.style.cursor = 'pointer';
|
||||
button.style.zIndex = '9998';
|
||||
button.style.padding = '0';
|
||||
button.style.margin = '0';
|
||||
// Show visual feedback on focus
|
||||
button.addEventListener('focus', (e) => {
|
||||
currentHighlightedElement = element;
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
// Restore selected element overlay if different from focused element
|
||||
if (currentSelectedElement && currentSelectedElement !== element) {
|
||||
showOverlay(currentSelectedElement);
|
||||
}
|
||||
// Show hover overlay for focused element (same as mouse hover)
|
||||
showOverlay(element);
|
||||
// Auto-select the focused element so modal/input updates
|
||||
selectElement(element, false);
|
||||
});
|
||||
// Remove hover overlay when Tab moves away
|
||||
button.addEventListener('blur', (e) => {
|
||||
if (currentHighlightedElement === element) {
|
||||
currentHighlightedElement = null;
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
// Restore selected element overlay if exists AND it's different from the blurred element
|
||||
if (currentSelectedElement && currentSelectedElement !== element) {
|
||||
showOverlay(currentSelectedElement);
|
||||
}
|
||||
}
|
||||
});
|
||||
// Handle keyboard events on overlay buttons
|
||||
button.addEventListener('keydown', (e) => {
|
||||
// Escape = exit selector mode (tell parent to disable)
|
||||
if (e.key === 'Escape') {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
// Tell parent window to disable selector mode
|
||||
sendMessageToBridge({
|
||||
type: 'spark:designer:host:disable-requested',
|
||||
});
|
||||
return; // Don't process other handlers
|
||||
}
|
||||
// Shift+Enter starts the cycle: element → input → theme panel → element
|
||||
if (e.key === 'Enter' && e.shiftKey) {
|
||||
// Prevent default Shift+Enter behavior
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
// Tell parent window to focus its input field (first step in cycle)
|
||||
sendMessageToBridge({
|
||||
type: 'spark:designer:host:focus-input-requested',
|
||||
buttonDataLoc: button.getAttribute('data-target-loc'),
|
||||
});
|
||||
return; // Don't process other handlers
|
||||
}
|
||||
});
|
||||
// Handle click/Enter to select element
|
||||
button.addEventListener('click', (e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
// Select element immediately (makeEditable=true for click, unlike Tab navigation)
|
||||
selectElement(element, true);
|
||||
});
|
||||
document.body.appendChild(button);
|
||||
keyboardOverlays.push(button);
|
||||
});
|
||||
// Auto-focus the first overlay button for keyboard-only users
|
||||
// This allows them to start Tab navigation immediately after enabling selector mode
|
||||
if (keyboardOverlays.length > 0) {
|
||||
// Use setTimeout to ensure the button is fully rendered and focusable
|
||||
setTimeout(() => {
|
||||
// Check again in case overlays were removed before timeout fires
|
||||
if (keyboardOverlays.length > 0 && keyboardOverlays[0]) {
|
||||
keyboardOverlays[0].focus();
|
||||
}
|
||||
}, 0);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Removes all keyboard overlay buttons
|
||||
*/
|
||||
function removeKeyboardOverlays() {
|
||||
keyboardOverlays.forEach((button) => {
|
||||
button.remove();
|
||||
});
|
||||
keyboardOverlays = [];
|
||||
}
|
||||
/**
|
||||
* Updates positions of keyboard overlay buttons (for scroll/resize)
|
||||
*/
|
||||
function updateKeyboardOverlayPositions() {
|
||||
keyboardOverlays.forEach((button) => {
|
||||
const targetLoc = button.getAttribute('data-target-loc');
|
||||
if (!targetLoc)
|
||||
return;
|
||||
const element = document.querySelector(`[data-loc="${CSS.escape(targetLoc)}"]`);
|
||||
if (!element)
|
||||
return;
|
||||
const rect = element.getBoundingClientRect();
|
||||
button.style.left = rect.left + 'px';
|
||||
button.style.top = rect.top + 'px';
|
||||
button.style.width = rect.width + 'px';
|
||||
button.style.height = rect.height + 'px';
|
||||
});
|
||||
}
|
||||
const updateKeyboardOverlayPositionsThrottled = throttle(updateKeyboardOverlayPositions, 10); // ~100fps max (10ms throttle)
|
||||
/**
|
||||
* Prevents default behavior on native interactive elements
|
||||
* This allows them to be selected like any other element while preventing their normal actions
|
||||
*/
|
||||
function handleNativeElementInteraction(event) {
|
||||
const element = event.target;
|
||||
// Prevent default button/link/input behavior
|
||||
event.preventDefault();
|
||||
event.stopPropagation(); // For Enter key on native elements, select them like we do with overlay buttons
|
||||
if (event.type === 'keydown' && event.key === 'Enter') {
|
||||
selectElement(element);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Adds event listeners to native interactive elements to override their default behavior
|
||||
*/
|
||||
function disableNativeInteractivity() {
|
||||
const nativeElements = document.querySelectorAll('button, input, textarea, select, a[href]');
|
||||
nativeElements.forEach((element) => {
|
||||
// Prevent default click behavior (but allow selection via handleClick)
|
||||
element.addEventListener('click', handleNativeElementInteraction, true);
|
||||
// Prevent Enter key from triggering button action, use it for selection instead
|
||||
element.addEventListener('keydown', handleNativeElementInteraction, true);
|
||||
// Mark element as having listeners for cleanup
|
||||
element.setAttribute('data-spark-intercepted', 'true');
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Removes event listeners from native interactive elements
|
||||
*/
|
||||
function restoreNativeInteractivity() {
|
||||
const nativeElements = document.querySelectorAll('[data-spark-intercepted="true"]');
|
||||
nativeElements.forEach((element) => {
|
||||
element.removeEventListener('click', handleNativeElementInteraction, true);
|
||||
element.removeEventListener('keydown', handleNativeElementInteraction, true);
|
||||
element.removeAttribute('data-spark-intercepted');
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Handle messages from the parent window
|
||||
*/
|
||||
function handleMessage(message) {
|
||||
switch (message.type) {
|
||||
case 'spark:designer:bridge:enable': { // IMPORTANT: Disable native interactivity FIRST so our handlers fire before handleClick
|
||||
// This prevents native button/link behavior while allowing element selection
|
||||
disableNativeInteractivity();
|
||||
window.addEventListener('click', handleClick, true);
|
||||
window.addEventListener('mouseover', handleMouseOver, true);
|
||||
window.addEventListener('scroll', updateOverlayPositionsThrottled, {
|
||||
passive: true,
|
||||
});
|
||||
window.addEventListener('resize', updateOverlayPositionsThrottled, {
|
||||
passive: true,
|
||||
});
|
||||
// when cursor leaves the window
|
||||
document.addEventListener('mouseout', handleMouseOut, true);
|
||||
// Create keyboard-accessible overlays
|
||||
createKeyboardOverlays();
|
||||
// Update keyboard overlay positions on scroll/resize
|
||||
window.addEventListener('scroll', updateKeyboardOverlayPositionsThrottled, {
|
||||
passive: true,
|
||||
});
|
||||
window.addEventListener('resize', updateKeyboardOverlayPositionsThrottled, {
|
||||
passive: true,
|
||||
});
|
||||
break;
|
||||
}
|
||||
case 'spark:designer:bridge:disable': {
|
||||
currentHighlightedElement = null;
|
||||
currentSelectedElement = null;
|
||||
window.removeEventListener('click', handleClick, true);
|
||||
window.removeEventListener('mouseover', handleMouseOver, true);
|
||||
window.removeEventListener('scroll', updateOverlayPositionsThrottled);
|
||||
window.removeEventListener('resize', updateOverlayPositionsThrottled);
|
||||
document.removeEventListener('mouseout', handleMouseOut, true);
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
if (mutationObserver) {
|
||||
mutationObserver.disconnect();
|
||||
mutationObserver = null;
|
||||
}
|
||||
// Clean up keyboard overlays
|
||||
removeKeyboardOverlays();
|
||||
// Remove keyboard-specific scroll/resize listeners (separate from mouse overlay listeners above)
|
||||
window.removeEventListener('scroll', updateKeyboardOverlayPositionsThrottled);
|
||||
window.removeEventListener('resize', updateKeyboardOverlayPositionsThrottled);
|
||||
// Restore native interactivity
|
||||
restoreNativeInteractivity();
|
||||
break;
|
||||
}
|
||||
case 'spark:designer:bridge:deselect': {
|
||||
document.querySelectorAll('.debugger-overlay').forEach((x) => x.remove());
|
||||
currentSelectedElement = null;
|
||||
if (mutationObserver) {
|
||||
mutationObserver.disconnect();
|
||||
mutationObserver = null;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'spark:designer:bridge:restore-focus': {
|
||||
const { buttonDataLoc } = message;
|
||||
// Find the overlay button at the specified data-loc and focus it
|
||||
if (buttonDataLoc) {
|
||||
const button = document.querySelector(`.spark-keyboard-overlay[data-target-loc="${CSS.escape(buttonDataLoc)}"]`);
|
||||
if (button) {
|
||||
button.focus();
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'spark:designer:bridge:restore-focus-from-theme-panel': {
|
||||
const { buttonDataLoc } = message;
|
||||
// Find the overlay button at the specified data-loc and focus it
|
||||
// Same as restore-focus, but specifically from theme panel navigation
|
||||
if (buttonDataLoc) {
|
||||
const button = document.querySelector(`.spark-keyboard-overlay[data-target-loc="${CSS.escape(buttonDataLoc)}"]`);
|
||||
if (button) {
|
||||
button.focus();
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'spark:designer:bridge:update-theme-token': {
|
||||
const { token, value } = message;
|
||||
document.documentElement.style.setProperty(`--${token}`, value);
|
||||
break;
|
||||
}
|
||||
case 'spark:designer:bridge:update-element-token': {
|
||||
const { location, name, value } = message;
|
||||
const { filePath, line, column } = location;
|
||||
document.querySelectorAll(`[data-loc="${filePath}:${line}:${column}"]`).forEach((el) => {
|
||||
el.style.setProperty(name, value);
|
||||
});
|
||||
break;
|
||||
}
|
||||
case 'spark:designer:bridge:update-class-name': {
|
||||
const { location, className, replace } = message;
|
||||
const { filePath, line, column } = location;
|
||||
document.querySelectorAll(`[data-loc="${filePath}:${line}:${column}"]`).forEach((el) => {
|
||||
const elementClassName = el.getAttribute('class') || '';
|
||||
// Simple concatenation - if more sophisticated merging is needed, consider adding tailwind-merge
|
||||
const newClassName = replace ? className : `${elementClassName} ${className}`.trim();
|
||||
el.setAttribute('class', newClassName);
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Listen for messages from the parent window
|
||||
*/
|
||||
window.addEventListener('message', (event) => {
|
||||
handleMessage(event.data);
|
||||
});
|
||||
//# sourceMappingURL=designerHost.js.map
|
||||
File diff suppressed because one or more lines are too long
@@ -1,8 +0,0 @@
|
||||
import type { PluginOption } from 'vite';
|
||||
export default function getCwd(): string;
|
||||
export declare function tagFile(source: string, filePath: string): {
|
||||
code: string;
|
||||
map: any;
|
||||
};
|
||||
export declare const tagSourcePlugin: () => PluginOption;
|
||||
export declare const designerHost: () => PluginOption;
|
||||
35709
packages/spark-tools/dist/designerPlugin.js
vendored
35709
packages/spark-tools/dist/designerPlugin.js
vendored
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,12 +0,0 @@
|
||||
const EventType = {
|
||||
SPARK_RUNTIME_ERROR: 'sparkRuntimeError',
|
||||
SPARK_RUNTIME_PING: 'sparkRuntimePing',
|
||||
SPARK_RUNTIME_LOADED: 'sparkRuntimeLoaded',
|
||||
SPARK_VITE_WS_CONNECT: 'sparkViteWsConnect',
|
||||
SPARK_VITE_WS_DISCONNECT: 'sparkViteWsDisconnect',
|
||||
SPARK_VITE_ERROR: 'sparkViteError',
|
||||
SPARK_VITE_AFTER_UPDATE: 'sparkViteAfterUpdate',
|
||||
ROOT_ELEMENT_STATUS: 'rootElementStatus'};
|
||||
|
||||
export { EventType as E };
|
||||
//# sourceMappingURL=heartbeat-event-types-BmKuwNhb.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"heartbeat-event-types-BmKuwNhb.js","sources":["../src/types/heartbeat-event-types.ts"],"sourcesContent":[null],"names":[],"mappings":"AAAO,MAAM,SAAS,GAAG;AACvB,IAAA,mBAAmB,EAAE,mBAAmB;AACxC,IAAA,kBAAkB,EAAE,kBAAkB;AACtC,IAAA,oBAAoB,EAAE,oBAAoB;AAC1C,IAAA,qBAAqB,EAAE,oBAAoB;AAC3C,IAAA,wBAAwB,EAAE,uBAAuB;AACjD,IAAA,gBAAgB,EAAE,gBAAgB;AAClC,IAAA,uBAAuB,EAAE,sBAAsB;AAC/C,IAAA,mBAAmB,EAAE;;;;"}
|
||||
157
packages/spark-tools/dist/heartbeat.js
vendored
157
packages/spark-tools/dist/heartbeat.js
vendored
@@ -1,157 +0,0 @@
|
||||
import { E as EventType } from './heartbeat-event-types-BmKuwNhb.js';
|
||||
|
||||
const VERSION = "ec61fa57186e5a2ceb3003a660f42fd762e82193";
|
||||
const WORKBENCH_ORIGIN = import.meta.env.VITE_WORKBENCH_ORIGIN || "https://github.com";
|
||||
async function getSourceMapConsumer(sourceMap) {
|
||||
if (window.sourceMap !== undefined) {
|
||||
return await new window.sourceMap.SourceMapConsumer(sourceMap);
|
||||
}
|
||||
// @ts-ignore
|
||||
await import('https://unpkg.com/source-map@0.7.3/dist/source-map.js');
|
||||
window.sourceMap.SourceMapConsumer.initialize({
|
||||
"lib/mappings.wasm": "https://unpkg.com/source-map@0.7.3/lib/mappings.wasm",
|
||||
});
|
||||
return await new window.sourceMap.SourceMapConsumer(sourceMap);
|
||||
}
|
||||
async function wait(ms) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
/**
|
||||
* Check whether the root element of the app exists.
|
||||
*/
|
||||
function getRootElement() {
|
||||
return document.getElementById("root");
|
||||
}
|
||||
/**
|
||||
* Checks if the given element is null or empty.
|
||||
*/
|
||||
function isEmptyElement(element) {
|
||||
if (element === null) {
|
||||
return true; // Treat missing element as empty
|
||||
}
|
||||
return element.textContent?.trim() === "";
|
||||
}
|
||||
async function monitorRootElement() {
|
||||
await wait(200); // Wait a bit for the root element to be rendered
|
||||
console.info("Root element monitoring enabled");
|
||||
let checkInterval = 500; // Start with 500 milliseconds
|
||||
const checkRootElement = () => {
|
||||
const rootElement = getRootElement();
|
||||
window.parent.postMessage({
|
||||
type: EventType.ROOT_ELEMENT_STATUS,
|
||||
payload: {
|
||||
timestamp: Date.now(),
|
||||
isEmpty: isEmptyElement(rootElement),
|
||||
exists: !!rootElement,
|
||||
},
|
||||
}, WORKBENCH_ORIGIN);
|
||||
clearInterval(intervalId);
|
||||
checkInterval = 3000;
|
||||
intervalId = setInterval(checkRootElement, checkInterval);
|
||||
};
|
||||
let intervalId = setInterval(checkRootElement, checkInterval);
|
||||
checkRootElement();
|
||||
}
|
||||
// Handle JavaScript errors
|
||||
function setupErrorListener() {
|
||||
console.info("Runtime heartbeat enabled");
|
||||
window.addEventListener("error", (event) => {
|
||||
const { message, filename, lineno, colno } = event;
|
||||
fetch(filename)
|
||||
.then(async (response) => {
|
||||
if (response.ok) {
|
||||
const rawFile = await response.text();
|
||||
const base64SourceMap = rawFile.split("# sourceMappingURL=").pop();
|
||||
const rawBase64SourceMap = base64SourceMap.split("data:application/json;base64,").pop();
|
||||
const sourceMap = JSON.parse(atob(rawBase64SourceMap));
|
||||
const consumer = await getSourceMapConsumer(sourceMap);
|
||||
const originalPosition = consumer.originalPositionFor({
|
||||
line: lineno,
|
||||
column: colno,
|
||||
});
|
||||
const payload = {
|
||||
line: originalPosition.line,
|
||||
column: originalPosition.column,
|
||||
path: new URL(filename).pathname,
|
||||
message,
|
||||
};
|
||||
window.parent.postMessage({
|
||||
type: EventType.SPARK_RUNTIME_ERROR,
|
||||
payload,
|
||||
}, WORKBENCH_ORIGIN);
|
||||
}
|
||||
})
|
||||
.catch(() => {
|
||||
const payload = {
|
||||
line: lineno,
|
||||
column: colno,
|
||||
path: new URL(filename).pathname,
|
||||
message,
|
||||
sourceMap: false,
|
||||
};
|
||||
window.parent.postMessage({
|
||||
type: EventType.SPARK_RUNTIME_ERROR,
|
||||
payload,
|
||||
}, WORKBENCH_ORIGIN);
|
||||
});
|
||||
});
|
||||
}
|
||||
function initializeViteHeartbeat() {
|
||||
const viteServerSessionId = import.meta.env.VITE_SERVER_SESSION_ID || "unset";
|
||||
console.info("Vite heartbeat enabled. Server session ID:", viteServerSessionId);
|
||||
import.meta.hot?.on("vite:ws:connect", () => {
|
||||
console.info("Vite server WebSocket connected");
|
||||
window.parent.postMessage({
|
||||
type: EventType.SPARK_VITE_WS_CONNECT,
|
||||
payload: { timestamp: Date.now(), viteServerSessionId },
|
||||
}, WORKBENCH_ORIGIN);
|
||||
});
|
||||
import.meta.hot?.on("vite:ws:disconnect", () => {
|
||||
console.info("Vite server WebSocket disconnected");
|
||||
window.parent.postMessage({
|
||||
type: EventType.SPARK_VITE_WS_DISCONNECT,
|
||||
payload: { timestamp: Date.now(), viteServerSessionId },
|
||||
}, WORKBENCH_ORIGIN);
|
||||
});
|
||||
import.meta.hot?.on("vite:error", (error) => {
|
||||
console.warn("Vite server error:", error);
|
||||
window.parent.postMessage({
|
||||
type: EventType.SPARK_VITE_ERROR,
|
||||
payload: { error, timestamp: Date.now(), viteServerSessionId },
|
||||
}, WORKBENCH_ORIGIN);
|
||||
});
|
||||
import.meta.hot?.on("vite:afterUpdate", (updateInfo) => {
|
||||
window.parent.postMessage({
|
||||
type: EventType.SPARK_VITE_AFTER_UPDATE,
|
||||
payload: { updateInfo, timestamp: Date.now(), viteServerSessionId },
|
||||
}, WORKBENCH_ORIGIN);
|
||||
if (isEmptyElement(getRootElement())) {
|
||||
wait(100).then(() => {
|
||||
window.location.reload();
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
function heartbeat() {
|
||||
console.info(`Spark Tools version: ${VERSION}`);
|
||||
setupErrorListener();
|
||||
monitorRootElement();
|
||||
// Tell parent the runtime is ready.
|
||||
window.parent.postMessage({
|
||||
type: EventType.SPARK_RUNTIME_PING,
|
||||
payload: {
|
||||
version: VERSION,
|
||||
timestamp: Date.now(),
|
||||
},
|
||||
}, WORKBENCH_ORIGIN);
|
||||
}
|
||||
heartbeat();
|
||||
if (import.meta.hot) {
|
||||
initializeViteHeartbeat();
|
||||
}
|
||||
else {
|
||||
console.error(`Vite HMR is not available`);
|
||||
}
|
||||
|
||||
export { setupErrorListener };
|
||||
//# sourceMappingURL=heartbeat.js.map
|
||||
1
packages/spark-tools/dist/heartbeat.js.map
vendored
1
packages/spark-tools/dist/heartbeat.js.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"heartbeat.js","sources":["../src/heartbeat/heartbeat.ts"],"sourcesContent":[null],"names":[],"mappings":";;AAEA,MAAM,OAAO,GAAG,0CAA+B;AAC/C,MAAM,gBAAgB,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,qBAAqB,IAAI,oBAAoB;AActF,eAAe,oBAAoB,CAAC,SAAc,EAAA;AAChD,IAAA,IAAI,MAAM,CAAC,SAAS,KAAK,SAAS,EAAE;QAClC,OAAO,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,iBAAiB,CAAC,SAAS,CAAC;IAChE;;AAGA,IAAA,MAAM,OAAO,uDAAuD,CAAC;AACrE,IAAA,MAAM,CAAC,SAAU,CAAC,iBAAiB,CAAC,UAAU,CAAC;AAC7C,QAAA,mBAAmB,EAAE,sDAAsD;AAC5E,KAAA,CAAC;IAEF,OAAO,MAAM,IAAI,MAAM,CAAC,SAAU,CAAC,iBAAiB,CAAC,SAAS,CAAC;AACjE;AAEA,eAAe,IAAI,CAAC,EAAU,EAAA;AAC5B,IAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAK,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;AAC1D;AAEA;;AAEG;AACH,SAAS,cAAc,GAAA;AACrB,IAAA,OAAO,QAAQ,CAAC,cAAc,CAAC,MAAM,CAAC;AACxC;AAEA;;AAEG;AACH,SAAS,cAAc,CAAC,OAA2B,EAAA;AACjD,IAAA,IAAI,OAAO,KAAK,IAAI,EAAE;QACpB,OAAO,IAAI,CAAC;IACd;IAEA,OAAO,OAAO,CAAC,WAAW,EAAE,IAAI,EAAE,KAAK,EAAE;AAC3C;AAEA,eAAe,kBAAkB,GAAA;AAC/B,IAAA,MAAM,IAAI,CAAC,GAAG,CAAC,CAAA;AACf,IAAA,OAAO,CAAC,IAAI,CAAC,iCAAiC,CAAC;AAE/C,IAAA,IAAI,aAAa,GAAG,GAAG,CAAA;IACvB,MAAM,gBAAgB,GAAG,MAAK;AAC5B,QAAA,MAAM,WAAW,GAAG,cAAc,EAAE;AAEpC,QAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;YACE,IAAI,EAAE,SAAS,CAAC,mBAAmB;AACnC,YAAA,OAAO,EAAE;AACP,gBAAA,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;AACrB,gBAAA,OAAO,EAAE,cAAc,CAAC,WAAW,CAAC;gBACpC,MAAM,EAAE,CAAC,CAAC,WAAW;AACtB,aAAA;SACF,EACD,gBAAgB,CACjB;QAED,aAAa,CAAC,UAAU,CAAC;QACzB,aAAa,GAAG,IAAI;AACpB,QAAA,UAAU,GAAG,WAAW,CAAC,gBAAgB,EAAE,aAAa,CAAC;AAC3D,IAAA,CAAC;IAED,IAAI,UAAU,GAAG,WAAW,CAAC,gBAAgB,EAAE,aAAa,CAAC;AAC7D,IAAA,gBAAgB,EAAE;AACpB;AAGA;SACgB,kBAAkB,GAAA;AAChC,IAAA,OAAO,CAAC,IAAI,CAAC,2BAA2B,CAAC;IAEzC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,CAAC,KAAiB,KAAI;QACrD,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,KAAK;QAElD,KAAK,CAAC,QAAQ;AACX,aAAA,IAAI,CAAC,OAAO,QAAQ,KAAI;AACvB,YAAA,IAAI,QAAQ,CAAC,EAAE,EAAE;AACf,gBAAA,MAAM,OAAO,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE;gBACrC,MAAM,eAAe,GAAG,OAAO,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,GAAG,EAAG;gBACnE,MAAM,kBAAkB,GAA
G,eAAe,CAAC,KAAK,CAAC,+BAA+B,CAAC,CAAC,GAAG,EAAG;gBACxF,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAEtD,gBAAA,MAAM,QAAQ,GAAG,MAAM,oBAAoB,CAAC,SAAS,CAAC;AACtD,gBAAA,MAAM,gBAAgB,GAAG,QAAQ,CAAC,mBAAmB,CAAC;AACpD,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,MAAM,EAAE,KAAK;AACd,iBAAA,CAAC;AAEF,gBAAA,MAAM,OAAO,GAAG;oBACd,IAAI,EAAE,gBAAgB,CAAC,IAAI;oBAC3B,MAAM,EAAE,gBAAgB,CAAC,MAAM;AAC/B,oBAAA,IAAI,EAAE,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,QAAQ;oBAChC,OAAO;iBACR;AACD,gBAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;oBACE,IAAI,EAAE,SAAS,CAAC,mBAAmB;oBACnC,OAAO;iBACR,EACD,gBAAgB,CACjB;YACH;AACF,QAAA,CAAC;aACA,KAAK,CAAC,MAAK;AACV,YAAA,MAAM,OAAO,GAAG;AACd,gBAAA,IAAI,EAAE,MAAM;AACZ,gBAAA,MAAM,EAAE,KAAK;AACb,gBAAA,IAAI,EAAE,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,QAAQ;gBAChC,OAAO;AACP,gBAAA,SAAS,EAAE,KAAK;aACjB;AACD,YAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;gBACE,IAAI,EAAE,SAAS,CAAC,mBAAmB;gBACnC,OAAO;aACR,EACD,gBAAgB,CACjB;AACH,QAAA,CAAC,CAAC;AACN,IAAA,CAAC,CAAC;AACJ;AAEA,SAAS,uBAAuB,GAAA;IAC9B,MAAM,mBAAmB,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,sBAAsB,IAAI,OAAO;AAE7E,IAAA,OAAO,CAAC,IAAI,CAAC,4CAA4C,EAAE,mBAAmB,CAAC;IAE/E,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,CAAC,iBAAiB,EAAE,MAAK;AAC1C,QAAA,OAAO,CAAC,IAAI,CAAC,iCAAiC,CAAC;AAE/C,QAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;YACE,IAAI,EAAE,SAAS,CAAC,qBAAqB;YACrC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE,EAAE,mBAAmB,EAAE;SACxD,EACD,gBAAgB,CACjB;AACH,IAAA,CAAC,CAAC;IAEF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,CAAC,oBAAoB,EAAE,MAAK;AAC7C,QAAA,OAAO,CAAC,IAAI,CAAC,oCAAoC,CAAC;AAElD,QAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;YACE,IAAI,EAAE,SAAS,CAAC,wBAAwB;YACxC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE,EAAE,mBAAmB,EAAE;SACxD,EACD,gBAAgB,CACjB;AACH,IAAA,CAAC,CAAC;AAEF,IAAA,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,CAAC,YAAY,EAAE,CAAC,KAAK,KAAI;AAC1C,QAAA,OAAO,CAAC,IAAI,CAAC,oBAAoB,EAAE,KAAK,CAAC;AAEzC,QAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;YACE,IAAI,EAAE,SAAS,CAAC,gBAAgB;AAChC,YAAA,OAAO,EAAE,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE,EAAE,mBAAmB,EAAE;SAC/D,EACD,gBAAgB,CACjB;AACH,IAAA,CAAC,CAAC
;AAEF,IAAA,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,CAAC,kBAAkB,EAAE,CAAC,UAAU,KAAI;AACrD,QAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;YACE,IAAI,EAAE,SAAS,CAAC,uBAAuB;AACvC,YAAA,OAAO,EAAE,EAAE,UAAU,EAAE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE,EAAE,mBAAmB,EAAE;SACpE,EACD,gBAAgB,CACjB;AAED,QAAA,IAAI,cAAc,CAAC,cAAc,EAAE,CAAC,EAAE;AACpC,YAAA,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAK;AAClB,gBAAA,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE;AAC1B,YAAA,CAAC,CAAC;QACJ;AACF,IAAA,CAAC,CAAC;AACJ;AAEA,SAAS,SAAS,GAAA;AAChB,IAAA,OAAO,CAAC,IAAI,CAAC,wBAAwB,OAAO,CAAA,CAAE,CAAC;AAC/C,IAAA,kBAAkB,EAAE;AACpB,IAAA,kBAAkB,EAAE;;AAGpB,IAAA,MAAM,CAAC,MAAM,CAAC,WAAW,CACvB;QACE,IAAI,EAAE,SAAS,CAAC,kBAAkB;AAClC,QAAA,OAAO,EAAE;AACP,YAAA,OAAO,EAAE,OAAO;AAChB,YAAA,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;AACtB,SAAA;KACF,EACD,gBAAgB,CACjB;AACH;AAEA,SAAS,EAAE;AAEX,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;AACnB,IAAA,uBAAuB,EAAE;AAC3B;KAAO;AACL,IAAA,OAAO,CAAC,KAAK,CAAC,CAAA,yBAAA,CAA2B,CAAC;AAC5C;;;;"}
|
||||
@@ -1,13 +0,0 @@
|
||||
declare global {
|
||||
interface Window {
|
||||
sourceMap?: {
|
||||
SourceMapConsumer: {
|
||||
new (sourceMap: any): any;
|
||||
initialize(config: {
|
||||
[key: string]: string;
|
||||
}): void;
|
||||
};
|
||||
};
|
||||
}
|
||||
}
|
||||
export declare function setupErrorListener(): void;
|
||||
@@ -1,3 +0,0 @@
|
||||
import type { PluginOption } from 'vite';
|
||||
export declare const heartbeatPlugin: () => PluginOption;
|
||||
export declare const runtimeTelemetryPlugin: () => PluginOption;
|
||||
30
packages/spark-tools/dist/heartbeatPlugin.js
vendored
30
packages/spark-tools/dist/heartbeatPlugin.js
vendored
@@ -1,30 +0,0 @@
|
||||
import { createRequire } from 'node:module';
|
||||
|
||||
const require$1 = createRequire(import.meta.url);
|
||||
const src = require$1.resolve('./heartbeat');
|
||||
const heartbeatPlugin = () => {
|
||||
return {
|
||||
name: 'heartbeat',
|
||||
apply: 'serve', // Only apply this plugin for the dev server only
|
||||
transformIndexHtml(html) {
|
||||
return {
|
||||
html,
|
||||
tags: [
|
||||
{
|
||||
tag: 'script',
|
||||
attrs: {
|
||||
type: 'module',
|
||||
src: src,
|
||||
},
|
||||
injectTo: 'head',
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
};
|
||||
};
|
||||
// Backward compatibility alias
|
||||
const runtimeTelemetryPlugin = heartbeatPlugin;
|
||||
|
||||
export { heartbeatPlugin, runtimeTelemetryPlugin };
|
||||
//# sourceMappingURL=heartbeatPlugin.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"heartbeatPlugin.js","sources":["../src/heartbeat/heartbeatPlugin.ts"],"sourcesContent":[null],"names":["require"],"mappings":";;AAGA,MAAMA,SAAO,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC;AAC9C,MAAM,GAAG,GAAGA,SAAO,CAAC,OAAO,CAAC,aAAa,CAAC;AAEnC,MAAM,eAAe,GAAG,MAAmB;IAChD,OAAO;AACL,QAAA,IAAI,EAAE,WAAW;QACjB,KAAK,EAAE,OAAO;AACd,QAAA,kBAAkB,CAAC,IAAI,EAAA;YACrB,OAAO;gBACL,IAAI;AACJ,gBAAA,IAAI,EAAE;AACJ,oBAAA;AACE,wBAAA,GAAG,EAAE,QAAQ;AACb,wBAAA,KAAK,EAAE;AACL,4BAAA,IAAI,EAAE,QAAQ;AACd,4BAAA,GAAG,EAAE,GAAG;AACT,yBAAA;AACD,wBAAA,QAAQ,EAAE,MAAM;AACjB,qBAAA;AACF,iBAAA;aACF;QACH,CAAC;KACF;AACH;AAEA;AACO,MAAM,sBAAsB,GAAG;;;;"}
|
||||
12
packages/spark-tools/dist/hooks/index.d.ts
vendored
12
packages/spark-tools/dist/hooks/index.d.ts
vendored
@@ -1,12 +0,0 @@
|
||||
/**
|
||||
* useKV Hook - Persistent key-value storage with localStorage and window.spark.kv integration
|
||||
*
|
||||
* This hook provides persistent state management that syncs with localStorage
|
||||
* and integrates with the Spark KV storage system if available.
|
||||
*
|
||||
* @param key - Storage key
|
||||
* @param defaultValue - Default value if key doesn't exist
|
||||
* @returns Tuple of [value, setValue, deleteValue]
|
||||
*/
|
||||
export declare function useKV<T>(key: string, defaultValue: T): [T, (value: T | ((prev: T) => T)) => void, () => void];
|
||||
//# sourceMappingURL=index.d.ts.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/hooks/index.ts"],"names":[],"mappings":"AAEA;;;;;;;;;GASG;AACH,wBAAgB,KAAK,CAAC,CAAC,EACrB,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,CAAC,GACd,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,IAAI,EAAE,MAAM,IAAI,CAAC,CA8ExD"}
|
||||
21
packages/spark-tools/dist/hooks/use-kv.d.ts
vendored
21
packages/spark-tools/dist/hooks/use-kv.d.ts
vendored
@@ -1,21 +0,0 @@
|
||||
/**
|
||||
* A hook that works similarly to React.useState, but persists the value using the Spark Runtime.
|
||||
* The value is automatically retrieved from the Spark Runtime on mount and updated on state change.
|
||||
* While the initial value is being fetched, the `initialValue` is being used.
|
||||
* Use this component when you need to persist/store/remember values. Note that the current value
|
||||
* may be undefined if no value has been set yet or if the value has been deleted.
|
||||
*
|
||||
* @param key - The key under which to store the value.
|
||||
* @param initialValue - The initial value to use if no stored value is found.
|
||||
* @returns An array containing the current value, a setter function, and a delete function.
|
||||
*
|
||||
* @example
|
||||
* import { useKV } from "@github/spark/hooks";
|
||||
*
|
||||
* const [count, setCount, deleteCount] = useKV("count", 0);
|
||||
* @example
|
||||
* import { useKV } from "@github/spark/hooks";
|
||||
*
|
||||
* const [name, setName] = useKV("name", "");
|
||||
*/
|
||||
export declare function useKV<T = string>(key: string, initialValue?: NoInfer<T>): readonly [T | undefined, (newValue: T | ((oldValue?: T) => T)) => void, () => void];
|
||||
@@ -1 +0,0 @@
|
||||
export declare function useTheme(): Theme;
|
||||
8
packages/spark-tools/dist/index.d.ts
vendored
8
packages/spark-tools/dist/index.d.ts
vendored
@@ -1,8 +0,0 @@
|
||||
/**
|
||||
* @github/spark - Main Hooks Entry Point
|
||||
*
|
||||
* This is the entry point for the hooks exports from the Spark package.
|
||||
*/
|
||||
export { useKV } from './hooks/index';
|
||||
export { sparkRuntime } from './lib/spark-runtime';
|
||||
//# sourceMappingURL=index.d.ts.map
|
||||
1
packages/spark-tools/dist/index.d.ts.map
vendored
1
packages/spark-tools/dist/index.d.ts.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAA;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAA"}
|
||||
2
packages/spark-tools/dist/index.js
vendored
2
packages/spark-tools/dist/index.js
vendored
@@ -1,2 +0,0 @@
|
||||
import{useState as r,useCallback as e,useEffect as o}from"react";export{s as sparkRuntime}from"./spark-runtime-wNXbhm34.js";function t(t,n){const[s,a]=r(()=>{try{if("undefined"!=typeof window&&window.spark?.kv){const r=window.spark.kv.get(t);if(void 0!==r)return r}const r=localStorage.getItem(t);return r?JSON.parse(r):n}catch(r){return console.error("Error reading from storage:",r),n}}),i=e(r=>{try{a(e=>{const o="function"==typeof r?r(e):r;return localStorage.setItem(t,JSON.stringify(o)),"undefined"!=typeof window&&window.spark?.kv&&window.spark.kv.set(t,o),o})}catch(r){console.error("Error writing to storage:",r)}},[t]),w=e(()=>{try{localStorage.removeItem(t),"undefined"!=typeof window&&window.spark?.kv&&window.spark.kv.delete(t),a(n)}catch(r){console.error("Error deleting from storage:",r)}},[t,n]);return o(()=>{const r=r=>{if(r.key===t&&null!==r.newValue)try{a(JSON.parse(r.newValue))}catch(r){console.error("Error parsing storage event:",r)}};return window.addEventListener("storage",r),()=>window.removeEventListener("storage",r)},[t]),[s,i,w]}export{t as useKV};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
packages/spark-tools/dist/index.js.map
vendored
1
packages/spark-tools/dist/index.js.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"index.js","sources":["../src/hooks/index.ts"],"sourcesContent":["import { useState, useEffect, useCallback } from 'react'\n\n/**\n * useKV Hook - Persistent key-value storage with localStorage and window.spark.kv integration\n * \n * This hook provides persistent state management that syncs with localStorage\n * and integrates with the Spark KV storage system if available.\n * \n * @param key - Storage key\n * @param defaultValue - Default value if key doesn't exist\n * @returns Tuple of [value, setValue, deleteValue]\n */\nexport function useKV<T>(\n key: string,\n defaultValue: T\n): [T, (value: T | ((prev: T) => T)) => void, () => void] {\n // Initialize state from localStorage or default value\n const [value, setValueInternal] = useState<T>(() => {\n try {\n // Try to get from window.spark.kv first\n if (typeof window !== 'undefined' && window.spark?.kv) {\n const sparkValue = window.spark.kv.get(key)\n if (sparkValue !== undefined) {\n return sparkValue as T\n }\n }\n\n // Fallback to localStorage\n const item = localStorage.getItem(key)\n return item ? JSON.parse(item) : defaultValue\n } catch (error) {\n console.error('Error reading from storage:', error)\n return defaultValue\n }\n })\n\n // Set value and sync to storage\n const setValue = useCallback(\n (newValue: T | ((prev: T) => T)) => {\n try {\n setValueInternal((prevValue) => {\n const valueToStore =\n typeof newValue === 'function'\n ? 
(newValue as (prev: T) => T)(prevValue)\n : newValue\n\n // Store in localStorage\n localStorage.setItem(key, JSON.stringify(valueToStore))\n\n // Store in window.spark.kv if available\n if (typeof window !== 'undefined' && window.spark?.kv) {\n window.spark.kv.set(key, valueToStore)\n }\n\n return valueToStore\n })\n } catch (error) {\n console.error('Error writing to storage:', error)\n }\n },\n [key]\n )\n\n // Delete value from storage\n const deleteValue = useCallback(() => {\n try {\n localStorage.removeItem(key)\n if (typeof window !== 'undefined' && window.spark?.kv) {\n window.spark.kv.delete(key)\n }\n setValueInternal(defaultValue)\n } catch (error) {\n console.error('Error deleting from storage:', error)\n }\n }, [key, defaultValue])\n\n // Sync with localStorage changes from other tabs\n useEffect(() => {\n const handleStorageChange = (e: StorageEvent) => {\n if (e.key === key && e.newValue !== null) {\n try {\n setValueInternal(JSON.parse(e.newValue))\n } catch (error) {\n console.error('Error parsing storage event:', error)\n }\n }\n }\n\n window.addEventListener('storage', handleStorageChange)\n return () => window.removeEventListener('storage', handleStorageChange)\n }, [key])\n\n return [value, setValue, 
deleteValue]\n}\n"],"names":["useKV","key","defaultValue","value","setValueInternal","useState","window","spark","kv","sparkValue","get","undefined","item","localStorage","getItem","JSON","parse","error","console","setValue","useCallback","newValue","prevValue","valueToStore","setItem","stringify","set","deleteValue","removeItem","delete","useEffect","handleStorageChange","e","addEventListener","removeEventListener"],"mappings":"4HAYM,SAAUA,EACdC,EACAC,GAGA,MAAOC,EAAOC,GAAoBC,EAAY,KAC5C,IAEE,GAAsB,oBAAXC,QAA0BA,OAAOC,OAAOC,GAAI,CACrD,MAAMC,EAAaH,OAAOC,MAAMC,GAAGE,IAAIT,GACvC,QAAmBU,IAAfF,EACF,OAAOA,CAEX,CAGA,MAAMG,EAAOC,aAAaC,QAAQb,GAClC,OAAOW,EAAOG,KAAKC,MAAMJ,GAAQV,CACnC,CAAE,MAAOe,GAEP,OADAC,QAAQD,MAAM,8BAA+BA,GACtCf,CACT,IAIIiB,EAAWC,EACdC,IACC,IACEjB,EAAkBkB,IAChB,MAAMC,EACgB,mBAAbF,EACFA,EAA4BC,GAC7BD,EAUN,OAPAR,aAAaW,QAAQvB,EAAKc,KAAKU,UAAUF,IAGnB,oBAAXjB,QAA0BA,OAAOC,OAAOC,IACjDF,OAAOC,MAAMC,GAAGkB,IAAIzB,EAAKsB,GAGpBA,GAEX,CAAE,MAAON,GACPC,QAAQD,MAAM,4BAA6BA,EAC7C,GAEF,CAAChB,IAIG0B,EAAcP,EAAY,KAC9B,IACEP,aAAae,WAAW3B,GACF,oBAAXK,QAA0BA,OAAOC,OAAOC,IACjDF,OAAOC,MAAMC,GAAGqB,OAAO5B,GAEzBG,EAAiBF,EACnB,CAAE,MAAOe,GACPC,QAAQD,MAAM,+BAAgCA,EAChD,GACC,CAAChB,EAAKC,IAkBT,OAfA4B,EAAU,KACR,MAAMC,EAAuBC,IAC3B,GAAIA,EAAE/B,MAAQA,GAAsB,OAAf+B,EAAEX,SACrB,IACEjB,EAAiBW,KAAKC,MAAMgB,EAAEX,UAChC,CAAE,MAAOJ,GACPC,QAAQD,MAAM,+BAAgCA,EAChD,GAKJ,OADAX,OAAO2B,iBAAiB,UAAWF,GAC5B,IAAMzB,OAAO4B,oBAAoB,UAAWH,IAClD,CAAC9B,IAEG,CAACE,EAAOgB,EAAUQ,EAC3B"}
|
||||
177
packages/spark-tools/dist/kv-DBiZoNWq.js
vendored
177
packages/spark-tools/dist/kv-DBiZoNWq.js
vendored
@@ -1,177 +0,0 @@
|
||||
const KvEventType = {
|
||||
SPARK_KV_UPDATED: 'sparkKvUpdated',
|
||||
SPARK_KV_DELETED: 'sparkKvDeleted',
|
||||
};
|
||||
|
||||
// This function allows us to send messages from the Spark back to the Workbench application.
|
||||
// Specifically, we want to send updates about KV operations, to allow the Workbench
|
||||
// to update its UI accordingly.
|
||||
const sendEventToWorkbench = (message) => {
|
||||
if (import.meta.env.DEV) {
|
||||
window.parent.postMessage(message, '*');
|
||||
}
|
||||
};
|
||||
class KVClient {
|
||||
/**
|
||||
* Retrieves a list of all keys in the KV store.
|
||||
* @returns A list of all keys in the KV store, or an empty array if there are no keys.
|
||||
*/
|
||||
async getKeys() {
|
||||
// Fetching the root URL will return all keys in the KV store.
|
||||
const response = await fetch(BASE_KV_SERVICE_URL, {
|
||||
method: 'GET',
|
||||
});
|
||||
if (!response.ok) {
|
||||
const errorMessage = `Failed to fetch KV keys: ${response.statusText}`;
|
||||
return Promise.reject(new Error(errorMessage));
|
||||
}
|
||||
let json;
|
||||
try {
|
||||
json = await response.json();
|
||||
}
|
||||
catch (error) {
|
||||
const errorMessage = 'Failed to parse KV keys response';
|
||||
return Promise.reject(new Error(errorMessage));
|
||||
}
|
||||
if (!Array.isArray(json)) {
|
||||
const errorMessage = 'KV keys response is not an array';
|
||||
return Promise.reject(new Error(errorMessage));
|
||||
}
|
||||
return json;
|
||||
}
|
||||
/**
|
||||
* Retrieves all key-value pairs from the KV store.
|
||||
* @returns An object containing all key-value pairs, or an empty object if there are no keys.
|
||||
*
|
||||
* TODO: replace with batch request
|
||||
*/
|
||||
async getAll() {
|
||||
const keys = await this.getKeys();
|
||||
const result = {};
|
||||
// Fetch all values concurrently
|
||||
const values = await Promise.all(keys.map(key => this.getKey(key)));
|
||||
// Build the result object
|
||||
keys.forEach((key, index) => {
|
||||
const value = values[index];
|
||||
if (value !== undefined) {
|
||||
result[key] = value;
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Retrieves the value associated with the given key from the KV store.
|
||||
* @param key The key to retrieve.
|
||||
* @param collectionName Optional collection name to include as a URL parameter.
|
||||
* @returns The value associated with the key, or undefined if not found.
|
||||
*/
|
||||
async getKey(key, collectionName) {
|
||||
let url = `${BASE_KV_SERVICE_URL}/${encodeURIComponent(key)}`;
|
||||
if (collectionName) {
|
||||
url += `?collection=${encodeURIComponent(collectionName)}`;
|
||||
}
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': `text/plain`,
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
const errorMessage = `Failed to fetch KV key: ${response.statusText}`;
|
||||
if (response.status === 404) {
|
||||
// If the key does not exist, return undefined
|
||||
return undefined;
|
||||
}
|
||||
// For other errors, reject with an error message
|
||||
return Promise.reject(new Error(errorMessage));
|
||||
}
|
||||
const responseText = await response.text();
|
||||
// Extract the value from the response text.
|
||||
// Important to remember that even a simple string should be returned to us as a JSON-encoded value,
|
||||
// meaning that the parse should succeed.
|
||||
try {
|
||||
return JSON.parse(responseText);
|
||||
}
|
||||
catch (error) {
|
||||
const errorMessage = `Failed to parse KV key response`;
|
||||
return Promise.reject(new Error(errorMessage));
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Retrieves the value associated with the given key from the KV store, while also setting it if it does not exist.
|
||||
* @param key The key to retrieve.
|
||||
* @param value The value to set if the key does not exist.
|
||||
* @returns The value associated with the key, whether it was retrieved or newly set.
|
||||
*/
|
||||
async getOrSetKey(key, value) {
|
||||
const existingValue = await this.getKey(key);
|
||||
if (existingValue !== undefined) {
|
||||
return existingValue;
|
||||
}
|
||||
const response = await fetch(`${BASE_KV_SERVICE_URL}/${encodeURIComponent(key)}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': `text/plain`,
|
||||
'X-Spark-Initial': 'true',
|
||||
},
|
||||
body: JSON.stringify(value),
|
||||
});
|
||||
if (!response.ok) {
|
||||
const errorMessage = `Failed to set default value for key: ${response.statusText}`;
|
||||
return Promise.reject(new Error(errorMessage));
|
||||
}
|
||||
sendEventToWorkbench({
|
||||
type: KvEventType.SPARK_KV_UPDATED,
|
||||
payload: { key },
|
||||
});
|
||||
return value;
|
||||
}
|
||||
/**
|
||||
* Sets the value for the given key in the KV store.
|
||||
* @param key The key to set.
|
||||
* @param value The value to associate with the key.
|
||||
* @param collectionName Optional collection name to include as a URL parameter.
|
||||
* @returns A promise that resolves when the operation is complete.
|
||||
*/
|
||||
async setKey(key, value, collectionName) {
|
||||
let url = `${BASE_KV_SERVICE_URL}/${encodeURIComponent(key)}`;
|
||||
if (collectionName) {
|
||||
url += `?collection=${encodeURIComponent(collectionName)}`;
|
||||
}
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': `text/plain`,
|
||||
'X-Spark-Initial': 'false',
|
||||
},
|
||||
body: JSON.stringify(value),
|
||||
});
|
||||
if (!response.ok) {
|
||||
const errorMessage = `Failed to set key: ${response.statusText}`;
|
||||
return Promise.reject(new Error(errorMessage));
|
||||
}
|
||||
sendEventToWorkbench({
|
||||
type: KvEventType.SPARK_KV_UPDATED,
|
||||
payload: { key, value: JSON.stringify(value) },
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Deletes the value associated with the given key from the KV store.
|
||||
* @param key The key to delete from the KV store.
|
||||
* @param collectionName Optional collection name to include as a URL parameter.
|
||||
*/
|
||||
async deleteKey(key, collectionName) {
|
||||
let url = `${BASE_KV_SERVICE_URL}/${encodeURIComponent(key)}`;
|
||||
if (collectionName) {
|
||||
url += `?collection=${encodeURIComponent(collectionName)}`;
|
||||
}
|
||||
await fetch(url, { method: 'DELETE' });
|
||||
sendEventToWorkbench({
|
||||
type: KvEventType.SPARK_KV_DELETED,
|
||||
payload: { key },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export { KVClient as K, KvEventType as a };
|
||||
//# sourceMappingURL=kv-DBiZoNWq.js.map
|
||||
1
packages/spark-tools/dist/kv-DBiZoNWq.js.map
vendored
1
packages/spark-tools/dist/kv-DBiZoNWq.js.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"kv-DBiZoNWq.js","sources":["../src/types/kv-event-types.ts","../src/lib/kv.ts"],"sourcesContent":[null,null],"names":[],"mappings":"AAAO,MAAM,WAAW,GAAG;AACzB,IAAA,gBAAgB,EAAE,gBAAgB;AAClC,IAAA,gBAAgB,EAAE,gBAAgB;;;ACMpC;AACA;AACA;AACA,MAAM,oBAAoB,GAAG,CAAC,OAAoB,KAAI;IACpD,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE;QACvB,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,OAAO,EAAE,GAAG,CAAC;IACzC;AACF,CAAC;MAEY,QAAQ,CAAA;AACnB;;;AAGE;AACF,IAAA,MAAM,OAAO,GAAA;;AAEX,QAAA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,mBAAmB,EAAE;AAChD,YAAA,MAAM,EAAE,KAAK;AACd,SAAA,CAAC;AAEF,QAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;AAChB,YAAA,MAAM,YAAY,GAAG,CAAA,yBAAA,EAA4B,QAAQ,CAAC,UAAU,EAAE;YACtE,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChD;AAEA,QAAA,IAAI,IAAS;AACb,QAAA,IAAI;AACF,YAAA,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE;QAC9B;QAAE,OAAO,KAAK,EAAE;YACd,MAAM,YAAY,GAAG,kCAAkC;YACvD,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChD;QAEA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;YACxB,MAAM,YAAY,GAAG,kCAAkC;YACvD,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChD;AAEA,QAAA,OAAO,IAAgB;IACzB;AAEA;;;;;AAKG;AACH,IAAA,MAAM,MAAM,GAAA;AACV,QAAA,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE;QACjC,MAAM,MAAM,GAAwB,EAAE;;QAGtC,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,GAAG,CAC9B,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAClC;;QAGD,IAAI,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,KAAK,KAAI;AAC1B,YAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;AAC3B,YAAA,IAAI,KAAK,KAAK,SAAS,EAAE;AACvB,gBAAA,MAAM,CAAC,GAAG,CAAC,GAAG,KAAK;YACrB;AACF,QAAA,CAAC,CAAC;AAEF,QAAA,OAAO,MAAM;IACf;AAEA;;;;;AAKG;AACH,IAAA,MAAM,MAAM,CAAI,GAAW,EAAE,cAAuB,EAAA;QAClD,IAAI,GAAG,GAAG,CAAA,EAAG,mBAAmB,CAAA,CAAA,EAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA,CAAE;QAC7D,IAAI,cAAc,EAAE;AAClB,YAAA,GAAG,IAAI,CAAA,YAAA,EAAe,kBAAkB,CAAC,cAAc,CAAC,EAAE;QAC5D;AAEA,QAAA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;AAChC,YAAA,MAAM,EAAE,KAAK;AACb,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,CAAA,UAAA,CAAY;AAC7B,aAAA;AACF,SAAA,CAAC;AAEF,QAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;AAChB
,YAAA,MAAM,YAAY,GAAG,CAAA,wBAAA,EAA2B,QAAQ,CAAC,UAAU,EAAE;AACrE,YAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;;AAE3B,gBAAA,OAAO,SAAS;YAClB;;YAGA,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChD;AAEA,QAAA,MAAM,YAAY,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE;;;;AAK1C,QAAA,IAAI;AACF,YAAA,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,CAAM;QACtC;QAAE,OAAO,KAAK,EAAE;YACd,MAAM,YAAY,GAAG,CAAA,+BAAA,CAAiC;YACtD,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChD;IACF;AAEA;;;;;AAKG;AACH,IAAA,MAAM,WAAW,CAAI,GAAW,EAAE,KAAQ,EAAA;QACxC,MAAM,aAAa,GAAG,MAAM,IAAI,CAAC,MAAM,CAAI,GAAG,CAAC;AAC/C,QAAA,IAAI,aAAa,KAAK,SAAS,EAAE;AAC/B,YAAA,OAAO,aAAa;QACtB;AAEA,QAAA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,CAAA,EAAG,mBAAmB,CAAA,CAAA,EAAI,kBAAkB,CAAC,GAAG,CAAC,EAAE,EAAE;AAChF,YAAA,MAAM,EAAE,MAAM;AACd,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,CAAA,UAAA,CAAY;AAC5B,gBAAA,iBAAiB,EAAE,MAAM;AAC1B,aAAA;AACD,YAAA,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;AAC5B,SAAA,CAAC;AAEF,QAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;AAChB,YAAA,MAAM,YAAY,GAAG,CAAA,qCAAA,EAAwC,QAAQ,CAAC,UAAU,EAAE;YAClF,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChD;AAEA,QAAA,oBAAoB,CAAC;YACnB,IAAI,EAAE,WAAW,CAAC,gBAAgB;YAClC,OAAO,EAAE,EAAE,GAAG,EAAE;AACjB,SAAA,CAAC;AAEF,QAAA,OAAO,KAAK;IACd;AAEA;;;;;;AAMG;AACH,IAAA,MAAM,MAAM,CAAI,GAAW,EAAE,KAAQ,EAAE,cAAuB,EAAA;QAC5D,IAAI,GAAG,GAAG,CAAA,EAAG,mBAAmB,CAAA,CAAA,EAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA,CAAE;QAC7D,IAAI,cAAc,EAAE;AAClB,YAAA,GAAG,IAAI,CAAA,YAAA,EAAe,kBAAkB,CAAC,cAAc,CAAC,EAAE;QAC5D;AAEA,QAAA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;AAChC,YAAA,MAAM,EAAE,MAAM;AACd,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,CAAA,UAAA,CAAY;AAC5B,gBAAA,iBAAiB,EAAE,OAAO;AAC3B,aAAA;AACD,YAAA,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;AAC5B,SAAA,CAAC;AAEF,QAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;AAChB,YAAA,MAAM,YAAY,GAAG,CAAA,mBAAA,EAAsB,QAAQ,CAAC,UAAU,EAAE;YAChE,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;QAChD;AAEA,QAAA,oBAAoB,CAAC;YACnB,IAAI,EAAE,WAAW,CAAC,gBAAgB;AAClC,YAAA,OAAO,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC
,EAAE;AAC/C,SAAA,CAAC;IACJ;AAEA;;;;AAIG;AACH,IAAA,MAAM,SAAS,CAAC,GAAW,EAAE,cAAuB,EAAA;QAClD,IAAI,GAAG,GAAG,CAAA,EAAG,mBAAmB,CAAA,CAAA,EAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA,CAAE;QAC7D,IAAI,cAAc,EAAE;AAClB,YAAA,GAAG,IAAI,CAAA,YAAA,EAAe,kBAAkB,CAAC,cAAc,CAAC,EAAE;QAC5D;QAEA,MAAM,KAAK,CAAC,GAAG,EAAE,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC;AAEtC,QAAA,oBAAoB,CAAC;YACnB,IAAI,EAAE,WAAW,CAAC,gBAAgB;YAClC,OAAO,EAAE,EAAE,GAAG,EAAE;AACjB,SAAA,CAAC;IACJ;AACD;;;;"}
|
||||
3
packages/spark-tools/dist/lib/db.d.ts
vendored
3
packages/spark-tools/dist/lib/db.d.ts
vendored
@@ -1,3 +0,0 @@
|
||||
export { collection } from './db/collection';
|
||||
export { DBClient as DB } from './db/db';
|
||||
export type { Collection, QueryOptions } from './db/collection';
|
||||
74
packages/spark-tools/dist/lib/db/collection.d.ts
vendored
74
packages/spark-tools/dist/lib/db/collection.d.ts
vendored
@@ -1,74 +0,0 @@
|
||||
import { z } from 'zod';
|
||||
import { Document } from './db';
|
||||
/**
|
||||
* Collection interface that provides document operations with schema validation
|
||||
*/
|
||||
export interface Collection<T extends z.ZodType> {
|
||||
/**
|
||||
* Insert a new document into the collection
|
||||
* @param data The document data to insert
|
||||
* @returns Promise that resolves to the inserted document with _id
|
||||
*/
|
||||
insert(data: z.infer<T>): Promise<Document<z.infer<T>>>;
|
||||
/**
|
||||
* Get a document by its ID
|
||||
* @param id The document ID
|
||||
* @returns Promise that resolves to the document or null if not found
|
||||
*/
|
||||
get(id: string): Promise<Document<z.infer<T>> | null>;
|
||||
/**
|
||||
* Update a document with partial data
|
||||
* @param id The document ID
|
||||
* @param data Partial data to update
|
||||
* @returns Promise that resolves to the updated document or null if not found
|
||||
*/
|
||||
update(id: string, data: Partial<z.infer<T>>): Promise<Document<z.infer<T>> | null>;
|
||||
/**
|
||||
* Delete a document by its ID
|
||||
* @param id The document ID
|
||||
* @returns Promise that resolves to true if deleted, false if not found
|
||||
*/
|
||||
delete(id: string): Promise<boolean>;
|
||||
/**
|
||||
* Get all documents in the collection
|
||||
* @returns Promise that resolves to an array of all documents
|
||||
*/
|
||||
getAll(): Promise<Document<z.infer<T>>[]>;
|
||||
/**
|
||||
* Query documents with filtering, sorting, and limiting
|
||||
* @param options Query options including where conditions, sorting, and limit
|
||||
* @returns Promise that resolves to an array of matching documents
|
||||
*/
|
||||
query(options?: QueryOptions<T>): Promise<Document<z.infer<T>>[]>;
|
||||
}
|
||||
/**
|
||||
* Query options for filtering, sorting, and limiting results
|
||||
*/
|
||||
export interface QueryOptions<T extends z.ZodType> {
|
||||
/**
|
||||
* Filter condition
|
||||
*/
|
||||
where?: {
|
||||
field: keyof z.infer<T>;
|
||||
operator: '==' | '!=' | '>' | '<' | '>=' | '<=';
|
||||
value: any;
|
||||
};
|
||||
/**
|
||||
* Sort configuration
|
||||
*/
|
||||
sortBy?: {
|
||||
field: keyof z.infer<T>;
|
||||
direction: 'asc' | 'desc';
|
||||
};
|
||||
/**
|
||||
* Maximum number of results to return
|
||||
*/
|
||||
limit?: number;
|
||||
}
|
||||
/**
|
||||
* Create a collection instance with schema validation and clean API
|
||||
* @param schema The Zod schema for the collection
|
||||
* @param collectionName The name of the collection
|
||||
* @returns Collection instance with CRUD operations
|
||||
*/
|
||||
export declare function collection<T extends z.ZodType>(schema: T, collectionName: string): Collection<T>;
|
||||
64
packages/spark-tools/dist/lib/db/db.d.ts
vendored
64
packages/spark-tools/dist/lib/db/db.d.ts
vendored
@@ -1,64 +0,0 @@
|
||||
import { z } from 'zod';
|
||||
import { KVClient } from '../kv';
|
||||
/**
|
||||
* Document type that combines a schema type with an _id field
|
||||
*/
|
||||
export type Document<T> = T & {
|
||||
_id: string;
|
||||
};
|
||||
/**
|
||||
* DBClient provides methods to interact with Spark's document database.
|
||||
*/
|
||||
export declare class DBClient {
|
||||
private kv;
|
||||
constructor(kvClient?: KVClient);
|
||||
/**
|
||||
* Generate a unique document ID using ULID
|
||||
* @returns A unique document ID
|
||||
*/
|
||||
generateDocId(): string;
|
||||
/**
|
||||
* Get all documents in a collection using the DB API
|
||||
* @param collectionName The name of the collection
|
||||
* @returns Array of all documents in the collection with id field
|
||||
*/
|
||||
getAll<T>(collectionName: string): Promise<Document<T>[]>;
|
||||
/**
|
||||
* Insert a document into a collection with schema validation
|
||||
* @param collectionName The name of the collection
|
||||
* @param schema The Zod schema for validation
|
||||
* @param data The document data to insert
|
||||
* @returns The inserted document with generated id
|
||||
*/
|
||||
insert<T extends z.ZodType>(collectionName: string, schema: T, data: z.infer<T>): Promise<Document<z.infer<T>>>;
|
||||
/**
|
||||
* Get a document by ID from a collection
|
||||
* @param collectionName The name of the collection
|
||||
* @param id The document ID
|
||||
* @returns The document with id field or null if not found
|
||||
*/
|
||||
get<T>(collectionName: string, id: string): Promise<Document<T> | null>;
|
||||
/**
|
||||
* Update a document with partial data and schema validation
|
||||
* @param collectionName The name of the collection
|
||||
* @param id The document ID
|
||||
* @param schema The Zod schema for validation
|
||||
* @param data Partial data to update
|
||||
* @returns The updated document or null if not found
|
||||
*/
|
||||
update<T extends z.ZodType>(collectionName: string, id: string, schema: T, data: Partial<z.infer<T>>): Promise<Document<z.infer<T>> | null>;
|
||||
/**
|
||||
* Delete a document from a collection
|
||||
* @param collectionName The name of the collection
|
||||
* @param id The document ID to delete
|
||||
* @returns true if document was deleted, false if not found
|
||||
*/
|
||||
delete(collectionName: string, id: string): Promise<boolean>;
|
||||
/**
|
||||
* Query documents with filtering
|
||||
* @param collectionName The name of the collection
|
||||
* @param filterFn Function to filter documents
|
||||
* @returns Array of filtered documents
|
||||
*/
|
||||
query<T>(collectionName: string, filterFn: (doc: Document<T>) => boolean): Promise<Document<T>[]>;
|
||||
}
|
||||
6
packages/spark-tools/dist/lib/index.d.ts
vendored
6
packages/spark-tools/dist/lib/index.d.ts
vendored
@@ -1,6 +0,0 @@
|
||||
export * from './db';
|
||||
export * from './llm';
|
||||
export * from './octokit';
|
||||
export * from './spark';
|
||||
export * from './user';
|
||||
export * from './utils';
|
||||
42
packages/spark-tools/dist/lib/kv.d.ts
vendored
42
packages/spark-tools/dist/lib/kv.d.ts
vendored
@@ -1,42 +0,0 @@
|
||||
export declare class KVClient {
|
||||
/**
|
||||
* Retrieves a list of all keys in the KV store.
|
||||
* @returns A list of all keys in the KV store, or an empty array if there are no keys.
|
||||
*/
|
||||
getKeys(): Promise<string[]>;
|
||||
/**
|
||||
* Retrieves all key-value pairs from the KV store.
|
||||
* @returns An object containing all key-value pairs, or an empty object if there are no keys.
|
||||
*
|
||||
* TODO: replace with batch request
|
||||
*/
|
||||
getAll(): Promise<Record<string, any>>;
|
||||
/**
|
||||
* Retrieves the value associated with the given key from the KV store.
|
||||
* @param key The key to retrieve.
|
||||
* @param collectionName Optional collection name to include as a URL parameter.
|
||||
* @returns The value associated with the key, or undefined if not found.
|
||||
*/
|
||||
getKey<T>(key: string, collectionName?: string): Promise<T | undefined>;
|
||||
/**
|
||||
* Retrieves the value associated with the given key from the KV store, while also setting it if it does not exist.
|
||||
* @param key The key to retrieve.
|
||||
* @param value The value to set if the key does not exist.
|
||||
* @returns The value associated with the key, whether it was retrieved or newly set.
|
||||
*/
|
||||
getOrSetKey<T>(key: string, value: T): Promise<T | undefined>;
|
||||
/**
|
||||
* Sets the value for the given key in the KV store.
|
||||
* @param key The key to set.
|
||||
* @param value The value to associate with the key.
|
||||
* @param collectionName Optional collection name to include as a URL parameter.
|
||||
* @returns A promise that resolves when the operation is complete.
|
||||
*/
|
||||
setKey<T>(key: string, value: T, collectionName?: string): Promise<void>;
|
||||
/**
|
||||
* Deletes the value associated with the given key from the KV store.
|
||||
* @param key The key to delete from the KV store.
|
||||
* @param collectionName Optional collection name to include as a URL parameter.
|
||||
*/
|
||||
deleteKey(key: string, collectionName?: string): Promise<void>;
|
||||
}
|
||||
2
packages/spark-tools/dist/lib/llm.d.ts
vendored
2
packages/spark-tools/dist/lib/llm.d.ts
vendored
@@ -1,2 +0,0 @@
|
||||
export declare function llm(prompt: string, modelName?: string, jsonMode?: boolean): Promise<string>;
|
||||
export declare function llmPrompt(strings: string[], ...values: any[]): string;
|
||||
8
packages/spark-tools/dist/lib/octokit.d.ts
vendored
8
packages/spark-tools/dist/lib/octokit.d.ts
vendored
@@ -1,8 +0,0 @@
|
||||
declare const octokit: import("@octokit/core").Octokit & {
|
||||
paginate: import("@octokit/plugin-paginate-rest").PaginateInterface;
|
||||
} & import("@octokit/plugin-paginate-graphql").paginateGraphQLInterface & import("@octokit/plugin-rest-endpoint-methods").Api & {
|
||||
retry: {
|
||||
retryRequest: (error: import("octokit").RequestError, retries: number, retryAfter: number) => import("octokit").RequestError;
|
||||
};
|
||||
};
|
||||
export { octokit };
|
||||
36
packages/spark-tools/dist/lib/spark-runtime.d.ts
vendored
36
packages/spark-tools/dist/lib/spark-runtime.d.ts
vendored
@@ -1,36 +0,0 @@
|
||||
/**
|
||||
* Spark Runtime - Core runtime services for Spark applications
|
||||
*
|
||||
* This module provides mock implementations of Spark services including:
|
||||
* - KV storage (key-value store)
|
||||
* - LLM service (language model integration)
|
||||
* - User authentication
|
||||
*/
|
||||
interface LLMChatResponse {
|
||||
role: string;
|
||||
content: string;
|
||||
}
|
||||
export declare const sparkRuntime: {
|
||||
kv: {
|
||||
get: <T = any>(key: string) => T | undefined;
|
||||
set: (key: string, value: any) => void;
|
||||
delete: (key: string) => void;
|
||||
clear: () => void;
|
||||
keys: () => string[];
|
||||
};
|
||||
llm: {
|
||||
(prompt: string, model?: string, jsonMode?: boolean): Promise<string>;
|
||||
chat(messages: any[]): Promise<LLMChatResponse>;
|
||||
complete(prompt: string): Promise<string>;
|
||||
};
|
||||
user: {
|
||||
getCurrentUser: () => {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
};
|
||||
isAuthenticated: () => boolean;
|
||||
};
|
||||
};
|
||||
export {};
|
||||
//# sourceMappingURL=spark-runtime.d.ts.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"spark-runtime.d.ts","sourceRoot":"","sources":["../../src/lib/spark-runtime.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAGH,UAAU,eAAe;IACvB,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;CAChB;AAwBD,eAAO,MAAM,YAAY;;cAEf,CAAC,aAAa,MAAM,KAAG,CAAC,GAAG,SAAS;mBAa/B,MAAM,SAAS,GAAG;sBAQf,MAAM;;;;;iBAzCW,MAAM,UAAU,MAAM,aAAa,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;uBAK3D,GAAG,EAAE,GAAG,OAAO,CAAC,eAAe,CAAC;yBAQ9B,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;;;;;;;;;;CA4D7D,CAAA"}
|
||||
14
packages/spark-tools/dist/lib/spark.d.ts
vendored
14
packages/spark-tools/dist/lib/spark.d.ts
vendored
@@ -1,14 +0,0 @@
|
||||
/**
|
||||
* Spark Initialization Module
|
||||
*
|
||||
* This module initializes the Spark runtime and makes it available globally
|
||||
* via window.spark. It should be imported early in the application lifecycle.
|
||||
*/
|
||||
import { sparkRuntime } from './spark-runtime';
|
||||
declare global {
|
||||
interface Window {
|
||||
spark: typeof sparkRuntime;
|
||||
}
|
||||
}
|
||||
export default sparkRuntime;
|
||||
//# sourceMappingURL=spark.d.ts.map
|
||||
1
packages/spark-tools/dist/lib/spark.d.ts.map
vendored
1
packages/spark-tools/dist/lib/spark.d.ts.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"spark.d.ts","sourceRoot":"","sources":["../../src/lib/spark.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAA;AAG9C,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,MAAM;QACd,KAAK,EAAE,OAAO,YAAY,CAAA;KAC3B;CACF;AAOD,eAAe,YAAY,CAAA"}
|
||||
8
packages/spark-tools/dist/lib/user.d.ts
vendored
8
packages/spark-tools/dist/lib/user.d.ts
vendored
@@ -1,8 +0,0 @@
|
||||
export interface User {
|
||||
avatarUrl: string;
|
||||
email: string;
|
||||
id: number;
|
||||
isOwner: boolean;
|
||||
login: string;
|
||||
}
|
||||
export declare function fetchUser(): Promise<null | User>;
|
||||
22
packages/spark-tools/dist/lib/utils.d.ts
vendored
22
packages/spark-tools/dist/lib/utils.d.ts
vendored
@@ -1,22 +0,0 @@
|
||||
import { type ClassValue } from 'clsx';
|
||||
export declare function composeEventHandlers<E>(originalEventHandler?: (event: E) => void, ourEventHandler?: (event: E) => void, { checkForDefaultPrevented }?: {
|
||||
checkForDefaultPrevented?: boolean | undefined;
|
||||
}): (event: E) => void;
|
||||
export declare function cn(...inputs: ClassValue[]): string;
|
||||
export declare function findEventHandlers(props: Record<string, unknown>): string[];
|
||||
type ContainerStyleProps = {
|
||||
className?: string;
|
||||
style?: React.CSSProperties;
|
||||
};
|
||||
type SeparatedStyles = {
|
||||
containerClasses: string;
|
||||
containerStyles: React.CSSProperties;
|
||||
innerClasses: string;
|
||||
innerStyles: React.CSSProperties;
|
||||
};
|
||||
/**
|
||||
* Extracts container-related styles (margin, display, position) from className and style props.
|
||||
* Returns separated classes and styles for container and inner elements.
|
||||
*/
|
||||
export declare function extractContainerStyles({ className, style }: ContainerStyleProps): SeparatedStyles;
|
||||
export {};
|
||||
64
packages/spark-tools/dist/llm.js
vendored
64
packages/spark-tools/dist/llm.js
vendored
@@ -1,64 +0,0 @@
|
||||
// Earlier versions of our generation recommended models without the prefix
|
||||
// that GH Models wants. For compatibility, correct those that were on the list explicitly.
|
||||
const MODEL_FIXES = {
|
||||
'ai21-jamba-instruct': 'ai21-labs/ai21-jamba-instruct',
|
||||
'cohere-command-r-plus': 'cohere/cohere-command-r-plus',
|
||||
'cohere-command-r': 'cohere/cohere-command-r',
|
||||
'gpt-4o-mini': 'openai/gpt-4o-mini',
|
||||
'gpt-4o': 'openai/gpt-4o',
|
||||
'meta-llama-3.1-405b-instruct': 'meta/meta-llama-3.1-405b-instruct',
|
||||
'meta-llama-3.1-70b-instruct': 'meta/meta-llama-3.1-70b-instruct',
|
||||
'meta-llama-3.1-8b-instruct': 'meta/meta-llama-3.1-8b-instruct',
|
||||
'meta-llama-3-70b-instruct': 'meta/meta-llama-3-70b-instruct',
|
||||
'meta-llama-3-8b-instruct': 'meta/meta-llama-3-8b-instruct',
|
||||
'mistral-large-2407': 'mistral-ai/mistral-large-2407',
|
||||
'mistral-large': 'mistral-ai/mistral-large',
|
||||
'mistral-nemo': 'mistral-ai/mistral-nemo',
|
||||
'mistral-small': 'mistral-ai/mistral-small',
|
||||
'phi-3-medium-128K-instruct': 'microsoft/phi-3-medium-128K-instruct',
|
||||
'phi-3-medium-4K-instruct': 'microsoft/phi-3-medium-4K-instruct',
|
||||
'phi-3-mini-128K-instruct': 'microsoft/phi-3-mini-128K-instruct',
|
||||
'phi-3-mini-4K-instruct': 'microsoft/phi-3-mini-4K-instruct',
|
||||
'phi-3-small-128K-instruct': 'microsoft/phi-3-small-128K-instruct',
|
||||
'phi-3-small-8K-instruct': 'microsoft/phi-3-small-8K-instruct',
|
||||
};
|
||||
const fixModelName = (modelName) => {
|
||||
if (!modelName)
|
||||
return 'openai/gpt-4o';
|
||||
return MODEL_FIXES[modelName] || modelName;
|
||||
};
|
||||
async function llm(prompt, modelName, jsonMode) {
|
||||
const tidiedModelName = fixModelName(modelName);
|
||||
const response_format = { type: jsonMode ? 'json_object' : 'text' };
|
||||
const body = {
|
||||
messages: [
|
||||
{ role: 'system', content: 'You are a helpful assistant.' },
|
||||
{ role: 'user', content: prompt },
|
||||
],
|
||||
temperature: 1.0,
|
||||
top_p: 1.0,
|
||||
max_tokens: 1000,
|
||||
model: tidiedModelName,
|
||||
response_format,
|
||||
};
|
||||
const response = await fetch('/_spark/llm', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(body),
|
||||
headers: {
|
||||
'Content-Type': `application/json`,
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`LLM request failed: ${response.status} ${response.statusText} - ${errorText}`);
|
||||
}
|
||||
const data = (await response.json());
|
||||
const content = data.choices[0].message.content;
|
||||
return content;
|
||||
}
|
||||
function llmPrompt(strings, ...values) {
|
||||
return strings.reduce((result, str, i) => result + str + (values[i] || ''), '');
|
||||
}
|
||||
|
||||
export { llm, llmPrompt };
|
||||
//# sourceMappingURL=llm.js.map
|
||||
1
packages/spark-tools/dist/llm.js.map
vendored
1
packages/spark-tools/dist/llm.js.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"llm.js","sources":["../src/lib/llm.ts"],"sourcesContent":[null],"names":[],"mappings":"AAAA;AACA;AACA,MAAM,WAAW,GAA2B;AAC1C,IAAA,qBAAqB,EAAE,+BAA+B;AACtD,IAAA,uBAAuB,EAAE,8BAA8B;AACvD,IAAA,kBAAkB,EAAE,yBAAyB;AAC7C,IAAA,aAAa,EAAE,oBAAoB;AACnC,IAAA,QAAQ,EAAE,eAAe;AACzB,IAAA,8BAA8B,EAAE,mCAAmC;AACnE,IAAA,6BAA6B,EAAE,kCAAkC;AACjE,IAAA,4BAA4B,EAAE,iCAAiC;AAC/D,IAAA,2BAA2B,EAAE,gCAAgC;AAC7D,IAAA,0BAA0B,EAAE,+BAA+B;AAC3D,IAAA,oBAAoB,EAAE,+BAA+B;AACrD,IAAA,eAAe,EAAE,0BAA0B;AAC3C,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,eAAe,EAAE,0BAA0B;AAC3C,IAAA,4BAA4B,EAAE,sCAAsC;AACpE,IAAA,0BAA0B,EAAE,oCAAoC;AAChE,IAAA,0BAA0B,EAAE,oCAAoC;AAChE,IAAA,wBAAwB,EAAE,kCAAkC;AAC5D,IAAA,2BAA2B,EAAE,qCAAqC;AAClE,IAAA,yBAAyB,EAAE,mCAAmC;CAC/D;AAED,MAAM,YAAY,GAAG,CAAC,SAAkB,KAAY;AAClD,IAAA,IAAI,CAAC,SAAS;AAAE,QAAA,OAAO,eAAe;AACtC,IAAA,OAAO,WAAW,CAAC,SAAS,CAAC,IAAI,SAAS;AAC5C,CAAC;AAEM,eAAe,GAAG,CAAC,MAAc,EAAE,SAAkB,EAAE,QAAkB,EAAA;AAC9E,IAAA,MAAM,eAAe,GAAG,YAAY,CAAC,SAAS,CAAC;AAC/C,IAAA,MAAM,eAAe,GAAG,EAAE,IAAI,EAAE,QAAQ,GAAG,aAAa,GAAG,MAAM,EAAE;AAEnE,IAAA,MAAM,IAAI,GAAG;AACX,QAAA,QAAQ,EAAE;AACR,YAAA,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,8BAA8B,EAAE;AAC3D,YAAA,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE;AAClC,SAAA;AACD,QAAA,WAAW,EAAE,GAAG;AAChB,QAAA,KAAK,EAAE,GAAG;AACV,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,KAAK,EAAE,eAAe;QACtB,eAAe;KAChB;AAED,IAAA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,aAAa,EAAE;AAC1C,QAAA,MAAM,EAAE,MAAM;AACd,QAAA,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC1B,QAAA,OAAO,EAAE;AACP,YAAA,cAAc,EAAE,CAAA,gBAAA,CAAkB;AACnC,SAAA;AACF,KAAA,CAAC;AAEF,IAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;AAChB,QAAA,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE;AACvC,QAAA,MAAM,IAAI,KAAK,CAAC,CAAA,oBAAA,EAAuB,QAAQ,CAAC,MAAM,CAAA,CAAA,EAAI,QAAQ,CAAC,UAAU,CAAA,GAAA,EAAM,SAAS,CAAA,CAAE,CAAC;IACjG;IAEA,MAAM,IAAI,IAAI,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAQ;AAC3C,IAAA,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO;AAC/C,IAAA,OAAO,OAAO;AAChB;SAEgB,SAAS,CAAC,OAAiB,EAAE,GAAG,MAAa,EAAA;AAC3D,IAAA,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC
,MAAM,EAAE,GAAG,EAAE,CAAC,KAAK,MAAM,GAAG,GAAG,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC;AACjF;;;;"}
|
||||
@@ -1,2 +0,0 @@
|
||||
import type { PluginOption } from 'vite';
|
||||
export declare const runtimeBuildPlugin: () => PluginOption;
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
export {};
|
||||
69786
packages/spark-tools/dist/runtimeProxy.js
vendored
69786
packages/spark-tools/dist/runtimeProxy.js
vendored
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
||||
const e=new Map,o=async(e,o,r)=>(console.log("Mock LLM called with prompt:",e,"model:",o,"jsonMode:",r),"This is a mock response from the Spark LLM service.");o.chat=async e=>(console.log("Mock LLM chat called with messages:",e),{role:"assistant",content:"This is a mock response from the Spark LLM service."}),o.complete=async e=>(console.log("Mock LLM complete called with prompt:",e),"This is a mock completion from the Spark LLM service.");const r={kv:{get:o=>{try{const r=e.get(o);if(void 0!==r)return r;const t=localStorage.getItem(o);return t?JSON.parse(t):void 0}catch(e){return void console.error("Error getting KV value:",e)}},set:(o,r)=>{try{e.set(o,r),localStorage.setItem(o,JSON.stringify(r))}catch(e){console.error("Error setting KV value:",e)}},delete:o=>{try{e.delete(o),localStorage.removeItem(o)}catch(e){console.error("Error deleting KV value:",e)}},clear:()=>{try{const o=Array.from(e.keys());e.clear(),o.forEach(e=>localStorage.removeItem(e))}catch(e){console.error("Error clearing KV storage:",e)}},keys:()=>Array.from(e.keys())},llm:o,user:{getCurrentUser:()=>({id:"mock-user-id",name:"Mock User",email:"mock@example.com"}),isAuthenticated:()=>!0}};export{r as s};
|
||||
//# sourceMappingURL=spark-runtime-wNXbhm34.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"spark-runtime-wNXbhm34.js","sources":["../src/lib/spark-runtime.ts"],"sourcesContent":["/**\n * Spark Runtime - Core runtime services for Spark applications\n * \n * This module provides mock implementations of Spark services including:\n * - KV storage (key-value store)\n * - LLM service (language model integration)\n * - User authentication\n */\n\n// Type definitions for LLM responses\ninterface LLMChatResponse {\n role: string\n content: string\n}\n\n// Mock KV Storage\nconst kvStorage = new Map<string, any>()\n\n// Create llm function with additional properties\nconst llmFunction = async (prompt: string, model?: string, jsonMode?: boolean): Promise<string> => {\n console.log('Mock LLM called with prompt:', prompt, 'model:', model, 'jsonMode:', jsonMode)\n return 'This is a mock response from the Spark LLM service.'\n}\n\nllmFunction.chat = async (messages: any[]): Promise<LLMChatResponse> => {\n console.log('Mock LLM chat called with messages:', messages)\n return {\n role: 'assistant',\n content: 'This is a mock response from the Spark LLM service.'\n }\n}\n\nllmFunction.complete = async (prompt: string): Promise<string> => {\n console.log('Mock LLM complete called with prompt:', prompt)\n return 'This is a mock completion from the Spark LLM service.'\n}\n\nexport const sparkRuntime = {\n kv: {\n get: <T = any>(key: string): T | undefined => {\n try {\n const value = kvStorage.get(key)\n if (value !== undefined) {\n return value as T\n }\n const stored = localStorage.getItem(key)\n return stored ? 
JSON.parse(stored) : undefined\n } catch (error) {\n console.error('Error getting KV value:', error)\n return undefined\n }\n },\n set: (key: string, value: any) => {\n try {\n kvStorage.set(key, value)\n localStorage.setItem(key, JSON.stringify(value))\n } catch (error) {\n console.error('Error setting KV value:', error)\n }\n },\n delete: (key: string) => {\n try {\n kvStorage.delete(key)\n localStorage.removeItem(key)\n } catch (error) {\n console.error('Error deleting KV value:', error)\n }\n },\n clear: () => {\n try {\n // Get keys before clearing\n const keysToRemove = Array.from(kvStorage.keys())\n kvStorage.clear()\n // Clear corresponding keys from localStorage\n keysToRemove.forEach(key => localStorage.removeItem(key))\n } catch (error) {\n console.error('Error clearing KV storage:', error)\n }\n },\n keys: () => Array.from(kvStorage.keys())\n },\n \n llm: llmFunction,\n \n user: {\n getCurrentUser: () => ({\n id: 'mock-user-id',\n name: 'Mock User',\n email: 'mock@example.com'\n }),\n isAuthenticated: () => true\n 
}\n}\n"],"names":["kvStorage","Map","llmFunction","async","prompt","model","jsonMode","console","log","chat","messages","role","content","complete","sparkRuntime","kv","get","key","value","undefined","stored","localStorage","getItem","JSON","parse","error","set","setItem","stringify","delete","removeItem","clear","keysToRemove","Array","from","keys","forEach","llm","user","getCurrentUser","id","name","email","isAuthenticated"],"mappings":"AAgBA,MAAMA,EAAY,IAAIC,IAGhBC,EAAcC,MAAOC,EAAgBC,EAAgBC,KACzDC,QAAQC,IAAI,+BAAgCJ,EAAQ,SAAUC,EAAO,YAAaC,GAC3E,uDAGTJ,EAAYO,KAAON,MAAOO,IACxBH,QAAQC,IAAI,sCAAuCE,GAC5C,CACLC,KAAM,YACNC,QAAS,wDAIbV,EAAYW,SAAWV,MAAOC,IAC5BG,QAAQC,IAAI,wCAAyCJ,GAC9C,yDAGF,MAAMU,EAAe,CAC1BC,GAAI,CACFC,IAAeC,IACb,IACE,MAAMC,EAAQlB,EAAUgB,IAAIC,GAC5B,QAAcE,IAAVD,EACF,OAAOA,EAET,MAAME,EAASC,aAAaC,QAAQL,GACpC,OAAOG,EAASG,KAAKC,MAAMJ,QAAUD,CACvC,CAAE,MAAOM,GAEP,YADAlB,QAAQkB,MAAM,0BAA2BA,EAE3C,GAEFC,IAAK,CAACT,EAAaC,KACjB,IACElB,EAAU0B,IAAIT,EAAKC,GACnBG,aAAaM,QAAQV,EAAKM,KAAKK,UAAUV,GAC3C,CAAE,MAAOO,GACPlB,QAAQkB,MAAM,0BAA2BA,EAC3C,GAEFI,OAASZ,IACP,IACEjB,EAAU6B,OAAOZ,GACjBI,aAAaS,WAAWb,EAC1B,CAAE,MAAOQ,GACPlB,QAAQkB,MAAM,2BAA4BA,EAC5C,GAEFM,MAAO,KACL,IAEE,MAAMC,EAAeC,MAAMC,KAAKlC,EAAUmC,QAC1CnC,EAAU+B,QAEVC,EAAaI,QAAQnB,GAAOI,aAAaS,WAAWb,GACtD,CAAE,MAAOQ,GACPlB,QAAQkB,MAAM,6BAA8BA,EAC9C,GAEFU,KAAM,IAAMF,MAAMC,KAAKlC,EAAUmC,SAGnCE,IAAKnC,EAELoC,KAAM,CACJC,eAAgB,KAAA,CACdC,GAAI,eACJC,KAAM,YACNC,MAAO,qBAETC,gBAAiB,KAAM"}
|
||||
8
packages/spark-tools/dist/spark.d.ts
vendored
8
packages/spark-tools/dist/spark.d.ts
vendored
@@ -1,8 +0,0 @@
|
||||
/**
|
||||
* Spark Module - Main export for spark runtime
|
||||
*
|
||||
* Re-export spark runtime for '@github/spark/spark' imports
|
||||
*/
|
||||
export { default } from './lib/spark';
|
||||
export { sparkRuntime } from './lib/spark-runtime';
|
||||
//# sourceMappingURL=spark.d.ts.map
|
||||
1
packages/spark-tools/dist/spark.d.ts.map
vendored
1
packages/spark-tools/dist/spark.d.ts.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"spark.d.ts","sourceRoot":"","sources":["../src/spark.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAA"}
|
||||
2
packages/spark-tools/dist/spark.js
vendored
2
packages/spark-tools/dist/spark.js
vendored
@@ -1,2 +0,0 @@
|
||||
import{s as e}from"./spark-runtime-wNXbhm34.js";"undefined"!=typeof window&&(window.spark=e);export{e as default,e as sparkRuntime};
|
||||
//# sourceMappingURL=spark.js.map
|
||||
1
packages/spark-tools/dist/spark.js.map
vendored
1
packages/spark-tools/dist/spark.js.map
vendored
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"spark.js","sources":["../src/lib/spark.ts"],"sourcesContent":["/**\n * Spark Initialization Module\n * \n * This module initializes the Spark runtime and makes it available globally\n * via window.spark. It should be imported early in the application lifecycle.\n */\n\nimport { sparkRuntime } from './spark-runtime'\n\n// Declare global window.spark\ndeclare global {\n interface Window {\n spark: typeof sparkRuntime\n }\n}\n\n// Initialize window.spark\nif (typeof window !== 'undefined') {\n window.spark = sparkRuntime\n}\n\nexport default sparkRuntime\n"],"names":["window","spark","sparkRuntime"],"mappings":"gDAiBsB,oBAAXA,SACTA,OAAOC,MAAQC"}
|
||||
12
packages/spark-tools/dist/spark.package.json
vendored
12
packages/spark-tools/dist/spark.package.json
vendored
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"name": "app",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"start": "node proxy.js"
|
||||
},
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"description": "A wrapper for deploying to the GitHub runtime"
|
||||
}
|
||||
15
packages/spark-tools/dist/sparkVitePlugin.d.ts
vendored
15
packages/spark-tools/dist/sparkVitePlugin.d.ts
vendored
@@ -1,15 +0,0 @@
|
||||
/**
|
||||
* Spark Vite Plugin
|
||||
*
|
||||
* This plugin integrates Spark functionality into the Vite build process.
|
||||
* Currently provides a minimal implementation that can be extended with:
|
||||
* - Spark runtime injection
|
||||
* - Configuration validation
|
||||
* - Development server enhancements
|
||||
*/
|
||||
export default function sparkPlugin(): {
|
||||
name: string;
|
||||
configResolved(config: any): void;
|
||||
transformIndexHtml(html: string): string;
|
||||
};
|
||||
//# sourceMappingURL=sparkVitePlugin.d.ts.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"sparkVitePlugin.d.ts","sourceRoot":"","sources":["../src/sparkVitePlugin.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,MAAM,CAAC,OAAO,UAAU,WAAW;;2BAIR,GAAG;6BAKD,MAAM;EAMlC"}
|
||||
2
packages/spark-tools/dist/sparkVitePlugin.js
vendored
2
packages/spark-tools/dist/sparkVitePlugin.js
vendored
@@ -1,2 +0,0 @@
|
||||
function e(){return{name:"spark-vite-plugin",configResolved(e){},transformIndexHtml:e=>e}}export{e as default};
|
||||
//# sourceMappingURL=sparkVitePlugin.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"sparkVitePlugin.js","sources":["../src/sparkVitePlugin.ts"],"sourcesContent":["/**\n * Spark Vite Plugin\n * \n * This plugin integrates Spark functionality into the Vite build process.\n * Currently provides a minimal implementation that can be extended with:\n * - Spark runtime injection\n * - Configuration validation\n * - Development server enhancements\n */\n\nexport default function sparkPlugin() {\n return {\n name: 'spark-vite-plugin',\n \n configResolved(config: any) {\n // TODO: Add Spark-specific configuration handling if needed\n // This hook is called after the Vite config is resolved\n },\n \n transformIndexHtml(html: string) {\n // TODO: Add Spark runtime injection to HTML if needed\n // Currently returns HTML unchanged\n return html\n }\n }\n}\n"],"names":["sparkPlugin","name","configResolved","config","transformIndexHtml","html"],"mappings":"AAUc,SAAUA,IACtB,MAAO,CACLC,KAAM,oBAEN,cAAAC,CAAeC,GAGf,EAEAC,mBAAmBC,GAGVA,EAGb"}
|
||||
@@ -1,12 +0,0 @@
|
||||
export declare const EventType: {
|
||||
SPARK_RUNTIME_ERROR: string;
|
||||
SPARK_RUNTIME_PING: string;
|
||||
SPARK_RUNTIME_LOADED: string;
|
||||
SPARK_VITE_WS_CONNECT: string;
|
||||
SPARK_VITE_WS_DISCONNECT: string;
|
||||
SPARK_VITE_ERROR: string;
|
||||
SPARK_VITE_AFTER_UPDATE: string;
|
||||
ROOT_ELEMENT_STATUS: string;
|
||||
KV_CLIENT_ERROR: string;
|
||||
};
|
||||
export type EventType = (typeof EventType)[keyof typeof EventType];
|
||||
@@ -1,5 +0,0 @@
|
||||
export declare const KvEventType: {
|
||||
SPARK_KV_UPDATED: string;
|
||||
SPARK_KV_DELETED: string;
|
||||
};
|
||||
export type KvEventType = (typeof KvEventType)[keyof typeof KvEventType];
|
||||
@@ -1,16 +0,0 @@
|
||||
/**
|
||||
* Vite Phosphor Icon Proxy Plugin
|
||||
*
|
||||
* This plugin provides a proxy for Phosphor icon imports.
|
||||
* Currently provides a pass-through implementation that allows
|
||||
* Vite to handle icon imports normally. Can be extended to:
|
||||
* - Optimize icon bundle sizes
|
||||
* - Implement lazy loading for icons
|
||||
* - Transform icon imports for better tree-shaking
|
||||
*/
|
||||
export default function createIconImportProxy(): {
|
||||
name: string;
|
||||
resolveId(id: string): null | undefined;
|
||||
transform(code: string, id: string): null;
|
||||
};
|
||||
//# sourceMappingURL=vitePhosphorIconProxyPlugin.d.ts.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"vitePhosphorIconProxyPlugin.d.ts","sourceRoot":"","sources":["../src/vitePhosphorIconProxyPlugin.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,MAAM,CAAC,OAAO,UAAU,qBAAqB;;kBAI3B,MAAM;oBAQJ,MAAM,MAAM,MAAM;EAMrC"}
|
||||
@@ -1,2 +0,0 @@
|
||||
function r(){return{name:"vite-phosphor-icon-proxy",resolveId(r){if(r.includes("@phosphor-icons/react"))return null},transform:(r,n)=>null}}export{r as default};
|
||||
//# sourceMappingURL=vitePhosphorIconProxyPlugin.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"vitePhosphorIconProxyPlugin.js","sources":["../src/vitePhosphorIconProxyPlugin.ts"],"sourcesContent":["/**\n * Vite Phosphor Icon Proxy Plugin\n * \n * This plugin provides a proxy for Phosphor icon imports.\n * Currently provides a pass-through implementation that allows\n * Vite to handle icon imports normally. Can be extended to:\n * - Optimize icon bundle sizes\n * - Implement lazy loading for icons\n * - Transform icon imports for better tree-shaking\n */\n\nexport default function createIconImportProxy() {\n return {\n name: 'vite-phosphor-icon-proxy',\n \n resolveId(id: string) {\n // TODO: Add custom icon resolution if needed\n // Currently lets Vite handle all icon imports normally\n if (id.includes('@phosphor-icons/react')) {\n return null // Let Vite handle it normally\n }\n },\n \n transform(code: string, id: string) {\n // TODO: Add icon import transformations if needed\n // Currently returns null to let Vite handle transformations\n return null\n }\n }\n}\n"],"names":["createIconImportProxy","name","resolveId","id","includes","transform","code"],"mappings":"AAWc,SAAUA,IACtB,MAAO,CACLC,KAAM,2BAEN,SAAAC,CAAUC,GAGR,GAAIA,EAAGC,SAAS,yBACd,OAAO,IAEX,EAEAC,UAAS,CAACC,EAAcH,IAGf,KAGb"}
|
||||
@@ -1,119 +0,0 @@
|
||||
{
|
||||
"name": "@github/spark",
|
||||
"version": "0.0.1",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "rollup --config rollup.config.ts --configPlugin @rollup/plugin-typescript",
|
||||
"prepack": "MODE=PACKAGE npm run build",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"exports": {
|
||||
"./agent-plugin": {
|
||||
"types": "./dist/agentPlugin.d.ts",
|
||||
"import": "./dist/agentPlugin.js"
|
||||
},
|
||||
"./db": {
|
||||
"types": "./dist/lib/db.d.ts",
|
||||
"import": "./dist/db.js"
|
||||
},
|
||||
"./designer-styles.css": "./dist/designer-styles.css",
|
||||
"./designerHost": {
|
||||
"types": "./dist/designerHost.d.ts",
|
||||
"import": "./dist/designerHost.js"
|
||||
},
|
||||
"./designerPlugin": {
|
||||
"types": "./dist/designerPlugin.d.ts",
|
||||
"import": "./dist/designerPlugin.js"
|
||||
},
|
||||
"./heartbeat": {
|
||||
"types": "./dist/heartbeat/heartbeat.d.ts",
|
||||
"import": "./dist/heartbeat.js"
|
||||
},
|
||||
"./heartbeatPlugin": {
|
||||
"types": "./dist/heartbeat/heartbeatPlugin.d.ts",
|
||||
"import": "./dist/heartbeatPlugin.js"
|
||||
},
|
||||
"./hooks": {
|
||||
"types": "./dist/hooks/index.d.ts",
|
||||
"import": "./dist/index.js"
|
||||
},
|
||||
"./initializeTelemetry": {
|
||||
"types": "./dist/heartbeat/heartbeat.d.ts",
|
||||
"import": "./dist/heartbeat.js"
|
||||
},
|
||||
"./llm": {
|
||||
"types": "./dist/lib/llm.d.ts",
|
||||
"import": "./dist/llm.js"
|
||||
},
|
||||
"./logToFileLogger": {
|
||||
"types": "./dist/heartbeat/logToFileLogger.d.ts",
|
||||
"import": "./dist/logToFileLogger.js"
|
||||
},
|
||||
"./package.json": "./package.json",
|
||||
"./spark": {
|
||||
"types": "./dist/lib/spark.d.ts",
|
||||
"import": "./dist/spark.js"
|
||||
},
|
||||
"./spark-vite-plugin": {
|
||||
"types": "./dist/sparkVitePlugin.d.ts",
|
||||
"import": "./dist/sparkVitePlugin.js"
|
||||
},
|
||||
"./telemetryPlugin": {
|
||||
"types": "./dist/heartbeat/heartbeatPlugin.d.ts",
|
||||
"import": "./dist/heartbeatPlugin.js"
|
||||
},
|
||||
"./vitePhosphorIconProxyPlugin": {
|
||||
"types": "./dist/vitePhosphorIconProxyPlugin.d.ts",
|
||||
"import": "./dist/vitePhosphorIconProxyPlugin.js"
|
||||
}
|
||||
},
|
||||
"prettier": {
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "all",
|
||||
"printWidth": 120
|
||||
},
|
||||
"files": [
|
||||
"LICENSE",
|
||||
"dist",
|
||||
"package.json"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-commonjs": "^28.0.3",
|
||||
"@rollup/plugin-json": "^6.1.0",
|
||||
"@rollup/plugin-node-resolve": "^16.0.0",
|
||||
"@rollup/plugin-replace": "^6.0.2",
|
||||
"@rollup/plugin-terser": "^0.4.4",
|
||||
"@rollup/plugin-typescript": "^12.1.2",
|
||||
"@testing-library/jest-dom": "^6.6.3",
|
||||
"@testing-library/react": "^16.0.1",
|
||||
"@types/body-parser": "^1.19.6",
|
||||
"@types/express": "^5.0.1",
|
||||
"@types/node": "^22.13.9",
|
||||
"@types/react": "^19.0.0",
|
||||
"jsdom": "^25.0.1",
|
||||
"rollup": "^4.35.0",
|
||||
"rollup-plugin-delete": "^3.0.1",
|
||||
"tslib": "^2.8.1",
|
||||
"ulid": "^3.0.0",
|
||||
"vitest": "^3.0.9",
|
||||
"zod": "^3.24.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^19.0.0",
|
||||
"vite": "^7.0.0 || ^6.4.1"
|
||||
},
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"keywords": [],
|
||||
"description": "",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"dependencies": {
|
||||
"body-parser": "^1.20.3",
|
||||
"express": "^5.2.0",
|
||||
"octokit": "^5.0.3"
|
||||
}
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
import typescript from '@rollup/plugin-typescript'
|
||||
import resolve from '@rollup/plugin-node-resolve'
|
||||
import commonjs from '@rollup/plugin-commonjs'
|
||||
import terser from '@rollup/plugin-terser'
|
||||
import del from 'rollup-plugin-delete'
|
||||
|
||||
export default {
|
||||
input: {
|
||||
index: 'src/index.ts',
|
||||
spark: 'src/spark.ts',
|
||||
sparkVitePlugin: 'src/sparkVitePlugin.ts',
|
||||
vitePhosphorIconProxyPlugin: 'src/vitePhosphorIconProxyPlugin.ts',
|
||||
},
|
||||
output: {
|
||||
dir: 'dist',
|
||||
format: 'es',
|
||||
sourcemap: true,
|
||||
preserveModules: false,
|
||||
},
|
||||
external: ['react', 'react-dom', 'vite'],
|
||||
plugins: [
|
||||
del({ targets: 'dist/*' }),
|
||||
resolve(),
|
||||
commonjs(),
|
||||
typescript({
|
||||
tsconfig: './tsconfig.json',
|
||||
declaration: true,
|
||||
declarationDir: 'dist',
|
||||
rootDir: 'src',
|
||||
}),
|
||||
terser(),
|
||||
],
|
||||
}
|
||||
@@ -1,94 +0,0 @@
|
||||
import { useState, useEffect, useCallback } from 'react'
|
||||
|
||||
/**
|
||||
* useKV Hook - Persistent key-value storage with localStorage and window.spark.kv integration
|
||||
*
|
||||
* This hook provides persistent state management that syncs with localStorage
|
||||
* and integrates with the Spark KV storage system if available.
|
||||
*
|
||||
* @param key - Storage key
|
||||
* @param defaultValue - Default value if key doesn't exist
|
||||
* @returns Tuple of [value, setValue, deleteValue]
|
||||
*/
|
||||
export function useKV<T>(
|
||||
key: string,
|
||||
defaultValue: T
|
||||
): [T, (value: T | ((prev: T) => T)) => void, () => void] {
|
||||
// Initialize state from localStorage or default value
|
||||
const [value, setValueInternal] = useState<T>(() => {
|
||||
try {
|
||||
// Try to get from window.spark.kv first
|
||||
if (typeof window !== 'undefined' && window.spark?.kv) {
|
||||
const sparkValue = window.spark.kv.get(key)
|
||||
if (sparkValue !== undefined) {
|
||||
return sparkValue as T
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to localStorage
|
||||
const item = localStorage.getItem(key)
|
||||
return item ? JSON.parse(item) : defaultValue
|
||||
} catch (error) {
|
||||
console.error('Error reading from storage:', error)
|
||||
return defaultValue
|
||||
}
|
||||
})
|
||||
|
||||
// Set value and sync to storage
|
||||
const setValue = useCallback(
|
||||
(newValue: T | ((prev: T) => T)) => {
|
||||
try {
|
||||
setValueInternal((prevValue) => {
|
||||
const valueToStore =
|
||||
typeof newValue === 'function'
|
||||
? (newValue as (prev: T) => T)(prevValue)
|
||||
: newValue
|
||||
|
||||
// Store in localStorage
|
||||
localStorage.setItem(key, JSON.stringify(valueToStore))
|
||||
|
||||
// Store in window.spark.kv if available
|
||||
if (typeof window !== 'undefined' && window.spark?.kv) {
|
||||
window.spark.kv.set(key, valueToStore)
|
||||
}
|
||||
|
||||
return valueToStore
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error writing to storage:', error)
|
||||
}
|
||||
},
|
||||
[key]
|
||||
)
|
||||
|
||||
// Delete value from storage
|
||||
const deleteValue = useCallback(() => {
|
||||
try {
|
||||
localStorage.removeItem(key)
|
||||
if (typeof window !== 'undefined' && window.spark?.kv) {
|
||||
window.spark.kv.delete(key)
|
||||
}
|
||||
setValueInternal(defaultValue)
|
||||
} catch (error) {
|
||||
console.error('Error deleting from storage:', error)
|
||||
}
|
||||
}, [key, defaultValue])
|
||||
|
||||
// Sync with localStorage changes from other tabs
|
||||
useEffect(() => {
|
||||
const handleStorageChange = (e: StorageEvent) => {
|
||||
if (e.key === key && e.newValue !== null) {
|
||||
try {
|
||||
setValueInternal(JSON.parse(e.newValue))
|
||||
} catch (error) {
|
||||
console.error('Error parsing storage event:', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener('storage', handleStorageChange)
|
||||
return () => window.removeEventListener('storage', handleStorageChange)
|
||||
}, [key])
|
||||
|
||||
return [value, setValue, deleteValue]
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
/**
|
||||
* @github/spark - Main Hooks Entry Point
|
||||
*
|
||||
* This is the entry point for the hooks exports from the Spark package.
|
||||
*/
|
||||
|
||||
export { useKV } from './hooks/index'
|
||||
export { sparkRuntime } from './lib/spark-runtime'
|
||||
@@ -1,93 +0,0 @@
|
||||
/**
|
||||
* Spark Runtime - Core runtime services for Spark applications
|
||||
*
|
||||
* This module provides mock implementations of Spark services including:
|
||||
* - KV storage (key-value store)
|
||||
* - LLM service (language model integration)
|
||||
* - User authentication
|
||||
*/
|
||||
|
||||
// Type definitions for LLM responses
|
||||
interface LLMChatResponse {
|
||||
role: string
|
||||
content: string
|
||||
}
|
||||
|
||||
// Mock KV Storage
|
||||
const kvStorage = new Map<string, any>()
|
||||
|
||||
// Create llm function with additional properties
|
||||
const llmFunction = async (prompt: string, model?: string, jsonMode?: boolean): Promise<string> => {
|
||||
console.log('Mock LLM called with prompt:', prompt, 'model:', model, 'jsonMode:', jsonMode)
|
||||
return 'This is a mock response from the Spark LLM service.'
|
||||
}
|
||||
|
||||
llmFunction.chat = async (messages: any[]): Promise<LLMChatResponse> => {
|
||||
console.log('Mock LLM chat called with messages:', messages)
|
||||
return {
|
||||
role: 'assistant',
|
||||
content: 'This is a mock response from the Spark LLM service.'
|
||||
}
|
||||
}
|
||||
|
||||
llmFunction.complete = async (prompt: string): Promise<string> => {
|
||||
console.log('Mock LLM complete called with prompt:', prompt)
|
||||
return 'This is a mock completion from the Spark LLM service.'
|
||||
}
|
||||
|
||||
export const sparkRuntime = {
|
||||
kv: {
|
||||
get: <T = any>(key: string): T | undefined => {
|
||||
try {
|
||||
const value = kvStorage.get(key)
|
||||
if (value !== undefined) {
|
||||
return value as T
|
||||
}
|
||||
const stored = localStorage.getItem(key)
|
||||
return stored ? JSON.parse(stored) : undefined
|
||||
} catch (error) {
|
||||
console.error('Error getting KV value:', error)
|
||||
return undefined
|
||||
}
|
||||
},
|
||||
set: (key: string, value: any) => {
|
||||
try {
|
||||
kvStorage.set(key, value)
|
||||
localStorage.setItem(key, JSON.stringify(value))
|
||||
} catch (error) {
|
||||
console.error('Error setting KV value:', error)
|
||||
}
|
||||
},
|
||||
delete: (key: string) => {
|
||||
try {
|
||||
kvStorage.delete(key)
|
||||
localStorage.removeItem(key)
|
||||
} catch (error) {
|
||||
console.error('Error deleting KV value:', error)
|
||||
}
|
||||
},
|
||||
clear: () => {
|
||||
try {
|
||||
// Get keys before clearing
|
||||
const keysToRemove = Array.from(kvStorage.keys())
|
||||
kvStorage.clear()
|
||||
// Clear corresponding keys from localStorage
|
||||
keysToRemove.forEach(key => localStorage.removeItem(key))
|
||||
} catch (error) {
|
||||
console.error('Error clearing KV storage:', error)
|
||||
}
|
||||
},
|
||||
keys: () => Array.from(kvStorage.keys())
|
||||
},
|
||||
|
||||
llm: llmFunction,
|
||||
|
||||
user: {
|
||||
getCurrentUser: () => ({
|
||||
id: 'mock-user-id',
|
||||
name: 'Mock User',
|
||||
email: 'mock@example.com'
|
||||
}),
|
||||
isAuthenticated: () => true
|
||||
}
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
/**
 * Spark Initialization Module
 *
 * This module initializes the Spark runtime and makes it available globally
 * via window.spark. It should be imported early in the application lifecycle,
 * because the assignment happens as an import-time side effect.
 */

import { sparkRuntime } from './spark-runtime'

// Ambient declaration so `window.spark` type-checks throughout the app;
// the type is derived from the runtime object itself, so it stays in sync
// with whatever spark-runtime actually exports.
declare global {
  interface Window {
    spark: typeof sparkRuntime
  }
}

// Initialize window.spark. The typeof-window guard keeps this module safe
// to import in non-browser contexts (SSR, tests, build tooling).
if (typeof window !== 'undefined') {
  window.spark = sparkRuntime
}

export default sparkRuntime
|
||||
@@ -1,8 +0,0 @@
|
||||
/**
 * Spark Module - Main export for spark runtime
 *
 * Re-exports the spark runtime so it can be imported via
 * '@github/spark/spark'. Both the default export and the named
 * `sparkRuntime` binding are forwarded unchanged from the lib modules.
 */

export { default } from './lib/spark'
export { sparkRuntime } from './lib/spark-runtime'
|
||||
@@ -1,47 +0,0 @@
|
||||
import { mkdir } from 'fs/promises'
|
||||
import { existsSync } from 'fs'
|
||||
import { resolve } from 'path'
|
||||
|
||||
/**
|
||||
* Spark Vite Plugin
|
||||
*
|
||||
* This plugin integrates Spark functionality into the Vite build process.
|
||||
* Currently provides a minimal implementation that can be extended with:
|
||||
* - Spark runtime injection
|
||||
* - Configuration validation
|
||||
* - Development server enhancements
|
||||
*/
|
||||
|
||||
export default function sparkPlugin() {
|
||||
return {
|
||||
name: 'spark-vite-plugin',
|
||||
|
||||
configResolved(config: any) {
|
||||
// TODO: Add Spark-specific configuration handling if needed
|
||||
// This hook is called after the Vite config is resolved
|
||||
},
|
||||
|
||||
transformIndexHtml(html: string) {
|
||||
// TODO: Add Spark runtime injection to HTML if needed
|
||||
// Currently returns HTML unchanged
|
||||
return html
|
||||
},
|
||||
|
||||
async buildStart() {
|
||||
const tmpDist = '/tmp/dist'
|
||||
if (!existsSync(tmpDist)) {
|
||||
try {
|
||||
await mkdir(tmpDist, { recursive: true })
|
||||
} catch (err) {
|
||||
console.warn('[spark-vite-plugin] Could not create /tmp/dist:', err)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
closeBundle() {
|
||||
// Build completed successfully
|
||||
// The Spark runtime may attempt to copy additional files after the build
|
||||
// This hook ensures the build process completes gracefully
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
/**
|
||||
* Vite Phosphor Icon Proxy Plugin
|
||||
*
|
||||
* This plugin provides a proxy for Phosphor icon imports.
|
||||
* Currently provides a pass-through implementation that allows
|
||||
* Vite to handle icon imports normally. Can be extended to:
|
||||
* - Optimize icon bundle sizes
|
||||
* - Implement lazy loading for icons
|
||||
* - Transform icon imports for better tree-shaking
|
||||
*/
|
||||
|
||||
export default function createIconImportProxy() {
|
||||
return {
|
||||
name: 'vite-phosphor-icon-proxy',
|
||||
|
||||
resolveId(id: string) {
|
||||
// TODO: Add custom icon resolution if needed
|
||||
// Currently lets Vite handle all icon imports normally
|
||||
if (id.includes('@phosphor-icons/react')) {
|
||||
return null // Let Vite handle it normally
|
||||
}
|
||||
},
|
||||
|
||||
transform(code: string, id: string) {
|
||||
// TODO: Add icon import transformations if needed
|
||||
// Currently returns null to let Vite handle transformations
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"module": "ESNext",
|
||||
"lib": ["ES2020", "DOM", "DOM.Iterable"],
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"jsx": "react-jsx",
|
||||
"allowSyntheticDefaultImports": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
@@ -1,115 +0,0 @@
|
||||
# @github/spark
|
||||
|
||||
Spark runtime and hooks for low-code React applications.
|
||||
|
||||
## Overview
|
||||
|
||||
The `@github/spark` package provides core functionality for Spark-powered applications:
|
||||
|
||||
- **useKV Hook**: Persistent key-value storage with localStorage and Spark KV integration
|
||||
- **Spark Runtime**: Mock LLM service, KV storage, and user authentication APIs
|
||||
- **Vite Plugins**: Build-time integrations for Spark applications
|
||||
|
||||
## Installation
|
||||
|
||||
This package is designed to be used as a workspace dependency:
|
||||
|
||||
```json
|
||||
{
|
||||
"dependencies": {
|
||||
"@github/spark": "workspace:*"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### useKV Hook
|
||||
|
||||
The `useKV` hook provides persistent state management:
|
||||
|
||||
```typescript
|
||||
import { useKV } from '@github/spark/hooks'
|
||||
|
||||
function MyComponent() {
|
||||
const [count, setCount, deleteCount] = useKV('counter', 0)
|
||||
|
||||
return (
|
||||
<div>
|
||||
<p>Count: {count}</p>
|
||||
<button onClick={() => setCount(count + 1)}>Increment</button>
|
||||
<button onClick={deleteCount}>Reset</button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
### Spark Runtime
|
||||
|
||||
Initialize the Spark runtime in your application entry point:
|
||||
|
||||
```typescript
|
||||
import '@github/spark/spark'
|
||||
```
|
||||
|
||||
Access the runtime APIs:
|
||||
|
||||
```typescript
|
||||
// KV Storage
|
||||
window.spark.kv.set('key', 'value')
|
||||
const value = window.spark.kv.get('key')
|
||||
|
||||
// LLM Service
|
||||
const response = await window.spark.llm.chat([
|
||||
{ role: 'user', content: 'Hello!' }
|
||||
])
|
||||
|
||||
// User Info
|
||||
const user = window.spark.user.getCurrentUser()
|
||||
```
|
||||
|
||||
### Vite Plugins
|
||||
|
||||
Add Spark plugins to your Vite configuration:
|
||||
|
||||
```typescript
|
||||
import sparkPlugin from '@github/spark/spark-vite-plugin'
|
||||
import createIconImportProxy from '@github/spark/vitePhosphorIconProxyPlugin'
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [
|
||||
sparkPlugin(),
|
||||
createIconImportProxy()
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
## API Reference
|
||||
|
||||
### useKV<T>(key: string, defaultValue: T)
|
||||
|
||||
Returns: `[value: T, setValue: (value: T | ((prev: T) => T)) => void, deleteValue: () => void]`
|
||||
|
||||
- `key`: Storage key
|
||||
- `defaultValue`: Default value if key doesn't exist
|
||||
- `value`: Current value
|
||||
- `setValue`: Update the value (supports functional updates)
|
||||
- `deleteValue`: Delete the value and reset to default
|
||||
|
||||
### window.spark
|
||||
|
||||
Global runtime object with the following APIs:
|
||||
|
||||
- `kv.get(key)`: Get value from KV storage
|
||||
- `kv.set(key, value)`: Set value in KV storage
|
||||
- `kv.delete(key)`: Delete key from KV storage
|
||||
- `kv.clear()`: Clear all KV storage
|
||||
- `kv.keys()`: Get all keys
|
||||
- `llm.chat(messages)`: Chat with LLM
|
||||
- `llm.complete(prompt)`: Complete a prompt
|
||||
- `user.getCurrentUser()`: Get current user info
|
||||
- `user.isAuthenticated()`: Check if user is authenticated
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
@@ -1,28 +0,0 @@
|
||||
{
|
||||
"name": "@local/spark-wrapper",
|
||||
"version": "1.0.0",
|
||||
"description": "Local Spark wrapper (deprecated - use @github/spark from spark-tools)",
|
||||
"type": "module",
|
||||
"main": "./src/index.ts",
|
||||
"types": "./src/types.d.ts",
|
||||
"exports": {
|
||||
".": "./src/index.ts",
|
||||
"./hooks": "./src/hooks/index.ts",
|
||||
"./spark": "./src/spark.ts",
|
||||
"./spark-vite-plugin": "./src/spark-vite-plugin.mjs",
|
||||
"./vitePhosphorIconProxyPlugin": "./src/vitePhosphorIconProxyPlugin.mjs"
|
||||
},
|
||||
"files": [
|
||||
"src"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^18.0.0 || ^19.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^19.0.0",
|
||||
"typescript": "~5.7.2"
|
||||
}
|
||||
}
|
||||
@@ -1,94 +0,0 @@
|
||||
import { useState, useEffect, useCallback } from 'react'
|
||||
|
||||
/**
 * useKV Hook - Persistent key-value storage with localStorage and window.spark.kv integration
 *
 * Provides persistent state management that syncs with localStorage and,
 * when available, the Spark KV storage system (window.spark.kv).
 *
 * Read priority on mount: window.spark.kv first, then localStorage
 * (JSON-decoded), then the supplied default. Writes go to both stores.
 * Cross-tab updates are picked up via the window 'storage' event.
 *
 * @param key - Storage key
 * @param defaultValue - Default value if key doesn't exist
 * @returns Tuple of [value, setValue, deleteValue]
 */
export function useKV<T>(
  key: string,
  defaultValue: T
): [T, (value: T | ((prev: T) => T)) => void, () => void] {
  // Initialize state lazily so storage is only read once, on first render.
  const [value, setValueInternal] = useState<T>(() => {
    try {
      // Try to get from window.spark.kv first (authoritative when present).
      if (typeof window !== 'undefined' && window.spark?.kv) {
        const sparkValue = window.spark.kv.get(key)
        if (sparkValue !== undefined) {
          return sparkValue as T
        }
      }

      // Fallback to localStorage; values there are JSON-encoded by setValue.
      const item = localStorage.getItem(key)
      return item ? JSON.parse(item) : defaultValue
    } catch (error) {
      // Any storage/parse failure degrades to the default value.
      console.error('Error reading from storage:', error)
      return defaultValue
    }
  })

  // Update state and persist the new value to both storage backends.
  const setValue = useCallback(
    (newValue: T | ((prev: T) => T)) => {
      try {
        setValueInternal((prevValue) => {
          // Support functional updates like React's own setState.
          const valueToStore =
            typeof newValue === 'function'
              ? (newValue as (prev: T) => T)(prevValue)
              : newValue

          // Store in localStorage (JSON-encoded).
          // NOTE(review): persisting inside the state updater means these
          // side effects run again if React re-invokes the updater (e.g.
          // StrictMode double-invocation) — confirm this is acceptable.
          localStorage.setItem(key, JSON.stringify(valueToStore))

          // Store in window.spark.kv if available
          if (typeof window !== 'undefined' && window.spark?.kv) {
            window.spark.kv.set(key, valueToStore)
          }

          return valueToStore
        })
      } catch (error) {
        console.error('Error writing to storage:', error)
      }
    },
    [key]
  )

  // Delete the value from both backends and reset state to the default.
  const deleteValue = useCallback(() => {
    try {
      localStorage.removeItem(key)
      if (typeof window !== 'undefined' && window.spark?.kv) {
        window.spark.kv.delete(key)
      }
      setValueInternal(defaultValue)
    } catch (error) {
      console.error('Error deleting from storage:', error)
    }
  }, [key, defaultValue])

  // Sync with localStorage changes from other tabs.
  useEffect(() => {
    const handleStorageChange = (e: StorageEvent) => {
      // NOTE(review): removals (e.newValue === null) are ignored, so a
      // delete in another tab does not reset this tab's state — confirm
      // that is the intended behavior.
      if (e.key === key && e.newValue !== null) {
        try {
          setValueInternal(JSON.parse(e.newValue))
        } catch (error) {
          console.error('Error parsing storage event:', error)
        }
      }
    }

    window.addEventListener('storage', handleStorageChange)
    return () => window.removeEventListener('storage', handleStorageChange)
  }, [key])

  return [value, setValue, deleteValue]
}
|
||||
@@ -1,11 +0,0 @@
|
||||
/**
 * @github/spark - Main Entry Point
 *
 * Re-exports the package's core functionality:
 * - sparkRuntime: mock KV / LLM / user services
 * - useKV: persistent key-value React hook
 * - sparkPlugin / createIconImportProxy: Vite build plugins
 */

export { sparkRuntime } from './spark-runtime'
export { useKV } from './hooks/index'
// NOTE(review): these re-export from .mjs files with explicit extensions;
// this relies on a TS/bundler setup that resolves .mjs from .ts — confirm.
export { default as sparkPlugin } from './spark-vite-plugin.mjs'
export { default as createIconImportProxy } from './vitePhosphorIconProxyPlugin.mjs'
|
||||
@@ -1,87 +0,0 @@
|
||||
/**
|
||||
* Spark Runtime - Core runtime services for Spark applications
|
||||
*
|
||||
* This module provides mock implementations of Spark services including:
|
||||
* - KV storage (key-value store)
|
||||
* - LLM service (language model integration)
|
||||
* - User authentication
|
||||
*/
|
||||
|
||||
// Mock KV Storage
|
||||
const kvStorage = new Map<string, any>()
|
||||
|
||||
// Create llm function with additional properties
|
||||
const llmFunction = async (prompt: string, model?: string, jsonMode?: boolean): Promise<any> => {
|
||||
console.log('Mock LLM called with prompt:', prompt, 'model:', model, 'jsonMode:', jsonMode)
|
||||
return 'This is a mock response from the Spark LLM service.'
|
||||
}
|
||||
|
||||
llmFunction.chat = async (messages: any[]) => {
|
||||
console.log('Mock LLM chat called with messages:', messages)
|
||||
return {
|
||||
role: 'assistant',
|
||||
content: 'This is a mock response from the Spark LLM service.'
|
||||
}
|
||||
}
|
||||
|
||||
llmFunction.complete = async (prompt: string) => {
|
||||
console.log('Mock LLM complete called with prompt:', prompt)
|
||||
return 'This is a mock completion from the Spark LLM service.'
|
||||
}
|
||||
|
||||
/**
 * Global Spark runtime object (assigned to window.spark by the init module).
 *
 * Provides mock implementations of three service areas:
 * - kv:   key-value storage backed by an in-memory Map, mirrored to
 *         localStorage so values survive page reloads
 * - llm:  mock language-model service (callable, plus chat/complete)
 * - user: static mock authentication info
 */
export const sparkRuntime = {
  kv: {
    // Read a value: the in-memory Map wins; localStorage is the fallback
    // (values there are JSON-encoded by `set`).
    get: <T = any>(key: string): T | undefined => {
      try {
        const value = kvStorage.get(key)
        if (value !== undefined) {
          return value as T
        }
        const stored = localStorage.getItem(key)
        return stored ? JSON.parse(stored) : undefined
      } catch (error) {
        // Storage/parse failures are swallowed and reported as "missing".
        console.error('Error getting KV value:', error)
        return undefined
      }
    },
    // Write to both stores; localStorage receives the JSON-encoded form.
    set: (key: string, value: any) => {
      try {
        kvStorage.set(key, value)
        localStorage.setItem(key, JSON.stringify(value))
      } catch (error) {
        // NOTE(review): if setItem throws (quota, unserializable value),
        // the Map keeps the value while localStorage does not — the two
        // stores can diverge here. Confirm this is acceptable.
        console.error('Error setting KV value:', error)
      }
    },
    // Remove the key from both stores.
    delete: (key: string) => {
      try {
        kvStorage.delete(key)
        localStorage.removeItem(key)
      } catch (error) {
        console.error('Error deleting KV value:', error)
      }
    },
    clear: () => {
      try {
        // Get keys before clearing
        const keysToRemove = Array.from(kvStorage.keys())
        kvStorage.clear()
        // Clear corresponding keys from localStorage. Only keys present in
        // the Map are removed; unrelated localStorage entries are untouched.
        keysToRemove.forEach(key => localStorage.removeItem(key))
      } catch (error) {
        console.error('Error clearing KV storage:', error)
      }
    },
    // Keys currently held in the in-memory Map (localStorage-only keys that
    // were never loaded into the Map are not listed).
    keys: () => Array.from(kvStorage.keys())
  },

  // Mock LLM service (defined above): callable plus chat/complete helpers.
  llm: llmFunction,

  user: {
    // Static mock identity; no real authentication is performed.
    getCurrentUser: () => ({
      id: 'mock-user-id',
      name: 'Mock User',
      email: 'mock@example.com'
    }),
    isAuthenticated: () => true
  }
}
|
||||
@@ -1,21 +0,0 @@
|
||||
/**
 * Spark Vite Plugin
 *
 * Hooks Spark into the Vite build process. Both hooks are currently
 * placeholders: configResolved ignores the resolved config, and
 * transformIndexHtml returns the document unchanged.
 */
export default function sparkPlugin() {
  // Placeholder for Spark-specific configuration handling.
  const handleConfig = (_config) => {
  }

  // Placeholder for Spark runtime injection; the HTML passes through as-is.
  const passThroughHtml = (html) => {
    return html
  }

  return {
    name: 'spark-vite-plugin',
    configResolved: handleConfig,
    transformIndexHtml: passThroughHtml
  }
}
|
||||
@@ -1,22 +0,0 @@
|
||||
/**
 * Spark Initialization Module
 *
 * This module initializes the Spark runtime and makes it available globally
 * via window.spark. It should be imported early in the application lifecycle,
 * because the assignment happens as an import-time side effect.
 */

import { sparkRuntime } from './spark-runtime'

// Ambient declaration so `window.spark` type-checks throughout the app;
// the type is derived from the runtime object itself.
declare global {
  interface Window {
    spark: typeof sparkRuntime
  }
}

// Initialize window.spark. The typeof-window guard keeps this module safe
// to import in non-browser contexts (SSR, tests, build tooling).
if (typeof window !== 'undefined') {
  window.spark = sparkRuntime
}

export default sparkRuntime
|
||||
29
packages/spark/src/types.d.ts
vendored
29
packages/spark/src/types.d.ts
vendored
@@ -1,29 +0,0 @@
|
||||
/**
 * TypeScript Type Definitions for Spark
 *
 * Global (ambient) type declarations for window.spark. The inline type
 * mirrors the runtime object's shape: kv storage, a callable llm service
 * with chat/complete helpers, and mock user/auth accessors.
 */

declare global {
  interface Window {
    spark: {
      // Key-value storage API.
      kv: {
        get: <T = any>(key: string) => T | undefined
        set: (key: string, value: any) => void
        delete: (key: string) => void
        clear: () => void
        keys: () => string[]
      }
      // LLM service: directly callable, plus chat/complete methods
      // (an intersection of a call signature and an object type).
      llm: ((prompt: string, model?: string, jsonMode?: boolean) => Promise<any>) & {
        chat: (messages: any[]) => Promise<{ role: string; content: string }>
        complete: (prompt: string) => Promise<string>
      }
      // User/auth accessors.
      user: {
        getCurrentUser: () => { id: string; name: string; email: string }
        isAuthenticated: () => boolean
      }
    }
  }
}

// Required so this file is treated as a module; global augmentation
// (`declare global`) only works inside modules.
export {}
|
||||
@@ -1,24 +0,0 @@
|
||||
/**
 * Vite Phosphor Icon Proxy Plugin
 *
 * Pass-through handling for Phosphor icon imports: both hooks defer to
 * Vite's default resolution and transform behavior.
 */
export default function createIconImportProxy() {
  return {
    name: 'vite-phosphor-icon-proxy',
    // Phosphor imports are matched but deliberately handed back to Vite
    // (null = "resolve normally"); everything else falls through (undefined).
    resolveId: (id) => (id.includes('@phosphor-icons/react') ? null : undefined),
    // No source transformation is applied.
    transform: (_code, _id) => null
  }
}
|
||||
@@ -1,25 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"module": "ESNext",
|
||||
"lib": ["ES2020", "DOM", "DOM.Iterable"],
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx",
|
||||
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
|
||||
"skipLibCheck": true,
|
||||
"esModuleInterop": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": ["src"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
Reference in New Issue
Block a user