diff --git a/electron-builder.yml b/electron-builder.yml
index fc172df..03d40e9 100644
--- a/electron-builder.yml
+++ b/electron-builder.yml
@@ -6,6 +6,7 @@ directories:
 
 files:
   - dist/**/* # Include everything in the dist folder generated by Vite
+  - preloader/**/*
   # - node_modules/**/* # Include necessary node modules
   # - package.json
   - electron.js
diff --git a/electron.js b/electron.js
index 37b1421..ccfa9f9 100644
--- a/electron.js
+++ b/electron.js
@@ -24,7 +24,7 @@ function createWindow() {
     if(app.isPackaged) {
         // eslint-disable-next-line
         win.loadFile(path.join(__dirname, 'dist/index.html'))
-        Menu.setApplicationMenu(null);
+        // Menu.setApplicationMenu(null);
     } else {
         win.loadURL("http://localhost:3000");
     }
@@ -46,3 +46,7 @@ app.whenReady().then(() => {
         BrowserWindow.getAllWindows().length === 0 && createWindow()
     })
 })
+
+ipcMain.handle('electron-settings', ()=>{
+    return { userDataPath: app.getPath("userData"), isPackaged: app.isPackaged }
+})
\ No newline at end of file
diff --git a/package.json b/package.json
index c99efc6..3ea5a7f 100644
--- a/package.json
+++ b/package.json
@@ -4,7 +4,7 @@
     "name": "Bohan Cheng",
     "email": "cbh778899@outlook.com"
   },
-  "version": "0.1.12",
+  "version": "0.2.0",
   "main": "electron.js",
   "scripts": {
     "dev": "npm run start & npm run electron",
diff --git a/preloader/node-llama-cpp-preloader.js b/preloader/node-llama-cpp-preloader.js
index 7c4e7e3..b897797 100644
--- a/preloader/node-llama-cpp-preloader.js
+++ b/preloader/node-llama-cpp-preloader.js
@@ -1,17 +1,20 @@
-const { createWriteStream, existsSync, statSync } = require("fs");
+const { ipcRenderer } = require("electron");
+const { createWriteStream, existsSync, statSync, mkdirSync } = require("fs");
 const path = require("path");
 
 let llama, getLlama, LlamaChatSession, current_model;
-async function importer() {
+let model_path = '';
+async function initer() {
     const nodeLlamaCpp = await import('node-llama-cpp')
     getLlama = nodeLlamaCpp.getLlama;
     LlamaChatSession = nodeLlamaCpp.LlamaChatSession;
-}
-importer();
 
-const model_path = path.join(__dirname, '..', 'models')
-// const model_path = path.join(path.dirname(fileURLToPath(import.meta.url)), '..', 'models')
+    const {isPackaged, userDataPath} = await ipcRenderer.invoke('electron-settings');
+    model_path = isPackaged ? path.join(userDataPath, 'models') : path.join(__dirname, '..', 'models')
+    if(!existsSync(model_path)) mkdirSync(model_path)
+}
+initer();
 
 let llama_session, stop_signal;
 
@@ -22,14 +25,15 @@ let llama_session, stop_signal;
 async function loadModel(model_name = '') {
     if(!model_name || current_model === model_name) return;
     current_model = model_name;
+    const modelPath = path.join(model_path, model_name)
+    if(!existsSync(modelPath)) {
+        return;
+    }
 
     if(llama) await llama.dispose();
     llama = await getLlama()
 
-    const model = await llama.loadModel({
-        modelPath: path.join(model_path, model_name)
-    })
-
+    const model = await llama.loadModel({modelPath})
     const context = await model.createContext();
     llama_session = new LlamaChatSession({
         contextSequence: context.getSequence()