diff --git a/llama/CMakeLists.txt b/llama/CMakeLists.txt
index e379d6a5..efd8fce4 100644
--- a/llama/CMakeLists.txt
+++ b/llama/CMakeLists.txt
@@ -40,6 +40,7 @@ endif()
 
 add_definitions(-DNAPI_VERSION=7)
 set(CMAKE_POSITION_INDEPENDENT_CODE ON)
+set(CMAKE_PLATFORM_NO_VERSIONED_SONAME ON)
 
 set(LLAMA_BUILD_COMMON ON)
 
diff --git a/llama/addon/globals/addonLog.cpp b/llama/addon/globals/addonLog.cpp
index c80820a5..4e66b316 100644
--- a/llama/addon/globals/addonLog.cpp
+++ b/llama/addon/globals/addonLog.cpp
@@ -94,12 +94,12 @@ void addonLlamaCppLogCallback(ggml_log_level level, const char* text, void* user
 }
 
 Napi::Value setLogger(const Napi::CallbackInfo& info) {
-    if (info.Length() < 1 || !info[0].IsFunction()) {
-        if (addonJsLoggerCallbackSet) {
-            addonJsLoggerCallbackSet = false;
-            addonThreadSafeLoggerCallback.Release();
-        }
-
+    if (addonJsLoggerCallbackSet) {
+        addonJsLoggerCallbackSet = false;
+        addonThreadSafeLoggerCallback.Release();
+    }
+
+    if (info.Length() < 1 || !info[0].IsFunction()) {
         return info.Env().Undefined();
     }
 
diff --git a/llama/cmake/addVariantSuffix.cmake b/llama/cmake/addVariantSuffix.cmake
index e5eb4954..064c5b62 100644
--- a/llama/cmake/addVariantSuffix.cmake
+++ b/llama/cmake/addVariantSuffix.cmake
@@ -1,5 +1,5 @@
 function(addVariantSuffix originalTarget variantSuffix)
-    if (NOT TARGET ${originalTarget} OR variantSuffix STREQUAL "")
+    if (NOT TARGET ${originalTarget} OR ${variantSuffix} STREQUAL "")
         return()
     endif()
 
diff --git a/src/bindings/Llama.ts b/src/bindings/Llama.ts
index d5725e96..55c8592a 100644
--- a/src/bindings/Llama.ts
+++ b/src/bindings/Llama.ts
@@ -137,7 +137,7 @@ export class Llama {
         this._supportsMmap = bindings.getSupportsMmap();
         this._gpuSupportsMmap = bindings.getGpuSupportsMmap();
         this._supportsMlock = bindings.getSupportsMlock();
-        this._mathCores = bindings.getMathCores();
+        this._mathCores = Math.floor(bindings.getMathCores());
         this._consts = bindings.getConsts();
         this._vramOrchestrator = vramOrchestrator;
         this._vramPadding = vramPadding;
diff --git a/templates/electron-typescript-react/src/utils/createRendererSideBirpc.ts b/templates/electron-typescript-react/src/utils/createRendererSideBirpc.ts
index 368b550c..2d308e57 100644
--- a/templates/electron-typescript-react/src/utils/createRendererSideBirpc.ts
+++ b/templates/electron-typescript-react/src/utils/createRendererSideBirpc.ts
@@ -1,14 +1,14 @@
 import {createBirpc} from "birpc";
 
 export function createRendererSideBirpc<
-    const RendererFunction = Record,
-    const ElectronFunctions extends object = Record
+    const ElectronFunction = Record,
+    const RendererFunctions extends object = Record
>(
     toRendererEventName: string,
     fromRendererEventName: string,
-    electronFunctions: ElectronFunctions
+    rendererFunctions: RendererFunctions
 ) {
-    return createBirpc(electronFunctions, {
+    return createBirpc(rendererFunctions, {
         post: (data) => window.ipcRenderer.send(fromRendererEventName, data),
         on: (onData) => window.ipcRenderer.on(toRendererEventName, (event, data) => {
             onData(data);
@@ -17,4 +17,3 @@ export function createRendererSideBirpc<
         deserialize: (value) => JSON.parse(value)
     });
 }
-
diff --git a/test/modelDependent/llama3.2/sequenceState.test.ts b/test/modelDependent/llama3.2/sequenceState.test.ts
index 79addacc..599d3438 100644
--- a/test/modelDependent/llama3.2/sequenceState.test.ts
+++ b/test/modelDependent/llama3.2/sequenceState.test.ts
@@ -169,7 +169,7 @@ describe("llama 3.2", () => {
         await contextSequence2.loadStateFromFile(stateFile1Path, {acceptRisk: true});
         const res2 = await chatSession2.prompt("What did I tell you to remember?", {maxTokens: 12});
-        expect(res2).to.toMatchInlineSnapshot('"You told me to remember that "locks are not doors"."');
+        expect(res2).toMatch(/^(You told me to remember that "locks are not doors".|You told me to remember that "locks are not doors.")/);
 
         const contextSequence2TokensState = contextSequence2.tokenMeter.getState();
         expect(contextSequence2TokensState.usedInputTokens).to.be.lessThan(contextSequence1TokensState.usedInputTokens);
         expect(contextSequence2TokensState).toMatchInlineSnapshot(`