fix: embedding context deadlock (#402)
giladgd authored Dec 26, 2024
1 parent 6a54163 commit e2c5c3f
Showing 2 changed files with 3 additions and 9 deletions.
.github/workflows/build.yml (3 changes: 2 additions & 1 deletion)
@@ -23,7 +23,8 @@ jobs:
       - name: Download latest llama.cpp release
         env:
           CI: true
-        run: node ./dist/cli/cli.js source download --release latest --skipBuild --noBundle --noUsageExample --updateBinariesReleaseMetadataAndSaveGitBundle
+        # pinned to `b4291` temporarily until the Windows on Arm64 build is fixed
+        run: node ./dist/cli/cli.js source download --release b4291 --skipBuild --noBundle --noUsageExample --updateBinariesReleaseMetadataAndSaveGitBundle
       - name: Upload build artifact
         uses: actions/upload-artifact@v4
         with:
src/evaluator/LlamaModel/LlamaModel.ts (9 changes: 1 addition & 8 deletions)
@@ -517,14 +517,7 @@ export class LlamaModel {
         if (this._vocabOnly)
             throw new Error("Model is loaded in vocabOnly mode, so no context can be created");

-        return await withLock(this._llama._memoryLock, LlamaLocks.loadToMemory, options.createSignal, async () => {
-            const preventDisposalHandle = this._backendModelDisposeGuard.createPreventDisposalHandle();
-            try {
-                return await LlamaEmbeddingContext._create({_model: this}, options);
-            } finally {
-                preventDisposalHandle.dispose();
-            }
-        });
+        return await LlamaEmbeddingContext._create({_model: this}, options);
     }

     /**
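Note on the fix: the removed wrapper acquired the model's memory lock (withLock with LlamaLocks.loadToMemory) before delegating to LlamaEmbeddingContext._create. A plausible reading of the deadlock, not spelled out in the diff itself, is that embedding-context creation eventually waits on that same lock, so the outer holder can never release it and the inner wait never completes. The sketch below uses a hypothetical SimpleLock class, not the library's withLock helper, to illustrate how a nested acquisition of a non-reentrant lock hangs:

// Hypothetical, minimal non-reentrant lock; only to illustrate the hazard,
// not the withLock helper used in the real code.
class SimpleLock {
    private tail: Promise<void> = Promise.resolve();

    public async acquire<T>(fn: () => Promise<T>): Promise<T> {
        const previous = this.tail;
        let release!: () => void;
        this.tail = new Promise<void>((resolve) => {
            release = resolve;
        });

        await previous; // wait until the previous holder releases
        try {
            return await fn();
        } finally {
            release(); // let the next waiter in
        }
    }
}

const memoryLock = new SimpleLock();

async function createEmbeddingContextWithOuterLock(): Promise<string> {
    // Outer acquisition, like the wrapper this commit removes.
    return await memoryLock.acquire(async () => {
        // If context creation internally waits on the same lock,
        // this inner acquisition is never granted: both sides wait forever.
        return await memoryLock.acquire(async () => "embedding context");
    });
}

// Never resolves; with the outer acquisition removed, it would.
void createEmbeddingContextWithOuterLock().then(console.log);

With the outer acquisition gone, only one wait on the lock remains on this code path, so embedding-context creation can proceed.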
