From 61ef55132faa9ba368f61374cf6ea0e19c451c83 Mon Sep 17 00:00:00 2001
From: shiruixing <2946193417@qq.com>
Date: Fri, 7 Feb 2025 23:40:24 +0800
Subject: [PATCH] fix: fix top-level await in examples

---
 examples/fill-in-middle/fill.ts   | 16 +++++++-----
 examples/multimodal/multimodal.ts | 22 +++++++++-------
 examples/pull-progress/pull.ts    | 42 +++++++++++++++++--------------
 3 files changed, 46 insertions(+), 34 deletions(-)

diff --git a/examples/fill-in-middle/fill.ts b/examples/fill-in-middle/fill.ts
index 149c4c8..044e229 100644
--- a/examples/fill-in-middle/fill.ts
+++ b/examples/fill-in-middle/fill.ts
@@ -1,8 +1,12 @@
 import ollama from 'ollama'
 
-const response = await ollama.generate({
-  model: 'deepseek-coder-v2',
-  prompt: `def add(`,
-  suffix: `return c`,
-})
-console.log(response.response)
+async function main() {
+  const response = await ollama.generate({
+    model: 'deepseek-coder-v2',
+    prompt: `def add(`,
+    suffix: `return c`,
+  })
+  console.log(response.response)
+}
+
+main().catch(console.error)
diff --git a/examples/multimodal/multimodal.ts b/examples/multimodal/multimodal.ts
index c03db12..ec66446 100644
--- a/examples/multimodal/multimodal.ts
+++ b/examples/multimodal/multimodal.ts
@@ -1,12 +1,16 @@
 import ollama from 'ollama'
 
-const imagePath = './examples/multimodal/cat.jpg'
-const response = await ollama.generate({
-  model: 'llava',
-  prompt: 'describe this image:',
-  images: [imagePath],
-  stream: true,
-})
-for await (const part of response) {
-  process.stdout.write(part.response)
+async function main() {
+  const imagePath = './examples/multimodal/cat.jpg'
+  const response = await ollama.generate({
+    model: 'llava',
+    prompt: 'describe this image:',
+    images: [imagePath],
+    stream: true,
+  })
+  for await (const part of response) {
+    process.stdout.write(part.response)
+  }
 }
+
+main().catch(console.error)
diff --git a/examples/pull-progress/pull.ts b/examples/pull-progress/pull.ts
index 5d04945..fc32ec7 100644
--- a/examples/pull-progress/pull.ts
+++ b/examples/pull-progress/pull.ts
@@ -1,25 +1,29 @@
 import ollama from 'ollama'
 
-const model = 'llama3.1'
-console.log(`downloading ${model}...`)
-let currentDigestDone = false
-const stream = await ollama.pull({ model: model, stream: true })
-for await (const part of stream) {
-  if (part.digest) {
-    let percent = 0
-    if (part.completed && part.total) {
-      percent = Math.round((part.completed / part.total) * 100)
-    }
-    process.stdout.clearLine(0) // Clear the current line
-    process.stdout.cursorTo(0) // Move cursor to the beginning of the line
-    process.stdout.write(`${part.status} ${percent}%...`) // Write the new text
-    if (percent === 100 && !currentDigestDone) {
-      console.log() // Output to a new line
-      currentDigestDone = true
+async function main() {
+  const model = 'llama3.1'
+  console.log(`downloading ${model}...`)
+  let currentDigestDone = false
+  const stream = await ollama.pull({ model: model, stream: true })
+  for await (const part of stream) {
+    if (part.digest) {
+      let percent = 0
+      if (part.completed && part.total) {
+        percent = Math.round((part.completed / part.total) * 100)
+      }
+      process.stdout.clearLine(0) // Clear the current line
+      process.stdout.cursorTo(0) // Move cursor to the beginning of the line
+      process.stdout.write(`${part.status} ${percent}%...`) // Write the new text
+      if (percent === 100 && !currentDigestDone) {
+        console.log() // Output to a new line
+        currentDigestDone = true
+      } else {
+        currentDigestDone = false
+      }
     } else {
-      currentDigestDone = false
+      console.log(part.status)
     }
-  } else {
-    console.log(part.status)
   }
 }
+
+main().catch(console.error)