update download links to the releases page until we have a better download url

Jeffrey Morgan 2023-07-07 15:21:40 -04:00
parent b24be8c6b3
commit 12199bcfa8
3 changed files with 8 additions and 11 deletions

@@ -16,7 +16,7 @@ Run large language models with `llama.cpp`.
 ## Install
-- Download for macOS
+- [Download](https://github.com/jmorganca/ollama/releases/latest) for macOS
 - Download for Windows (coming soon)
 - Docker: `docker run -p 11434:11434 ollama/ollama`
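
The hunk above replaces the plain "Download for macOS" item with a link to GitHub's releases/latest redirect rather than a fixed URL. As a rough illustration only (not part of this commit), a small Node 18+ TypeScript snippet can follow that redirect to see which tagged release it currently resolves to:

// Rough illustration only (not part of this commit): follow GitHub's
// releases/latest redirect to see which tagged release it currently points to.
// Assumes Node 18+ so the global fetch API is available.
async function resolveLatestRelease(): Promise<string> {
  const res = await fetch('https://github.com/jmorganca/ollama/releases/latest', {
    redirect: 'follow',
  });
  // After the redirect, res.url is the concrete tag page,
  // e.g. https://github.com/jmorganca/ollama/releases/tag/<version>.
  return res.url;
}

resolveLatestRelease().then((url) => console.log(url)).catch(console.error);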

@@ -5,18 +5,15 @@ export default async function Home() {
     <main className='flex min-h-screen max-w-2xl flex-col p-4 lg:p-24'>
       <h1 className='font-serif text-3xl'>ollama</h1>
       <section className='my-8'>
-        <p className='my-3 mb-8 max-w-md'>
+        <p className='my-3 max-w-md'>
           <a className='underline' href='https://github.com/jmorganca/ollama'>
             Ollama
           </a>{' '}
-          is a tool for running large language models.
-          <br />
-          <br />
-          Get started with Ollama using pip:
+          is a tool for running large language models. The latest version is available for download{' '}
+          <a className='underline' href='https://github.com/jmorganca/ollama/releases/latest'>
+            here.
+          </a>
         </p>
-        <pre className='my-4'>
-          <code>pip install ollama</code>
-        </pre>
       </section>
       <section className='my-4'>
         <h2 className='mb-4 text-lg'>Example models you can try running:</h2>
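
The commit message calls this an interim step "until we have a better download url". Purely as a hypothetical sketch, and not something this change implements: since the web app already depends on @octokit/rest (visible in the web/package-lock.json hunk below), a direct asset URL could later be resolved from the latest GitHub release instead of sending visitors to the releases page. The 'darwin' asset-name filter below is an assumption about how release artifacts might be named.

import { Octokit } from '@octokit/rest';

// Hypothetical helper, not part of this commit: look up a direct
// browser_download_url from the latest GitHub release.
async function latestMacDownloadUrl(): Promise<string | undefined> {
  const octokit = new Octokit(); // unauthenticated is fine for a public repo at low volume
  const { data: release } = await octokit.rest.repos.getLatestRelease({
    owner: 'jmorganca',
    repo: 'ollama',
  });
  // Asset naming is assumed; adjust the filter to whatever the release actually ships.
  const asset = release.assets.find((a) => a.name.toLowerCase().includes('darwin'));
  return asset?.browser_download_url;
}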

web/package-lock.json (generated, 4 changed lines)

@@ -1,12 +1,12 @@
 {
   "name": "web",
-  "version": "0.1.0",
+  "version": "0.0.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "web",
-      "version": "0.1.0",
+      "version": "0.0.0",
       "dependencies": {
         "@octokit/rest": "^19.0.13",
         "@types/node": "20.4.0",